├── .cache
│   └── .gitignore
├── playbooks
│   ├── ansible.cfg
│   ├── tasks
│   │   ├── gh-set-secret.yml
│   │   └── gh-set-env-secrets.yml
│   ├── host_vars
│   │   └── localhost.yml
│   ├── deploy-bot-pat.yml
│   └── sync.yml
├── docs
│   ├── index.md
│   ├── images
│   │   ├── logo.png
│   │   ├── favicon.ico
│   │   └── logo.svg
│   ├── guides
│   │   ├── python
│   │   │   ├── pytest.md
│   │   │   ├── release_draft.png
│   │   │   ├── dependencies.md
│   │   │   ├── release.md
│   │   │   ├── packaging.md
│   │   │   └── tox.md
│   │   ├── vscode
│   │   │   ├── use-cases.md
│   │   │   └── release.md
│   │   ├── ansible
│   │   │   └── permissions.md
│   │   ├── gha.md
│   │   ├── code-reviews.md
│   │   ├── mentoring.md
│   │   ├── calver.md
│   │   └── releases.md
│   ├── _abbreviations.md
│   └── stats
│       ├── molecule-plugins.md
│       └── repos.md
├── test
│   ├── __init__.py
│   ├── test_one.py
│   ├── test_two.py
│   ├── test_repos.py
│   ├── test_forum_post.py
│   └── test_check_platform_constraints.py
├── .github
│   ├── CODEOWNERS
│   ├── workflows
│   │   ├── ah_token_refresh.yml
│   │   ├── release_collection.yml
│   │   ├── release_galaxy.yml
│   │   ├── release.yml
│   │   ├── push.yml
│   │   ├── test.yml
│   │   ├── release_ah.yml
│   │   ├── push_network.yml
│   │   ├── ack.yml
│   │   ├── finalize.yml
│   │   ├── forum_post.py
│   │   └── tox.yml
│   └── release-drafter.yml
├── .tool-versions
├── .ansible-lint
├── .config
│   ├── dictionary.txt
│   └── platform-constraints.txt
├── assets
│   └── logos
│       ├── README.md
│       └── logo-ansible-extension.svg
├── codecov.yml
├── .vscode
│   ├── extensions.json
│   └── settings.json
├── config
│   ├── repos.lst
│   └── devtools.yml
├── sonar-project.properties
├── .pre-commit-hooks.yaml
├── cspell.config.yaml
├── biome.json
├── .yamllint
├── .readthedocs.yml
├── LICENSE
├── src
│   └── team_devtools
│       ├── __init__.py
│       └── check_platform_constraints.py
├── AGENTS.md
├── renovate.json
├── .pre-commit-config.yaml
├── .gitignore
├── mkdocs.yml
├── README.md
└── pyproject.toml
/.cache/.gitignore:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/playbooks/ansible.cfg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | ../README.md
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
1 | """Tests."""
2 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @ansible/devtools
2 |
--------------------------------------------------------------------------------
/.tool-versions:
--------------------------------------------------------------------------------
1 | nodejs latest
2 | python latest
3 |
--------------------------------------------------------------------------------
/.ansible-lint:
--------------------------------------------------------------------------------
1 | ---
2 | exclude_paths:
3 | - mkdocs.yml
4 | - dependabot.yml
5 |
--------------------------------------------------------------------------------
/.config/dictionary.txt:
--------------------------------------------------------------------------------
1 | SME
2 | abhikdps
3 | alisonlhart
4 | cidrblock
5 | compat
6 | pytest
7 |
--------------------------------------------------------------------------------
/docs/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/team-devtools/HEAD/docs/images/logo.png
--------------------------------------------------------------------------------
/docs/images/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/team-devtools/HEAD/docs/images/favicon.ico
--------------------------------------------------------------------------------
/docs/guides/python/pytest.md:
--------------------------------------------------------------------------------
1 | Useful resources for pytest:
2 |
3 | - https://pganssle-talks.github.io/xfail-lightning
4 |
--------------------------------------------------------------------------------
/docs/guides/python/release_draft.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ansible/team-devtools/HEAD/docs/guides/python/release_draft.png
--------------------------------------------------------------------------------
/assets/logos/README.md:
--------------------------------------------------------------------------------
1 | # Logos
2 |
3 | # Ansible extension for vscode
4 |
5 |
6 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | ---
2 | comment: false
3 | coverage:
4 | status:
5 | patch: true
6 | project:
7 | default:
8 | threshold: 0.5%
9 |
--------------------------------------------------------------------------------
/test/test_one.py:
--------------------------------------------------------------------------------
1 | """Test module for the package."""
2 |
3 |
4 | def test_placeholder() -> None:
5 | """Placeholder test."""
6 | from team_devtools import __version__
7 |
8 | assert __version__
9 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "biomejs.biome",
4 | "charliermarsh.ruff",
5 | "github.vscode-github-actions",
6 | "tombi-toml.tombi"
7 | ],
8 | "unwantedRecommendations": [
9 | "esbenp.prettier-vscode",
10 | "tamasfe.even-better-toml"
11 | ]
12 | }
13 |
--------------------------------------------------------------------------------
/config/repos.lst:
--------------------------------------------------------------------------------
1 | actions
2 | ansible-compat
3 | ansible-content-actions
4 | ansible-creator
5 | ansible-dev-environment
6 | ansible-dev-tools
7 | ansible-lint
8 | ansible-navigator
9 | ansible-sign
10 | mkdocs-ansible
11 | molecule
12 | pytest-ansible
13 | team-devtools
14 | tox-ansible
15 | vscode-ansible
16 |
--------------------------------------------------------------------------------
/playbooks/tasks/gh-set-secret.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Configure secret
3 | ansible.builtin.shell: >
4 | gh secret --repo {{ repo }} set --env {{ env.env_name }} {{ item.gh_secret_name }} --body {{ item.gh_secret_value }}
5 | changed_when: true
6 | no_log: false
7 | loop: "{{ env.env_secrets | dict2items('gh_secret_name', 'gh_secret_value') }}"
8 |
--------------------------------------------------------------------------------
/docs/_abbreviations.md:
--------------------------------------------------------------------------------
1 | *[deliverable]: A package, container or application that is supposed to be published at the end of the build.
2 | *[GHA]: GitHub Actions CI/CD
3 | *[PR]: Pull Request
4 | *[ADT]: Ansible Dev Tools, also known as ansible-dev-tools python package.
5 | *[PDE]: Product Delivery Engineering
6 | *[SME]: Subject Matter Expert, check the team Google doc for current assignments.
7 |
--------------------------------------------------------------------------------
/sonar-project.properties:
--------------------------------------------------------------------------------
1 | sonar.python.coverage.reportPaths=**/coverage.xml
2 | sonar.python.version=3.13
3 | sonar.sources=src/
4 | sonar.tests=test/
5 | # Ignore GitHub Actions rule S7637 across the repository
6 | sonar.issue.ignore.multicriteria=gha1
7 | sonar.issue.ignore.multicriteria.gha1.ruleKey=githubactions:S7637
8 | sonar.issue.ignore.multicriteria.gha1.resourceKey=.github/workflows/**/*
9 |
--------------------------------------------------------------------------------
/.pre-commit-hooks.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | - id: check-platform-constraints
3 | name: Check platform constraints and update renovate.json
4 | entry: check-platform-constraints
5 | language: python
6 | additional_dependencies: ["packaging>=23.2", "pyyaml"]
7 | files: ^pyproject\.toml$
8 | pass_filenames: false
9 | description: Validates dependencies against platform constraints and updates renovate.json
10 |
--------------------------------------------------------------------------------
/test/test_two.py:
--------------------------------------------------------------------------------
1 | """Test module for archive exclusion."""
2 |
3 | import os
4 | from pathlib import Path
5 |
6 |
7 | def test_two() -> None:
8 | """Create a file that should not be collected by archive action."""
9 | if "TOX_ENV_DIR" in os.environ:
10 | path = Path(os.environ["TOX_ENV_DIR"]) / "log" / "popen-gw0"
11 | path.mkdir(parents=True, exist_ok=True)
12 | (path / "foo.txt").touch()
13 |
--------------------------------------------------------------------------------
/docs/guides/python/dependencies.md:
--------------------------------------------------------------------------------
1 | # Managing Python dependencies
2 |
3 | To upgrade all dependencies from the uv.lock file, run the command below:
4 |
5 | ```shell
6 | uv sync --upgrade
7 | ```
8 |
9 | ## Exporting dependencies
10 |
11 | To obtain a classic pip `requirements.txt` file with runtime (non-testing)
12 | dependencies, you can run:
13 |
14 | ```shell
15 | uv export --all-extras --no-dev --no-default-groups
16 | ```
17 |
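18 | If you need the result as a file (for example to feed other tooling), redirecting the output is enough; the `requirements.txt` name below is only an example:
19 |
20 | ```shell
21 | uv export --all-extras --no-dev --no-default-groups > requirements.txt
22 | ```
23 |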
--------------------------------------------------------------------------------
/.config/platform-constraints.txt:
--------------------------------------------------------------------------------
1 | # Platform compatibility constraints for AAP and RHEL
2 | # These represent the MAXIMUM versions we can use to remain compatible
3 | # with downstream platform packages
4 |
5 | # AAP 2.5/2.6 ships ansible-core 2.16.x
6 | ansible-core<2.17
7 |
8 | # RHEL 8/9 ships cffi 1.15.x
9 | cffi<1.16
10 |
11 | # RHEL 8/9 ships setuptools 65.5.1
12 | setuptools<65.6
13 |
14 | # galaxy-importer constraint
15 | packaging<25.0
16 |
--------------------------------------------------------------------------------
/cspell.config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | dictionaryDefinitions:
3 | - name: words
4 | path: .config/dictionary.txt
5 | addWords: true
6 | dictionaries:
7 | - bash
8 | - networking-terms
9 | - python
10 | - words
11 | - "!aws"
12 | - "!backwards-compatibility"
13 | - "!cryptocurrencies"
14 | - "!cpp"
15 | ignorePaths:
16 | - .config/requirements*
17 | - \.*
18 | - cspell.config.yaml
19 | - mkdocs.yml
20 | - pyproject.toml
21 | - tox.ini
22 |
23 | languageSettings:
24 | - languageId: python
25 | allowCompoundWords: false
26 |
--------------------------------------------------------------------------------
/biome.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://biomejs.dev/schemas/latest/schema.json",
3 | "assist": {
4 | "actions": {
5 | "source": {
6 | "useSortedKeys": "on"
7 | }
8 | }
9 | },
10 | "files": {
11 | "includes": ["**"]
12 | },
13 | "formatter": {
14 | "enabled": true,
15 | "indentStyle": "space"
16 | },
17 | "json": {
18 | "formatter": {
19 | "enabled": true,
20 | "indentStyle": "space"
21 | }
22 | },
23 | "vcs": {
24 | "clientKind": "git",
25 | "enabled": true,
26 | "useIgnoreFile": true
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/playbooks/tasks/gh-set-env-secrets.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Create github environment
3 | ansible.builtin.shell: >
4 | gh api --method PUT -H "Accept: application/vnd.github+json" repos/{{ repo }}/environments/{{ env.env_name }}
5 | changed_when: true
6 | loop: "{{ env_secrets | dict2items(key_name='env_name', value_name='env_secrets') }}"
7 | loop_control:
8 | loop_var: env
9 |
10 | - name: For each secret in environment
11 | ansible.builtin.include_tasks: gh-set-secret.yml
12 | loop: "{{ env_secrets | dict2items(key_name='env_name', value_name='env_secrets') }}"
13 | loop_control:
14 | loop_var: env
15 |
--------------------------------------------------------------------------------
/playbooks/host_vars/localhost.yml:
--------------------------------------------------------------------------------
1 | ---
2 | env_secrets:
3 | ack: # github environment name
4 | BOT_PAT: !vault |
5 | $ANSIBLE_VAULT;1.1;AES256
6 | 39316132336366663432646530316462373436646132633437363032613335626263616564663437
7 | 3739303161373964643437623663393663343666326564660a633637383631386433373633383736
8 | 38656339306634363737656530333531313638313865666232306238626230326365373063363133
9 | 6464643330333332320a633631663661616532356262363034386664316339306463306264393636
10 | 37386663626565386362626133623538343264353363613164373662306335343038376237313566
11 | 6564643761366534643538666532386339353238656237313532
12 |
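13 | # Note: a vaulted value like the one above can be (re)generated with something like
14 | # the following (the token shown is only a placeholder, not a real secret):
15 | #   ansible-vault encrypt_string 'ghp_example_token' --name 'BOT_PAT'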
--------------------------------------------------------------------------------
/.yamllint:
--------------------------------------------------------------------------------
1 | ---
2 | extends: default
3 | rules:
4 | braces:
5 | max-spaces-inside: 1
6 | comments:
7 | # prettier compatibility
8 | min-spaces-from-content: 1
9 | # caused false-positives with file sequence items being commented
10 | # https://github.com/adrienverge/yamllint/issues/384
11 | comments-indentation: disable
12 | # ansible standards do require --- prefix
13 | document-start: disable
14 | # we need to decide if we want to impose one or not
15 | line-length: disable
16 | octal-values:
17 | forbid-implicit-octal: true
18 | forbid-explicit-octal: true
19 | truthy:
20 | # "on:" is perfectly valid key on github actions:
21 | check-keys: false
22 |
--------------------------------------------------------------------------------
/playbooks/deploy-bot-pat.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Deploy BOT_PAT secret
3 | hosts: localhost
4 | gather_facts: false
5 | tasks:
6 | - name: Load repositories
7 | ansible.builtin.include_vars:
8 | file: ../config/devtools.yml
9 | name: devtools
10 |
11 | - name: Display info
12 | ansible.builtin.debug:
13 | msg: "{{ item }}"
14 | loop: "{{ env_secrets | dict2items(key_name='env_name', value_name='env_secrets') }}"
15 |
16 | - name: Loop over repositories
17 | ansible.builtin.include_tasks: tasks/gh-set-env-secrets.yml
18 | loop: "{{ devtools.repos }}"
19 | loop_control:
20 | label: "{{ repo }}"
21 | loop_var: repo
22 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "[json]": {
3 | "editor.defaultFormatter": "biomejs.biome"
4 | },
5 | "[markdown]": {
6 | "editor.defaultFormatter": "biomejs.biome"
7 | },
8 | "[python]": {
9 | "editor.codeActionsOnSave": {
10 | "source.fixAll": "explicit",
11 | "source.organizeImports": "explicit"
12 | },
13 | "editor.defaultFormatter": "charliermarsh.ruff",
14 | "editor.formatOnSave": true
15 | },
16 | "[toml]": {
17 | "editor.defaultFormatter": "tombi-toml.tombi"
18 | },
19 | "editor.codeActionsOnSave": {
20 | "source.action.useSortedKeys.biome": "explicit",
21 | "source.fixAll.biome": "always",
22 | "source.organizeImports.biome": "explicit"
23 | },
24 | "editor.defaultFormatter": "biomejs.biome",
25 | "editor.formatOnSave": true
26 | }
27 |
--------------------------------------------------------------------------------
/.github/workflows/ah_token_refresh.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Refresh the automation hub token
3 | on:
4 | workflow_call:
5 | inputs:
6 | environment:
7 | description: The deployment environment
8 | required: true
9 | type: string
10 | secrets:
11 | ah_token:
12 | required: true
13 |
14 | jobs:
15 | refresh:
16 | runs-on: ubuntu-24.04
17 | environment: ${{ inputs.environment }}
18 | steps:
19 | - name: Refresh the automation hub token
20 | run: >-
21 | curl https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token
22 | -d grant_type=refresh_token
23 | -d client_id=cloud-services
24 | -d refresh_token="${{ secrets.ah_token }}"
25 | --fail --silent --show-error --output /dev/null
26 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: 2
3 |
4 | mkdocs:
5 | fail_on_warning: true
6 | configuration: mkdocs.yml
7 |
8 | build:
9 | os: ubuntu-24.04
10 | tools:
11 | python: "3.13"
12 | commands:
13 | - curl https://mise.run | sh
14 | - ~/.local/bin/mise settings experimental=true
15 | - ~/.local/bin/mise settings python.compile=false
16 | - ~/.local/bin/mise settings python.uv_venv_auto=true
17 | - ~/.local/bin/mise trust
18 | - ~/.local/bin/mise install
19 | - PATH=~/.local/share/mise/shims:$PATH ~/.local/bin/mise doctor || true
20 | - PATH=~/.local/share/mise/shims:$PATH ~/.local/bin/mise exec --command "python3 -m pip install tox"
21 | - PATH=~/.local/share/mise/shims:$PATH ~/.local/bin/mise exec --command "python3 -m tox -e docs"
22 | submodules:
23 | include: all
24 | recursive: true
25 |
--------------------------------------------------------------------------------
/docs/guides/vscode/use-cases.md:
--------------------------------------------------------------------------------
1 | Work in progress to codify various use cases for the vscode-ansible extension.
2 |
3 | - Host OS:
4 | - Linux (`L`)
5 | - MacOS (`M`)
6 | - Windows (`W`)
7 | - Execution
8 | - Native (`N`) -- ansible installed locally, likely inside a virtualenv
9 | - Podman (`P`) - community-ansible-dev-tools image
10 | - Docker (`C`) - community-ansible-dev-tools image
11 |
12 | ## Cases
13 |
14 | - If the user opens a project on Windows without using WSL, we need to display an error that the LS is in a broken state
15 | - If `ansible`, `docker` and `podman` are all missing, we need to display a visible error that the LS is in a broken state (see the sketch at the end of this page)
16 | - We need a status/support page that reports the currently detected configuration, with info like:
17 | - local ansible and ansible-lint, path, venv or not, versions
18 | - podman status: server, container version and tag, container sanity check
19 | - docker status: same as podman
20 |
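21 | As a rough illustration of the "everything missing" detection case above (the real logic lives in the TypeScript extension, this is only a sketch):
22 |
23 | ```shell
24 | # broken state: none of the three execution options is available
25 | if ! command -v ansible >/dev/null && ! command -v docker >/dev/null && ! command -v podman >/dev/null; then
26 |   echo "ERROR: ansible, docker and podman are all missing; the language server cannot work" >&2
27 | fi
28 | ```
29 |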
--------------------------------------------------------------------------------
/playbooks/sync.yml:
--------------------------------------------------------------------------------
1 | ---
2 | - name: Checkout and Update DevTools project source code
3 | hosts: localhost
4 | connection: local
5 | gather_facts: false
6 | vars:
7 | repos:
8 | - name: ansible-lint
9 | repo: ansible/ansible-lint
10 | - name: ansible-compat
11 | repo: ansible/ansible-compat
12 | - name: ansible-navigator
13 | repo: ansible/ansible-navigator
14 | tasks:
15 | ## - name: Load project configuration
16 | # ansible.builtin.include_vars:
17 | # file: ../config/devtools.yml
18 | # name: devtools
19 | - name: Clone projects # noqa: latest[git]
20 | loop: "{{ repos }}"
21 | loop_control:
22 | label: "{{ item.name }}"
23 | ansible.builtin.git:
24 | repo: https://github.com/{{ item.repo }}
25 | dest: "{{ playbook_dir }}/../.cache/{{ item.name }}"
26 | single_branch: true
27 | # version: HEAD
28 | update: true
29 | # track_submodules: true
30 | # recursive: true
31 |
--------------------------------------------------------------------------------
/docs/images/logo.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
8 |
--------------------------------------------------------------------------------
/.github/workflows/release_collection.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Release the ansible collection
3 | on:
4 | workflow_call:
5 | inputs:
6 | ah_publish:
7 | description: Publish the collection on automation hub
8 | default: true
9 | type: boolean
10 | environment:
11 | description: The deployment environment
12 | required: true
13 | type: string
14 | galaxy_publish:
15 | default: true
16 | description: Publish the collection on galaxy
17 | type: boolean
18 | secrets:
19 | ah_token:
20 | required: false
21 | ansible_galaxy_api_key:
22 | required: false
23 |
24 | jobs:
25 | release_automation_hub:
26 | uses: ansible/team-devtools/.github/workflows/release_ah.yml@main
27 | with:
28 | environment: release
29 | secrets:
30 | ah_token: ${{ secrets.ah_token }}
31 |
32 | release_galaxy:
33 | uses: ansible/team-devtools/.github/workflows/release_galaxy.yml@main
34 | needs: [release_automation_hub]
35 | with:
36 | environment: release
37 | secrets:
38 | ansible_galaxy_api_key: ${{ secrets.ansible_galaxy_api_key }}
39 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Community managed Ansible repositories
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.github/workflows/release_galaxy.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Galaxy Release
3 | on:
4 | workflow_call:
5 | inputs:
6 | environment:
7 | description: The deployment environment
8 | required: true
9 | type: string
10 | galaxy_publish:
11 | default: true
12 | description: Publish the collection on galaxy
13 | type: boolean
14 | secrets:
15 | ansible_galaxy_api_key:
16 | required: false
17 |
18 | jobs:
19 | release:
20 | runs-on: ubuntu-24.04
21 | environment: ${{ inputs.environment }}
22 | steps:
23 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
24 |
25 | - name: Build the collection
26 | run: |
27 | ansible-galaxy collection build -v --force
28 |
29 | - name: Publish the collection on Galaxy
30 | if: ${{ inputs.galaxy_publish }}
31 | run: |
32 | [[ "${{ secrets.ansible_galaxy_api_key != '' }}" == "true" ]] || { echo "ansible_galaxy_api_key is required to publish on galaxy" ; exit 1; }
33 | TARBALL=$(ls -1 ./*.tar.gz)
34 | ansible-galaxy collection publish "${TARBALL}" --api-key "${{ secrets.ansible_galaxy_api_key }}"
35 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # It is not currently possible to use trusted publishing with a shared workflow
3 | # https://github.com/pypi/warehouse/blob/main/docs/user/trusted-publishers/troubleshooting.md#reusable-workflows-on-github
4 | # this file is for reference only now
5 |
6 | name: release
7 |
8 | on:
9 | workflow_call:
10 | jobs:
11 | release:
12 | name: release ${{ github.event.ref }}
13 | environment: release
14 | runs-on: ubuntu-24.04
15 | permissions:
16 | id-token: write
17 |
18 | env:
19 | FORCE_COLOR: 1
20 | PY_COLORS: 1
21 | TOX_PARALLEL_NO_SPINNER: 1
22 |
23 | steps:
24 | - name: Set up Python
25 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6
26 | with:
27 | python-version: "3.14"
28 |
29 | - name: Install tox
30 | run: python3 -m pip install --user "tox>=4.0.0"
31 |
32 | - name: Check out src from Git
33 | uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
34 | with:
35 | fetch-depth: 0 # needed by setuptools-scm
36 |
37 | - name: Build dists
38 | run: python3 -m tox -e pkg
39 | - name: Publish to pypi.org
40 | uses: pypa/gh-action-pypi-publish@release/v1
41 |
--------------------------------------------------------------------------------
/.github/workflows/push.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # push workflow is shared and expected to perform actions after a merge happens
3 | # on a maintenance branch (default or release). For example updating the
4 | # draft release-notes.
5 | name: push
6 |
7 | permissions:
8 | contents: write
9 | pull-requests: read
10 |
11 | on:
12 | workflow_call: # allows reuse of this workflow from other devtools repos
13 |
14 | jobs:
15 | update_release_draft:
16 | runs-on: ubuntu-24.04
17 | steps:
18 | - uses: release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5 # v6
19 | env:
20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
21 |
22 | - name: Send CI failure notification
23 | if: failure() && github.ref == 'refs/heads/main'
24 | env:
25 | SLACK_WEBHOOK_URL: ${{ secrets.DEVTOOLS_CI_SLACK_URL }}
26 | run: |
27 | if [ -n "$SLACK_WEBHOOK_URL" ]; then
28 | MESSAGE_ID="ci-failure-${{ github.run_id }}-$(date +%s)"
29 | curl -X POST -H 'Content-type: application/json' \
30 | --data "{\"text\":\" Release/CI workflow failed on main branch for '${{ github.repository }}' (push.yml). Check logs: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}\", \"ts\":\"$MESSAGE_ID\"}" \
31 | $SLACK_WEBHOOK_URL
32 | fi
33 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: tox
3 | on:
4 | merge_group:
5 | branches:
6 | - main
7 | push:
8 | branches: # any integration branch but not tag
9 | - main
10 | pull_request:
11 | branches:
12 | - main
13 | schedule:
14 | - cron: "0 0 * * *"
15 | jobs:
16 | test:
17 | # tests reusable tox workflow
18 | uses: ./.github/workflows/tox.yml
19 | with:
20 | default_python: "3.13"
21 | max_python: "3.14"
22 | min_python: "3.13"
23 | run_post: echo 'Running post'
24 | run_pre: echo 'Running pre'
25 | other_names: |
26 | docs
27 | lint
28 | py-mise-macos:tox -e py:runner=macos-15;mise=true
29 | py-mise-ubuntu:tox -e py:runner=ubuntu-24.04;mise=true
30 | py313-py314-macos:tox -e py313,py314:runner=macos-15
31 | platforms: linux
32 | node-version-file: .tool-versions
33 | secrets: inherit
34 |
35 | check: # This job does nothing and is only used for the branch protection
36 | if: always()
37 | needs:
38 | - test
39 | runs-on: ubuntu-24.04
40 | steps:
41 | - name: Decide whether the needed jobs succeeded or failed
42 | uses: re-actors/alls-green@release/v1
43 | with:
44 | jobs: ${{ toJSON(needs) }}
45 | push:
46 | if: github.event_name == 'push'
47 | uses: ./.github/workflows/push.yml
48 | secrets: inherit
49 |
--------------------------------------------------------------------------------
/docs/guides/python/release.md:
--------------------------------------------------------------------------------
1 | # Release process for python packages
2 |
3 | Thanks to the integrations that are in place, it is very easy to release a new version of our Python projects, i.e. [ansible-lint], [ansible-navigator], [ansible-compat] and [molecule].
4 |
5 | [ansible-lint]: https://github.com/ansible/ansible-lint
6 | [ansible-navigator]: https://github.com/ansible/ansible-navigator
7 | [ansible-compat]: https://github.com/ansible/ansible-compat
8 | [molecule]: https://github.com/ansible-community/molecule
9 |
10 | Anyone with appropriate permissions on the respective projects will be able to roll out a new release.
11 |
12 | If we go to the GitHub **Releases** section, we'll be able to see something like:
13 |
14 | 
15 |
16 | This shows the upcoming version with changes to be included in the next release.
17 |
18 | We use [release-drafter](https://github.com/release-drafter/release-drafter) which helps us to get the changelog for the releases.
19 | When a PR is merged, release-drafter runs and adds PR details to the changelog as shown above.
20 |
21 | # Releasing a new version
22 |
23 | Go to `Draft a new release` inside GitHub releases and release it, making sure that you create a discussion thread for it. This will create a new tag.
24 | Once released, the latest version will be deployed and published to the PyPI registry.
25 |
26 | This release process is applicable to all our Python-based projects.
27 |
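28 | For reference, the same step can also be done from the command line with the `gh` CLI; this is only a sketch (tag, notes file and discussion category are placeholders), the documented flow above uses the web UI:
29 |
30 | ```shell
31 | gh release create v1.2.3 --title v1.2.3 --notes-file notes.md --discussion-category "General"
32 | ```
33 |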
--------------------------------------------------------------------------------
/test/test_repos.py:
--------------------------------------------------------------------------------
1 | """Test module for the package."""
2 |
3 | import logging
4 | import os
5 | from collections.abc import Generator
6 | from subprocess import run
7 |
8 | import pytest
9 | from _pytest.mark.structures import ParameterSet
10 |
11 | from team_devtools import Label, Repo, get_labels, get_repos
12 |
13 |
14 | LOGGER = logging.getLogger(__name__)
15 | CI = False
16 | if "CI" in os.environ and os.environ["CI"] in ("true", "1"):
17 | CI = True
18 |
19 |
20 | def each_repo_label() -> Generator[ParameterSet, None, None]:
21 | """Generate a parameter set for each repo and label.
22 |
23 | Yields:
24 | ParameterSet: A parameter set for each repo and label.
25 | """
26 | for repo in get_repos():
27 | for label in get_labels():
28 | yield pytest.param(repo, label, id=f"{repo.repo}:{label.name}")
29 |
30 |
31 | @pytest.mark.parametrize(("repo", "label"), each_repo_label())
32 | @pytest.mark.skipif(bool(os.environ.get("CI", "")), reason="Not running in CI yet.")
33 | def test_label(repo: Repo, label: Label) -> None:
34 | """Reconfigure label inside a repo."""
35 | assert label.name
36 | assert label.color, f"Label {label.name} does not have a color"
37 | result = run( # noqa: S602
38 | f"gh label create {label.name} --color {label.color.strip('#')} --force --repo {repo.name}",
39 | shell=True,
40 | check=False,
41 | capture_output=True,
42 | text=True,
43 | )
44 | assert result.returncode == 0, result
45 |
--------------------------------------------------------------------------------
/.github/workflows/release_ah.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Automation Hub Release
3 | on:
4 | workflow_call:
5 | inputs:
6 | ah_publish:
7 | description: Publish the collection on automation hub
8 | default: true
9 | type: boolean
10 | environment:
11 | description: The deployment environment
12 | required: true
13 | type: string
14 | secrets:
15 | ah_token:
16 | required: false
17 |
18 | jobs:
19 | release:
20 | runs-on: ubuntu-24.04
21 | environment: ${{ inputs.environment }}
22 | steps:
23 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
24 |
25 | - name: Build the collection
26 | run: |
27 | ansible-galaxy collection build -v --force
28 |
29 | - name: Publish the collection on Automation Hub
30 | if: ${{ inputs.ah_publish }}
31 | run: |
32 | [[ "${{ secrets.ah_token != '' }}" == "true" ]] || { echo "ah_token is required to publish on automation hub" ; exit 1; }
33 | TARBALL=$(ls -1 ./*.tar.gz)
34 | cat << EOF > ansible.cfg
35 | [galaxy]
36 | server_list = rh_automation_hub
37 | [galaxy_server.rh_automation_hub]
38 | url=https://cloud.redhat.com/api/automation-hub/
39 | auth_url=https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token
40 | token=${{ secrets.ah_token }}
41 | EOF
42 | ansible-galaxy collection publish "${TARBALL}"
43 | rm ansible.cfg
44 |
--------------------------------------------------------------------------------
/docs/guides/ansible/permissions.md:
--------------------------------------------------------------------------------
1 | # Ansible filesystem requirements
2 |
3 | For security reasons, Ansible will refuse to load any configuration files from **world-writable filesystems** (`o+w`). You can find more details in [#42388](https://github.com/ansible/ansible/issues/42388).
4 |
5 | ## WSL on Windows
6 |
7 | Both WSL1 and WSL2 mount Windows drives with insecure file permissions by default, so you will not be able to run Ansible correctly from paths like `/mnt/c`. Still, if Ansible is installed on a Linux partition, it will work.
8 |
9 | You can also try to reconfigure `/etc/wsl.conf` to ensure that your Windows mounts get UNIX-compatible permissions that are not insecure. For example, our team is using a configuration like the one below for testing:
10 |
11 | ```ini
12 | # /etc/wsl.conf
13 | [automount]
14 | enabled = true
15 | root = /
16 | options = "metadata,umask=077"
17 | [interop]
18 | enabled = false
19 | appendWindowsPath = false
20 | [network]
21 | hostname = wsl
22 | ```
23 |
24 | To test that your changes are working correctly, just do an `ls -la /mnt/c` and check whether `o+w` is still present. You can even try to remove the write permission for others from a file to see if chmod works on that particular drive: `chmod o-w filename`.
25 |
26 | ### Performance
27 |
28 | Filesystem operations on Windows mounts under WSL are **very slow** on both versions of WSL, so we strongly recommend avoiding them.
29 |
30 | On some versions of Windows there were even bugs causing system instability and kernel panics when a lot of activity happened on these mounts. We hope that these have been addressed, but you need to be aware of them.
31 |
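32 | Coming back to the permission check described earlier, a minimal sketch for verifying a mount (paths are examples):
33 |
34 | ```shell
35 | # look for the "w" bit in the "others" column of the mount point
36 | ls -ld /mnt/c
37 | # verify that chmod actually works on that drive
38 | touch /mnt/c/test-perms && chmod o-w /mnt/c/test-perms && ls -l /mnt/c/test-perms
39 | ```
40 |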
--------------------------------------------------------------------------------
/docs/stats/molecule-plugins.md:
--------------------------------------------------------------------------------
1 | # Molecule Plugins in 2023
2 |
3 | As part of the effort to revive the molecule project, some drastic measures were put in place.
4 |
5 | Unmaintained drivers were archived between January 5th-8th, 2023: openstack, goss, libvirt, alicloud, vmware, digitalocean, kubevirt, lxd, inspec. Anyone wanting to **revive and maintain** one of these will have to make a pull request against the molecule-plugins repository.
6 |
7 | The relatively active drivers were moved to [molecule-plugins](https://github.com/ansible-community/molecule-plugins) repository, where we expected them to be maintained by the community.
8 |
9 | ### Which drivers should we still see in addition to delegated?
10 |
11 | - azure
12 | - docker
13 | - containers
14 | - podman
15 | - ec2
16 | - gce
17 | - vagrant
18 |
19 | ### How did we measure the driver popularity?
20 |
21 | With a simple [sourcegraph](https://sourcegraph.com/search?q=context:global+file:molecule.yml+content:%22name:+.*containers%22&patternType=regexp&sm=0) search, we can get a use count for public repos. Here are some of the results:
22 |
23 | ```mermaid
24 | pie title Popularity of molecule drivers on github
25 | "docker" : 3700
26 | "vagrant" : 1300
27 | "delegated": 944
28 | "ec2": 272
29 | "gce": 269
30 | "podman": 189
31 | "openstack": 49
32 | "azure": 20
33 | "containers": 19
34 | "vmware": 11
35 | "lxd": 2
36 | ```
37 |
38 | ## What if I used a driver that was not migrated?
39 |
40 | If you are using one of the drivers that was not migrated and you want to maintain it, you can start by creating a pull request against the molecule-plugins repository to add its code there.
41 |
--------------------------------------------------------------------------------
/src/team_devtools/__init__.py:
--------------------------------------------------------------------------------
1 | """Team DevTools package."""
2 |
3 | try:
4 | from ._version import __version__
5 | except ImportError: # pragma: no cover
6 | __version__ = "unknown"
7 | from collections.abc import Generator
8 | from dataclasses import dataclass
9 | from pathlib import Path
10 |
11 | import yaml
12 |
13 |
14 | @dataclass
15 | class Repo:
16 | """Repository class."""
17 |
18 | name: str
19 | org: str = ""
20 | repo: str = ""
21 |
22 | def __post_init__(self) -> None:
23 | """Post init.
24 |
25 | Raises:
26 | ValueError: If the repo name is invalid.
27 | """
28 | if "/" not in self.name:
29 | msg = f"Invalid repo name: {self.name}"
30 | raise ValueError(msg)
31 | self.org, self.repo = self.name.split("/", 1)
32 |
33 |
34 | @dataclass
35 | class Label:
36 | """Label class."""
37 |
38 | name: str
39 | color: str = ""
40 | description: str = ""
41 |
42 |
43 | def get_repos() -> Generator[Repo]:
44 | """Get all repos.
45 |
46 | Yields:
47 | Repo: A repository.
48 | """
49 | with Path("config/devtools.yml").open(encoding="utf-8") as file:
50 | data = yaml.safe_load(file)
51 | for repo in data["repos"]:
52 | if "/" in repo:
53 | yield Repo(name=repo)
54 |
55 |
56 | def get_labels() -> Generator[Label]:
57 | """Get all labels.
58 |
59 | Yields:
60 | Label: A label.
61 | """
62 | with Path("config/devtools.yml").open(encoding="utf-8") as file:
63 | data = yaml.safe_load(file)
64 | for k, v in data["labels"].items():
65 | yield Label(name=k, **v)
66 |
--------------------------------------------------------------------------------
/test/test_forum_post.py:
--------------------------------------------------------------------------------
1 | """Module for testing forum_post.py."""
2 |
3 | from __future__ import annotations
4 |
5 | import importlib.util
6 | import sys
7 |
8 | import pytest
9 |
10 |
11 | spec = importlib.util.spec_from_file_location("forum_post", ".github/workflows/forum_post.py")
12 | forum_post = importlib.util.module_from_spec(spec)
13 | sys.modules["forum_post"] = forum_post
14 | spec.loader.exec_module(forum_post)
15 |
16 |
17 | @pytest.fixture
18 | def post_instance() -> forum_post.Post:
19 | """A Post instance for testing."""
20 | post = forum_post.Post(
21 | project="ansible/molecule",
22 | release="v25.3.1",
23 | forum_api_key="unused",
24 | forum_user="unused",
25 | )
26 | # prefill values that need network access.
27 | post.category_id = "18"
28 | post.created = "2025-02-19T12:53:51Z"
29 | post.release_notes = (
30 | "## Bugfixes\r\n\r\n- Fix molecule matrix with no scenario name. (#4400) @Qalthos\r\n"
31 | )
32 | return post
33 |
34 |
35 | def test_prepare_post(post_instance: forum_post.Post) -> None:
36 | """Test that discourse post payload is generated correctly."""
37 | payload = post_instance._prepare_post() # noqa: SLF001
38 | release_notes = forum_post.POST_MD.format(
39 | project_short="molecule",
40 | release=post_instance.release,
41 | release_notes=post_instance.release_notes,
42 | )
43 | assert payload == {
44 | "title": "Release Announcement: molecule v25.3.1",
45 | "raw": release_notes,
46 | "category": post_instance.category_id,
47 | "created_at": post_instance.created,
48 | "tags": ["devtools", "release", "molecule"],
49 | }
50 |
--------------------------------------------------------------------------------
/docs/guides/gha.md:
--------------------------------------------------------------------------------
1 | # Github Actions
2 |
3 | - workflow name should be **short** and **lowercase** and match the name of the workflow file
4 | - do not use a workflow name like "ci" because all workflows are related to CI one way or another so they would not be informative
5 |
6 | ## Common workflow names
7 |
8 | - `tox` : testing jobs that rely on tox, job names should match `tox -e xxx`
9 | - `npm` : testing jobs that rely on npm, job name should match `npm run xxx`
10 | - `ack` : shared workflow that is triggered on any pull-request review, like editing labels
11 | - `push` : shared workflow that is triggered on a merge, usually updates release note drafts
12 | - `release` : workflow that is triggered when a release is made
13 |
14 | ## Use a final `check` job to summarize results
15 |
16 | All GHA pipelines should use a finalizing job named `check` that depends on the other jobs. This allows us to mark only this job as required in branch protection, thus lowering the manual maintenance of branch protection rules.
17 |
18 | The current snippet to use is documented below. It looks a bit more complex because we use a special action developed by @webknjaz that checks the result of dependent jobs and avoids a known GHA bug where skipped jobs do not set the build status to failed.
19 |
20 | ```yaml
21 | jobs:
22 | ...
23 | check: # This job does nothing and is only used for the branch protection
24 | if: always()
25 |
26 | needs:
27 | - linters
28 | - unit
29 |
30 | runs-on: ubuntu-24.04
31 |
32 | steps:
33 | - name: Decide whether the needed jobs succeeded or failed
34 | uses: re-actors/alls-green@release/v1
35 | with:
36 | jobs: ${{ toJSON(needs) }}
37 | ```
38 |
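39 | One practical benefit of keeping job names aligned with tox environments, as described in the common workflow names above, is that a failing CI job can be reproduced locally under the same name, for example (environment name is illustrative):
40 |
41 | ```shell
42 | tox -e lint
43 | ```
44 |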
--------------------------------------------------------------------------------
/AGENTS.md:
--------------------------------------------------------------------------------
1 | ## Static checks
2 |
3 | ## Agents
4 |
5 | - `.vscode/extensions.json` file must:
6 | - have `tombi-toml.tombi` entry inside `recommendations` list
7 | - `panekj.even-betterer-toml` must not be listed inside `recommendations` list
8 | - `.vscode/settings.json` file must:
9 | - contain `[toml]` entry with `"editor.defaultFormatter": "tombi-toml.tombi"` inside
10 | - file `.taplo.toml` must be removed if present
11 | - `.pre-commit-config.yaml` file must:
12 | - not have a hook that uses `repo: https://github.com/pappasam/toml-sort`. If present, the entire entry for this hook must be removed
13 | - have a hook that uses https://github.com/tombi-toml/tombi-pre-commit that looks like the one below:
14 |
15 | ````yaml
16 | - repo: https://github.com/tombi-toml/tombi-pre-commit
17 | rev: v0.6.17
18 | hooks:
19 | - id: tombi-format
20 | alias: toml
21 | - id: tombi-lint
22 | alias: toml
23 | ````
24 |
25 | Inside the same file, the root-level key `ci` must contain a key `skip` that
26 | is a sequence that must contain at least the `tombi-format` and `tombi-lint` entries.
27 |
28 | Ensure that running `pre-commit run --all-files` passes. Keep in mind that this
29 | might need to run more than once in case it reformats some files.
30 |
31 |
32 | ### Commit messages
33 |
34 | - Commit messages should conform to [conventional commits][] standards
35 | - If a github ticket or a JIRA ticket is related or fixed by the commit, a
36 | clear line of the commit message should include `Fixes: #123` or
37 | `Related: AAP-123` in order to enable auto-linking and auto-closing of those.
38 | Keep in mind to not use URLs, these need to be plain text.
39 |
40 | [conventional commits]: https://www.conventionalcommits.org
41 |
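42 | As an illustration of the rules above (the message and issue number are placeholders):
43 |
44 | ```shell
45 | git commit -m "fix: avoid crash when the config file is missing" -m "Fixes: #123"
46 | ```
47 |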
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": ["github>ansible/actions//config/renovate.json"],
4 | "packageRules": [
5 | {
6 | "allowedVersions": "<2.17",
7 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
8 | "matchPackageNames": ["ansible-core"]
9 | },
10 | {
11 | "allowedVersions": "<1.16",
12 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
13 | "matchPackageNames": ["cffi"]
14 | },
15 | {
16 | "allowedVersions": "<4.3",
17 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
18 | "matchPackageNames": ["django"]
19 | },
20 | {
21 | "allowedVersions": "<6.1",
22 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
23 | "matchPackageNames": ["importlib-metadata"]
24 | },
25 | {
26 | "allowedVersions": "<4.22",
27 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
28 | "matchPackageNames": ["jsonschema"]
29 | },
30 | {
31 | "allowedVersions": "<25.0",
32 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
33 | "matchPackageNames": ["packaging"]
34 | },
35 | {
36 | "allowedVersions": "<0.5.3",
37 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
38 | "matchPackageNames": ["python-gnupg"]
39 | },
40 | {
41 | "allowedVersions": "<65.6",
42 | "description": "Platform compatibility constraint from .config/platform-constraints.txt",
43 | "matchPackageNames": ["setuptools"]
44 | }
45 | ]
46 | }
47 |
--------------------------------------------------------------------------------
/assets/logos/logo-ansible-extension.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
18 |
--------------------------------------------------------------------------------
/docs/guides/python/packaging.md:
--------------------------------------------------------------------------------
1 | This document is a work in progress. For the moment it contains only generic guidelines, most of them already used by our projects.
2 |
3 | - Ensure we use `src/` layout so we do not accidentally import the module without installation
4 | - Use PEP-517 and seek removal of `setup.py`; test it via the `tox -e pkg` command.
5 | - Use PEP-621 and seek removal of the `setup.cfg` file in favor of `pyproject.toml`.
6 | - Make docs and test dependencies extras
7 | - Have a single `requirements.txt` that is a lock file produced by pip-compile (pip-tools)
8 | - Enable dependabot to update the `requirements.txt` lock file, optionally focus on a subset of dependencies and limit the maximum number of open pull-requests
9 | - Create a tox `deps` job that updates all dependencies using pip-compile.
10 |
11 | # tox -e pkg
12 |
13 | Use the template below when implementing the packaging. You will need to update the last two commands that are used to verify that what was installed is functional.
14 |
15 | ```ini
16 | [testenv:pkg]
17 | description =
18 | Build package, verify metadata, install package
19 | deps =
20 | build >= 0.7.0
21 | twine
22 | skip_install = true
23 | commands =
24 | # ensure dist/ folder is empty
25 | {envpython} -c 'import os.path, shutil, sys; \
26 | dist_dir = os.path.join("{toxinidir}", "dist"); \
27 | os.path.isdir(dist_dir) or sys.exit(0); \
28 | print("Removing \{!s\} contents...".format(dist_dir), file=sys.stderr); \
29 | shutil.rmtree(dist_dir)'
30 |
31 | # build wheel and sdist using PEP-517 (note that args
32 | {envpython} -m build \
33 | --outdir {toxinidir}/dist/ \
34 | {toxinidir}
35 |
36 | # Validate metadata using twine
37 | twine check --strict {toxinidir}/dist/*
38 |
39 | # Install the wheel
40 | sh -c "python3 -m pip install {toxinidir}/dist/*.whl"
41 |
42 | # Basic sanity check
43 | ansible-navigator --version
44 |
45 | # Uninstall package
46 | pip uninstall -y ansible-navigator
47 | ```
48 |
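49 | Outside of tox, the equivalent manual steps look roughly like this (a sketch only, assuming `build` and `twine` are installed):
50 |
51 | ```shell
52 | python3 -m build --outdir dist/ .
53 | twine check --strict dist/*
54 | python3 -m pip install dist/*.whl
55 | ```
56 |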
--------------------------------------------------------------------------------
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # Format and labels used by Ansible DevTools projects
3 | name-template: v$RESOLVED_VERSION
4 | tag-template: v$RESOLVED_VERSION
5 | # Use '-' instead of '*' for unordered list to match prettier behavior
6 | change-template: "- $TITLE (#$NUMBER) @$AUTHOR"
7 | categories:
8 | # Keep titles single worlds, or they become md bookmarks
9 | - title: Breaking
10 | labels:
11 | - breaking
12 | - title: Features
13 | labels:
14 | - feat # 006b75
15 | - title: Fixes
16 | labels:
17 | - docs
18 | - fix # fbca04
19 | - perf
20 | - refactor
21 | - title: Maintenance # fef2c0
22 | labels:
23 | - build
24 | - chore
25 | - ci
26 | - style
27 | - test
28 | replacers:
29 | # https://github.com/release-drafter/release-drafter/issues/569#issuecomment-645942909
30 | - search: /(?:and )?@(pre-commit-ci|dependabot|renovate|konflux-internal-p02)(?:\[bot\])?,?/g
31 | replace: ""
32 | version-resolver:
33 | # some projects are using SemVer, so keep 'major' label for major.
34 | minor:
35 | labels:
36 | - breaking
37 | - feat
38 | patch:
39 | labels:
40 | - docs
41 | - fix
42 | - perf
43 | - refactor
44 | default: patch
45 | exclude-contributors:
46 | - dependabot
47 | - dependabot[bot]
48 | - github-actions
49 | - github-actions[bot]
50 | - konflux-internal-p02
51 | - konflux-internal-p02[bot]
52 | - pre-commit-ci
53 | - pre-commit-ci[bot]
54 | - renovate
55 | - renovate[bot]
56 | autolabeler:
57 | - label: breaking
58 | title: # "!" determines a breaking change
59 | - "^[a-z]+(\\([^)]+\\))?!:"
60 | body:
61 | - "/^BREAKING CHANGE: /i"
62 | - label: feat
63 | title:
64 | - /^(feat)/i
65 | - label: fix
66 | title:
67 | - /^(fix)/i
68 | - label: chore
69 | title:
70 | - /pre-commit autoupdate/
71 | - /^(chore|build|ci|test|style)/i
72 | template: |
73 | $CHANGES
74 |
--------------------------------------------------------------------------------
/docs/guides/code-reviews.md:
--------------------------------------------------------------------------------
1 | Whether you are on the reviewed or the reviewer side, be gentle and be sure you read the articles mentioned below:
2 |
3 | 1. https://mtlynch.io/code-review-love/
4 | 2. https://mtlynch.io/human-code-reviews-1/
5 | 3. https://mtlynch.io/human-code-reviews-2/
6 |
7 | ## Coverage
8 |
9 | A rule of thumb regarding code coverage is that any incoming change should not lower the current coverage level. Codecov is expected to prevent us from merging changes that would lower it.
10 |
11 | - [](https://codecov.io/github/ansible/ansible-lint) ansible-lint
12 |
13 | - [](https://codecov.io/github/ansible/ansible-compat) ansible-compat
14 |
15 | - [](https://codecov.io/github/ansible/ansible-navigator) ansible-navigator
16 | - [](https://codecov.io/github/ansible/molecule) molecule
17 |
18 | - [](https://codecov.io/github/ansible/ansible-dev-environment) ansible-dev-environment
19 |
20 | - [](https://codecov.io/github/ansible/ansible-creator) ansible-creator
21 |
22 | - [](https://codecov.io/github/ansible/ansible-language-server) ansible-language-server
23 |
24 | - [](https://codecov.io/github/ansible/vscode-ansible) vscode-ansible
25 |
26 | - [](https://codecov.io/github/ansible/pytest-ansible) pytest-ansible
27 |
28 | - [](https://codecov.io/github/ansible/tox-ansible) tox-ansible
29 |
--------------------------------------------------------------------------------
/docs/guides/vscode/release.md:
--------------------------------------------------------------------------------
1 | ## Release cadence
2 |
3 | A pre-release will be done every time we do a stable release.
4 | This ensures the VS Code extension `auto-update` functionality doesn't break.
5 | Refer to the VS Code [docs](https://code.visualstudio.com/api/working-with-extensions/publishing-extension#prerelease-extensions) for pre-release and release versions.
6 |
7 | ## Release steps
8 |
9 | These steps are currently manual, but we should consider automating at least some of them in the future.
10 |
11 | Assuming that the next version is `0.8.1`:
12 |
13 | 1. `git checkout -b release/0.8.1`
14 | 1. Copy draft release notes from github
15 | 1. Edit `CHANGELOG.md` and insert copied text
16 | 1. `npm version 0.8.1 --no-git-tag-version --no-commit-hooks`
17 | 1. `git commit -a -m "Release 0.8.1"`. If done correctly you should see 3 files modified, the changelog and the two package files.
18 | 1. `pre-commit run -a`
19 | 1. `gh pr create --draft --label skip-changelog`
20 | 1. Open pull request on github wait for it to pass. Merge it.
21 | 1. Go to GitHub releases and release it, making sure that you create a discussion thread for it. This will create a **tag**.
22 | 1. Go to https://studio-jenkins-csb-codeready.apps.ocp-c1.prod.psi.redhat.com/job/ansible/job/vscode-ansible/ and log in using your IPA/Kerberos password (that is **not** the pin+otp one); this requires you to be on the corporate VPN. In case you forgot it, you can reset it using https://identity.corp.redhat.com/resetipa
23 | 1. Trigger the effective publishing. Remember to check both publishing options as they are disabled by default.
24 | 1. Use the `Approve` button to approve the effective publishing. It can be found either on the last line of the live console or as a dialog on the graphical pipeline view.
25 |
26 | On average it takes 5-10 minutes for the newly uploaded release to appear on the marketplace, but it can take up to 48h for users to receive it, as VS Code does not check for newer extensions even if you try to use the manual refresh button.
27 |
28 | ## Implementation details
29 |
30 | The first part of our jenkins release pipeline is defined at https://gitlab.cee.redhat.com/codeready-studio/cci-config/-/blob/master/jobs/ansible/vscode-ansible.groovy and the rest inside ./Jenkinsfile
31 |
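32 | For convenience, the local preparation steps (1-7) above translate roughly to the shell sketch below; the version number is only an example and editing the changelog remains a manual step:
33 |
34 | ```shell
35 | git checkout -b release/0.8.1
36 | # paste the draft release notes into CHANGELOG.md before continuing
37 | npm version 0.8.1 --no-git-tag-version --no-commit-hooks
38 | git commit -a -m "Release 0.8.1"
39 | pre-commit run -a
40 | gh pr create --draft --label skip-changelog
41 | ```
42 |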
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | ci:
3 | # We rely on renovate to update it but there is no way to fully disable it
4 | # https://github.com/pre-commit-ci/issues/issues/83
5 | autoupdate_schedule: quarterly
6 | skip:
7 | - ansible-lint
8 | - tombi-format
9 | - tombi-lint
10 | repos:
11 | - repo: meta
12 | hooks:
13 | - id: check-useless-excludes
14 | - repo: https://github.com/astral-sh/uv-pre-commit
15 | rev: 0.9.16
16 | hooks:
17 | - id: uv-sync
18 | - id: uv-lock
19 | name: "deps: upgrade via uv sync --upgrade"
20 | alias: deps
21 | args: ["--upgrade"]
22 | stages: [manual]
23 | - repo: https://github.com/biomejs/pre-commit
24 | rev: "v2.3.8"
25 | hooks:
26 | - id: biome-check
27 | name: biome
28 | alias: biome
29 | args: [--unsafe]
30 | - repo: https://github.com/codespell-project/codespell
31 | rev: v2.4.1
32 | hooks:
33 | - id: codespell
34 | additional_dependencies: ["tomli>=2.2.1; python_version<'3.11'"]
35 | - repo: https://github.com/pre-commit/pre-commit-hooks.git
36 | rev: v6.0.0
37 | hooks:
38 | - id: end-of-file-fixer
39 | - id: trailing-whitespace
40 | - id: mixed-line-ending
41 | - id: fix-byte-order-marker
42 | - id: check-executables-have-shebangs
43 | - id: check-merge-conflict
44 | - id: debug-statements
45 | language_version: python3
46 | - repo: https://github.com/tombi-toml/tombi-pre-commit
47 | rev: v0.7.3
48 | hooks:
49 | - id: tombi-format
50 | alias: toml
51 | - id: tombi-lint
52 | alias: toml
53 | - repo: https://github.com/astral-sh/ruff-pre-commit
54 | rev: v0.14.8
55 | hooks:
56 | - id: ruff-format
57 | alias: ruff
58 | - id: ruff-check
59 | args: ["--fix"]
60 | alias: ruff
61 | - repo: https://github.com/adrienverge/yamllint.git
62 | rev: v1.37.1
63 | hooks:
64 | - id: yamllint
65 | files: \.(yaml|yml)$
66 | types: [file, yaml]
67 | entry: yamllint --strict
68 | - repo: https://github.com/ansible/ansible-lint
69 | rev: v25.12.0
70 | hooks:
71 | - id: ansible-lint
72 | args: [--fix]
73 | # - repo: https://github.com/ansible/team-devtools
74 | # rev: main
75 | # hooks:
76 | # - id: check-platform-constraints
77 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | *.cover
42 | *.py,cover
43 | .cache
44 | .coverage
45 | .coverage.*
46 | .hypothesis/
47 | .nox/
48 | .pytest_cache/
49 | .tox/
50 | codecov
51 | codecov.SHA256SUM
52 | codecov.SHA256SUM.sig
53 | coverage.xml
54 | htmlcov/
55 | junit.xml
56 | nosetests.xml
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | db.sqlite3
66 | db.sqlite3-journal
67 |
68 | # Flask stuff:
69 | instance/
70 | .webassets-cache
71 |
72 | # Scrapy stuff:
73 | .scrapy
74 |
75 | # Sphinx documentation
76 | docs/_build/
77 |
78 | # PyBuilder
79 | target/
80 |
81 | # Jupyter Notebook
82 | .ipynb_checkpoints
83 |
84 | # IPython
85 | profile_default/
86 | ipython_config.py
87 |
88 | # pyenv
89 | .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
99 | __pypackages__/
100 |
101 | # Celery stuff
102 | celerybeat-schedule
103 | celerybeat.pid
104 |
105 | # SageMath parsed files
106 | *.sage.py
107 |
108 | # Environments
109 | .env
110 | .venv
111 | env/
112 | venv/
113 | ENV/
114 | env.bak/
115 | venv.bak/
116 |
117 | # Spyder project settings
118 | .spyderproject
119 | .spyproject
120 |
121 | # Rope project settings
122 | .ropeproject
123 |
124 | # mkdocs documentation
125 | /site
126 |
127 | # mypy
128 | .mypy_cache/
129 | .dmypy.json
130 | dmypy.json
131 |
132 | # Pyre type checker
133 | .pyre/
134 | .DS_Store
135 | .envrc
136 | .vault
137 |
138 | _readthedocs/
139 | src/team_devtools/_version.py
140 | .ansible
141 | coverage.lcov
142 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | ---
2 | site_name: Ansible DevTools
3 | site_url: https://ansible.readthedocs.io/projects/team-devtools/
4 | repo_url: https://github.com/ansible/team-devtools
5 | edit_uri: blob/main/docs/
6 | copyright: Copyright © Red Hat, Inc.
7 |
8 | theme:
9 | name: ansible
10 | features:
11 | - content.code.copy
12 | - content.action.edit
13 | - content.tooltips
14 | - navigation.expand
15 | exclude_docs: |
16 | _abbreviations.md
17 | extra:
18 | social:
19 | - icon: fontawesome/brands/github-alt
20 | link: https://github.com/ansible/team-devtools
21 | nav:
22 | - home: index.md
23 | - guides:
24 | - Code Reviews: guides/code-reviews.md
25 | - Manage Releases: guides/releases.md
26 |       - GitHub Actions: guides/gha.md
27 | - Mentoring: guides/mentoring.md
28 | - CalVer Releases: guides/calver.md
29 | - Ansible:
30 | - Permissions: guides/ansible/permissions.md
31 | - Python:
32 | - Python Packaging: guides/python/packaging.md
33 | - Python Dependencies: guides/python/dependencies.md
34 | - PyTest: guides/python/pytest.md
35 | - Tox: guides/python/tox.md
36 | - Release: guides/python/release.md
37 | - VsCode:
38 | - Typescript: guides/vscode/use-cases.md
39 | - Release: guides/vscode/release.md
40 | - stats:
41 | - Molecule Plugins: stats/molecule-plugins.md
42 | - Repository Status: stats/repos.md
43 | # - ansible-language-server: '!import https://github.com/ansible/ansible-language-server?branch=main&multi_docs=False'
44 | # - projects: "*include .cache/*/mkdocs.yml"
45 | plugins:
46 | - autorefs
47 | - material/search
48 | - exclude:
49 | glob:
50 | - "*/changelog-fragments.d/*"
51 | - material/social
52 | # Disabled due to https://github.com/timvink/mkdocs-git-revision-date-localized-plugin/issues/89
53 | # - git-revision-date-localized:
54 | # enable_creation_date: false
55 | # fallback_to_build_date: true
56 | # exclude:
57 | # - index.md
58 |
59 | markdown_extensions:
60 | - abbr
61 | - attr_list
62 | - admonition
63 | - def_list
64 | - footnotes
65 | - pymdownx.highlight:
66 | anchor_linenums: true
67 | - pymdownx.inlinehilite
68 | - pymdownx.snippets:
69 | auto_append:
70 | - docs/_abbreviations.md
71 | check_paths: true
72 | - pymdownx.superfences
73 | - pymdownx.magiclink:
74 | repo_url_shortener: true
75 | repo_url_shorthand: true
76 | social_url_shorthand: true
77 | social_url_shortener: true
78 | user: facelessuser
79 | repo: pymdown-extensions
80 | normalize_issue_symbols: true
81 | - pymdownx.tabbed:
82 | alternate_style: true
83 | - pymdownx.tasklist:
84 | custom_checkbox: true
85 | - toc:
86 | # reduced to keep TOC nice under Changelog page
87 | toc_depth: 3
88 | permalink: true
89 | - pymdownx.superfences:
90 | custom_fences:
91 | - name: mermaid
92 | class: mermaid
93 | format: !!python/name:pymdownx.superfences.fence_code_format
94 |
--------------------------------------------------------------------------------
/docs/guides/mentoring.md:
--------------------------------------------------------------------------------
1 | # Ansible DevTools Open Source Mentorship
2 |
3 | The **Ansible DevTools** team works almost exclusively on upstream projects, so we engage with other contributors every day. That is why we welcome various periodic mentorship programs.
4 |
5 | ## For mentees
6 |
7 | If you want to learn how to improve your **open-source collaboration** and maybe become a maintainer of a project, this program might help you achieve that much more easily.
8 |
9 | The mentorship program should also help you improve collaboration with other open-source projects, especially those from the Python ecosystem.
10 |
11 | ### Checklist
12 |
13 | - [ ] Read and acknowledge our [CoC](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html). TL;DR: just be nice and friendly!
14 | - [ ] Join the `#ansible-devtools` [matrix or irc](https://docs.ansible.com/ansible/latest/community/communication.html#working-groups) channels using a permanent username, preferably the same as your GitHub account. Remain there, as that will be the main medium of communication. Matrix keeps messages by default, but if you choose the irc bridged room, you will need to use a bouncer to avoid missing messages while away.
15 | - [ ] You are welcome to join or follow us on [twitter](https://twitter.com/i/communities/1530798265561714693), [reddit](https://www.reddit.com/r/ansible/) and engage with others. Socializing is an important skill to master, but also try to avoid overdoing it. Making your name known to others plays an important part in improving collaboration with them, but it is a slow process.
16 | - [ ] Ask your mentor to set up a weekly meeting with you, likely 30 minutes.
17 | - [ ] Check your mentor's availability during the day and note when quick chats with them are most likely to be possible.
18 | - [ ] Keep an agenda of items you want to discuss with your mentor, as you are the one that drives the process, not them.
19 | - [ ] Decide what you want to focus on. We recommend starting with very easy changes and slowly moving to more complex ones.
20 | - [ ] You will see that, often enough, you discover unrelated problems that need to be sorted out before you can even address the one you were working on. You can either fix them yourself or politely try to persuade someone else to fix them; often they are problems in external dependencies.
21 |
22 | ### Suggestions
23 |
24 | - Prefer using the public channel unless the question is really sensitive, like a security or a personal one
25 | - If blocked trying to do something, ask for someone to help you with directions
26 | - If you fail to understand a particular CI/CD failure, ask about it
27 |
28 | ## For mentors
29 |
30 | - [ ] Maintain recurring meetings with your mentee and support them in their goals
31 | - [ ] Be available on chat, make use of public and private channels
32 | - [ ] Remember that each side is expected to learn from the other and that the mentorship program is not about teaching someone that the way you do things is the right way.
33 | - [ ] Welcome mentees to question anything, to see why things are one way or another, and allow them to come up with new ideas.
34 |
35 | ## Asking for mentorship
36 |
37 | - As mentoring is a personal experience, you will need to ask your potential mentor about it in a private message, chat or email.
38 | - Check if you have enough available time to invest in it. It would not be unreasonable to assume that at least **4h/week** would be needed for at least a few months.
39 | - Be sure you mention what you aim to gain from the experience and what areas you want to focus on.
40 |
41 | At the time this was written, both [`ssbarnea`](https://github.com/ssbarnea) and [`ganeshrn`](https://github.com/ganeshrn) may be able to accept a mentee. You should also post a message on https://github.com/ansible/team-devtools/discussions/81
42 |
--------------------------------------------------------------------------------
/.github/workflows/push_network.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # push workflow is shared and expected to perform actions after a merge happens
3 | # on a maintenance branch (default or release). For example updating the
4 | # draft release-notes.
5 | # based on great work from
6 | # https://github.com/T-Systems-MMS/ansible-collection-icinga-director
7 | name: push_network
8 |
9 | on:
10 | workflow_call:
11 | # https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#onworkflow_callinputs
12 | inputs:
13 | repo:
14 | description: Repository url
15 | required: true
16 | type: string
17 | secrets:
18 | BOT_PAT:
19 | description: Bot secret
20 | required: true
21 |
22 | jobs:
23 | update_release_draft:
24 | runs-on: ubuntu-24.04
25 | environment: push
26 | steps:
27 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
28 | with:
29 | repository: ${{ inputs.repo }}
30 | fetch-depth: 0
31 | token: ${{ secrets.BOT_PAT }}
32 |
33 | - name: Set up Python
34 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6
35 | with:
36 | python-version: "3.14"
37 |
38 | - name: Install antsibull-changelog, antsichaut
39 | run: >
40 | python3 -m pip install
41 | antsibull-changelog
42 | git+https://github.com/ansible-community/antsichaut.git
43 | pre-commit
44 | --disable-pip-version-check
45 |
46 | - name: Run release drafter
47 | id: release_drafter
48 | uses: release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5 # v6
49 | env:
50 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
51 |
52 | - name: Remove the v prefix from the release drafter version
53 | run: |
54 | VERSION=${{ steps.release_drafter.outputs.tag_name }}
55 | echo "VERSION=${VERSION#v}" >> $GITHUB_ENV
56 |
57 | - name: Generate new version in changelog.yaml
58 | run: antsibull-changelog release -v --version "${{ env.VERSION }}"
59 |
60 | - name: Get previous tag
61 | id: previoustag
62 | uses: WyriHaximus/github-action-get-previous-tag@master
63 | env:
64 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
65 |
66 | ## this part is commented out to check push workflow
67 | ## ref - https://github.com/ansible-collections/cisco.nxos/pull/765
68 | # - name: Generate changelog.yaml
69 | # run: antsichaut
70 | # env:
71 | # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
72 | # SINCE_VERSION: "${{ steps.previoustag.outputs.tag }}"
73 |
74 | # - name: Update Changelog.rst
75 | # run: antsibull-changelog generate -v
76 |
77 | - name: Cleanup to use prettier
78 | run: pre-commit run prettier --all-files
79 | continue-on-error: true
80 |
81 | - name: Update the galaxy.yml version
82 | run: |
83 | sed -i -e 's/version:.*/version: ${{ env.VERSION }}/' galaxy.yml
84 |
85 | - name: Determine if a PR is necessary
86 | run: git diff --no-ext-diff --quiet --exit-code
87 | continue-on-error: true
88 | id: pr_check
89 |
90 | - name: Create PR for changelog
91 | run: |
92 | git config user.name "Ansible Bot"
93 | git config user.email devtools@ansible.com
94 | git checkout -t -b ${{ env.BRANCH_NAME }}
95 | git add .
96 | git commit -m "Changelog updated"
97 | git push origin ${{ env.BRANCH_NAME }}
98 | gh pr create --title "Changelog updated" --body "Changelog updated" --base main
99 | if: steps.pr_check.outcome == 'failure'
100 | env:
101 | GH_TOKEN: ${{ secrets.BOT_PAT }}
102 | BRANCH_NAME: chore-${{ github.workflow }}-${{ github.run_number }}
103 |
--------------------------------------------------------------------------------
/config/devtools.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # TODO: allow layered loading from other repos like dependabot config
3 | # Symlink this as ~/pre.yml and use https://github.com/pycontribs/gh-pre
4 | # This file documents repositories that are nurtured by Ansible DevTools team.
5 | # Not all these repos are fully managed or maintained by the team.
6 | # We aim to use this file when performing maintenance and audits,
7 | # e.g. to review open pull requests and issues.
8 | _version: 1
9 | # _extends: gh-org/gh-repo
10 | teams:
11 | devtools:
12 | # https://rover.redhat.com/groups/group/ansible-devtools (ldap)
13 | # https://github.com/orgs/ansible/teams/devtools/members (github)
14 | - alisonlhart
15 | - audgirka
16 | - qalthos
17 | - shatakshiiii
18 | - ssbarnea
19 | indirect:
20 | - KB-perByte # Sagar
21 | - ansibuddy
22 | - ariordan-redhat
23 | - cidrblock # Brad
24 | - ganeshrn
25 | - ruchip16
26 | - trishnaguha
27 | labels:
28 | AAP:
29 | color: "#F9D0C4"
30 | description: "Ansible Automation Platform"
31 | # official prefixes from: https://www.conventionalcommits.org
32 | fix:
33 | color: "#fbca04"
34 | description: "Patch a bug in the codebase"
35 | feat:
36 | color: "#c6476b"
37 | description: "Introduce a feature in the code base"
38 | breaking:
39 | color: "#FBCA04"
40 | description: "Breaking change"
41 | chore:
42 | description: "Maintenance tasks, include ones with chore, build, ci, docs, style, refactor, perf, test."
43 | color: "#EEEEEE"
44 | # other labels, no special meaning
45 | new:
46 | color: "#0E8A16"
47 | # upstream:
48 | # color: "#C239D6"
49 | # stale:
50 | # color: "#AAAAAA"
51 | # skip-changelog:
52 | # color: "#EEEEEE"
53 | # description: "Do not include in changelog"
54 | # keep:
55 | # color: "#000000"
56 | # description: "Do not close"
57 | repos:
58 | # repo key can be 'gh-org/gh-repo' or other simple, override url could be
59 | # configured using 'repo:' key.
60 | _default:
61 |     # special meaning, all other keys will inherit attributes from this entry
62 | group: primary
63 | # owners: is used to record SMEs, primary, backup, ...
64 | # ansible-community/molecule-plugins:
65 | # section: community
66 | ansible/actions:
67 | owners: [ssbarnea]
68 | section: experimental
69 | ansible/ansible-compat:
70 | owners: [qalthos, audgirka]
71 | section: primary
72 | ansible/ansible-content-actions:
73 | owners: [KB-perByte, ssbarnea]
74 | section: experimental
75 | # https://github.com/marketplace/actions/ansible-content-actions
76 | ansible/ansible-creator:
77 | owners: [audgirka, shatakshiiii]
78 | section: experimental
79 | ansible/ansible-dev-environment:
80 | owners: [qalthos, shatakshiiii]
81 | section: primary
82 | ansible/ansible-dev-tools:
83 | owners: [audgirka, shatakshiiii]
84 | section: primary
85 | # https://github.com/orgs/ansible/packages/container/community-ansible-dev-tools/settings
86 | ansible/ansible-lint:
87 | owners: [audgirka, qalthos]
88 | section: primary
89 | ansible/ansible-navigator:
90 | owners: [shatakshiiii]
91 | section: primary
92 | ansible/ansible-sign:
93 | owners: [ssbarnea]
94 | section: primary
95 | ansible/mkdocs-ansible:
96 | section: community
97 | ansible/molecule:
98 | owners: [qalthos, audgirka]
99 | ansible/pytest-ansible:
100 | owners: [shatakshiiii, qalthos]
101 | section: experimental
102 | ansible/team-devtools:
103 | owners: [ssbarnea]
104 | ansible/tox-ansible:
105 | owners: [shatakshiiii, qalthos]
106 | section: experimental
107 | ansible/vscode-ansible:
108 | owners: [alisonlhart, audgirka]
109 | ansible/ansible-contributor-metrics:
110 | owners: [ssbarnea]
111 | section: experimental
112 | pycontribs/subprocess-tee:
113 | section: community
114 | sections:
115 | community:
116 | deprecated:
117 | experimental:
118 | primary:
119 | # unmaintained, obsoleted or archived:
120 | # ansible-community/protogen
121 | # ansible-community/role2collection
122 | # ansible/ansible-language-server (now part vscode-ansible)
123 | # ansible/ansible-lint-action (now part of ansible-lint)
124 | # ansible/community-ansible-dev-tools-image (now part of ansible-dev-tools)
125 | # ansible/creator-ee (now part of ansible-dev-tools)
126 | # pycontribs/selinux
127 | # pytest-molecule
128 |
--------------------------------------------------------------------------------
/docs/guides/calver.md:
--------------------------------------------------------------------------------
1 | # CalVer Releases
2 |
3 | Devtools projects are released with [CalVer](https://calver.org/) scheme version numbers.
4 | The particular scheme we are using is `YY.MM.MICRO`, meaning that a release in
5 | March 2025 will be named `25.3.0`, and if a patch (i.e., non-feature) release is
6 | required for that release, it will be named `25.3.1`, even if it is released in
7 | April. The month will not increment until a new version with features or other
8 | significant changes is released.
9 |
10 | Because of the date-based approach, feature releases of this sort happen on a
11 | regular cadence which depends on the project but is between every month and
12 | every three months.
13 |
14 | As this is currently a new scheme, care should be taken to ensure that releases
15 | increment the month and year properly. No release should increment the most
16 | significant section unless it takes place on a different year than the previous
17 | release, and a release that happens more than one month after the previous release
18 | should take care to increment the second most significant section of the version
19 | appropriately.
20 |
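To make the increment rules above concrete, here is a minimal, hypothetical Python sketch (not taken from any DevTools project) of how the next version could be derived from the previous release and the release date:

```python
from datetime import date


def next_calver(previous: str, today: date, feature_release: bool) -> str:
    """Return the next YY.MM.MICRO version under the rules described above."""
    prev_yy, prev_mm, prev_micro = (int(part) for part in previous.split("."))
    if not feature_release:
        # Patch releases keep the original year/month, even when shipped in a later month.
        return f"{prev_yy}.{prev_mm}.{prev_micro + 1}"
    # Feature releases take the current year and month and reset MICRO.
    return f"{today.year % 100}.{today.month}.0"


assert next_calver("25.3.0", date(2025, 4, 10), feature_release=False) == "25.3.1"
assert next_calver("25.3.1", date(2025, 5, 2), feature_release=True) == "25.5.0"
```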
21 | Because this scheme eschews semantic meaning to the version numbers, extra care
22 | should be taken at release time that the changelog is complete and appropriately
23 | categorized.
24 |
25 | - Deprecation notices should be added to the changelog as soon as they are
26 | agreed upon. The deprecation cycle cannot start before the deprecation has
27 | been announced.
28 | - Changes should be properly categorized so that users can see at a glance what
29 | kind of changes are to be expected. Breaking changes should be clearly called
30 | out and announced well in advance to limit surprise, as version bounding is no
31 | longer possible.
32 |
33 | # Devtools Projects Transitioning to CalVer Releases
34 |
35 | Projects in the devtools space will be switching to [CalVer](https://calver.org/)
36 | releases over the next few weeks. This is a surprising change, so let's get into
37 | what that means.
38 |
39 | ## What
40 |
41 | These are the projects that will be transitioning to CalVer:
42 |
43 | - ansible-compat
44 | - ansible-creator
45 | - ansible-dev-environment
46 | - ansible-lint
47 | - ansible-navigator
48 | - molecule
49 | - pytest-ansible
50 | - tox-ansible
51 | - vscode-ansible
52 |
53 | We will use a `YY.MM.MICRO` version format. Thus, a release for March 2025 will be
54 | named `25.3.0`, and if a patch (i.e., non-feature) release is required for that release,
55 | it will be named `25.3.1`, even if it is released in April. The month will not
56 | increment until a new version with features or other significant changes is released.
57 |
58 | ## Why
59 |
60 | This is a bit of a change so let's go over what we hope to accomplish with it.
61 |
62 | - Predictable, transparent release cadence
63 |
64 | With this, we are committing to time-based releases for all projects.
65 | While the exact frequency will vary between projects, each will have a release
66 | between one and three months after the last feature release.
67 |
68 | - Version number indicates the age of a release
69 |
70 | With CalVer, the age of a release can be trivially determined from the version
71 | number, instead of having to look up the release notes as at present.
72 |
73 | - Easier to translate versions between tools
74 |
75 | Many of our tools are interrelated. A consistent version scheme allows one to
76 | have a good idea of how related but independent tools are expected to work together.
77 |
78 | ## How
79 |
80 | Following this announcement, the next feature release in each project will
81 | be a CalVer release.
82 |
83 | Feature releases will not happen more often than once a month, though
84 | patch releases may happen more often as needed. We will also make
85 | releases at least once every three months for each project.
86 |
87 | Releases will still split out changes by category, including new features,
88 | bugfixes, documentation updates, announced deprecations, and removed features.
89 |
90 | One of the things we are bringing with this change is an emphasis on fewer
91 | breaking changes and more emphasis on deprecation cycles and overlapping features.
92 | When something is deprecated, it will be called out in the release notes, along
93 | with its replacement and how long the feature will remain in place.
94 |
95 | ## What's Next
96 |
97 | As mentioned, this change will begin rolling out over the next few weeks. However,
98 | if you have any comments or concerns, please let us know.
99 |
--------------------------------------------------------------------------------
/.github/workflows/ack.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # ack workflow runs on any change made to a pull-request and aims to verify
3 | # that it is following our practices. The initial version checks for correct label
4 | # presence.
5 | name: ack
6 | on:
7 | issues:
8 | types: [opened, reopened]
9 | merge_group:
10 | pull_request_target:
11 | types: [opened, labeled, unlabeled, synchronize]
12 | workflow_call: # allows reuse of this workflow from other devtools repos
13 | secrets:
14 | BOT_PAT:
15 | required: false
16 | jobs:
17 | ack:
18 | runs-on: ubuntu-24.04
19 | environment: ack
20 | env:
21 | BOT_PAT: ${{ secrets.BOT_PAT || secrets.ANSIBUDDY_BOT_PAT }}
22 | permissions:
23 | checks: write
24 | contents: write # needed to update release
25 | pull-requests: write # pr approval and merge
26 | steps:
27 | - name: Checkout
28 | uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
29 |
30 | - name: Dump GitHub context
31 | env:
32 | GITHUB_CONTEXT: ${{ toJson(github) }}
33 | run: echo "$GITHUB_CONTEXT"
34 |
35 | - uses: bcoe/conventional-release-labels@v1
36 | with:
37 | type_labels: '{"feat": "feat", "fix": "fix", "breaking": "breaking", "chore": "chore"}'
38 | ignored_types: "[]"
39 |
40 | - name: Verify PR label action
41 | if: github.event_name != 'merge_group'
42 | uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5
43 | with:
44 | mode: exactly
45 | count: 1
46 | add_comment: true
47 | message: >
48 | Label error. Requires {{errorString}} {{count}} of: {{ provided }}.
49 | Found: {{ applied }}. Follow https://www.conventionalcommits.org to
50 | get auto-labeling to work correctly.
51 | exit_type: failure
52 | # see conventional commits prefixes: https://www.conventionalcommits.org
53 | labels: |
54 | breaking
55 | build
56 | chore
57 | ci
58 | docs
59 | feat
60 | fix
61 | perf
62 | refactor
63 | style
64 | test
65 |
66 | - name: Update release notes if this is already merged
67 | if: github.event.pull_request.merged == true
68 | uses: release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5 # v6
69 | env:
70 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
71 |
72 | - name: Add issue or pull-request to 'devtools' project
73 | # Workaround for running this step only if BOT_PAT secret is found
74 | # also this action would not work on merge_group events.
75 | if: env.BOT_PAT != null && github.event_name != 'merge_group'
76 | # Continuing on errors to avoid failures for duplicate content in the project board.
77 | # Occurs when PRs are created from draft on the board, which is standard behavior
78 | # from external orgs (e.g. from ansible-automation-platform org).
79 | # See https://github.com/actions/add-to-project/issues/389
80 | continue-on-error: true
81 | uses: actions/add-to-project@main
82 | with:
83 | project-url: https://github.com/orgs/ansible/projects/86
84 | # Do not use secrets.GITHUB_TOKEN here because it does not have
85 | # access to projects. Only personal access tokens (PAT) can be used.
86 | github-token: ${{ env.BOT_PAT }}
87 | # labeled: skip-changelog
88 | # label-operator: NOT
89 |
90 | # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#approve-a-pull-request
91 | - name: Enable auto-merge and approve PRs from safe bots
92 | # do not use github.actor as this can be someone else than the PR author
93 | if: >
94 | env.BOT_PAT != null &&
95 | github.event_name != 'merge_group' && (
96 | github.event.pull_request.user.login == 'dependabot[bot]' ||
97 | github.event.pull_request.user.login == 'pre-commit-ci[bot]' ||
98 | github.event.pull_request.user.login == 'renovate[bot]'
99 | )
100 | run: |
101 | set -e
102 | gh pr merge --auto --squash "$PR_URL"
103 | gh pr review --approve "$PR_URL"
104 | env:
105 | PR_URL: ${{ github.event.pull_request.html_url }}
106 | GH_TOKEN: ${{ env.BOT_PAT }}
107 |
--------------------------------------------------------------------------------
/.github/workflows/finalize.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: finalize
3 | on:
4 | workflow_call:
5 | inputs:
6 | run-id:
7 | description: "The workflow run ID to download artifacts from (for workflow_run triggers)"
8 | required: false
9 | type: string
10 | default: ""
11 | workflow-event:
12 | description: "The event that triggered the workflow (pull_request or push)"
13 | required: false
14 | type: string
15 | default: ""
16 | head-sha:
17 | description: "The head SHA of the workflow run (for workflow_run triggers)"
18 | required: false
19 | type: string
20 | default: ""
21 | head-branch:
22 | description: "The head branch of the workflow run (for workflow_run triggers)"
23 | required: false
24 | type: string
25 | default: ""
26 | head-repository:
27 | description: "The head repository full name (for workflow_run triggers)"
28 | required: false
29 | type: string
30 | default: ""
31 | secrets:
32 | CICD_ORG_SONAR_TOKEN_CICD_BOT:
33 | required: true
34 |
35 | permissions: read-all
36 |
37 | jobs:
38 | finalize:
39 | name: finalize
40 | runs-on: ubuntu-latest
41 | steps:
42 | - name: Checkout repository
43 | uses: actions/checkout@v6
44 | with:
45 | fetch-depth: 0
46 | show-progress: false
47 |
48 | # Download coverage artifacts from the test job (same workflow run)
49 | - name: Download artifacts
50 | uses: actions/download-artifact@v6
51 | with:
52 | name: logs.zip
53 | github-token: ${{ secrets.GITHUB_TOKEN }}
54 | run-id: ${{ inputs.run-id }}
55 |
56 | # For PRs: Extract PR number from artifact file
57 | - name: Fetch PR Number artifact
58 | if: inputs.workflow-event == 'pull_request'
59 | uses: actions/download-artifact@v6
60 | with:
61 | name: pr_number
62 | path: .
63 | github-token: ${{ secrets.GITHUB_TOKEN }}
64 | run-id: ${{ inputs.run-id }}
65 |
66 | - name: Extract PR Number
67 | if: inputs.workflow-event == 'pull_request'
68 | run: |
69 | cat pr_number.txt
70 | PR_NUM=$(head -n1 pr_number.txt | awk '{print $3}')
71 | echo "Found PR number: $PR_NUM"
72 | echo "PR_NUMBER=$PR_NUM" >> $GITHUB_ENV
73 |
74 | - name: Get Additional PR Information
75 | if: inputs.workflow-event == 'pull_request'
76 | env:
77 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
78 | run: |
79 | PR_DATA=$(gh api "repos/${{ github.repository }}/pulls/${{ env.PR_NUMBER }}")
80 | echo "PR_BASE=$(echo "$PR_DATA" | jq -r '.base.ref')" >> $GITHUB_ENV
81 | echo "PR_HEAD=$(echo "$PR_DATA" | jq -r '.head.ref')" >> $GITHUB_ENV
82 |
83 | - name: Checkout PR branch
84 | if: inputs.workflow-event == 'pull_request'
85 | run: |
86 | gh pr checkout ${{ env.PR_NUMBER }}
87 | env:
88 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
89 |
90 | - name: Prepare SonarCloud args
91 | if: hashFiles('**/coverage.xml') != ''
92 | shell: bash
93 | run: |
94 | REPO_NAME="${{ github.repository }}"
95 | COMMIT_SHA="${{ inputs.head-sha }}"
96 |
97 | # Split repo name into owner and repo
98 | IFS="/" read -r REPO_OWNER REPO_NAME_ONLY <<< "$REPO_NAME"
99 |
100 | SONAR_ARGS="-Dsonar.projectKey=${REPO_OWNER}_${REPO_NAME_ONLY} -Dsonar.organization=${REPO_OWNER}"
101 | SONAR_ARGS="${SONAR_ARGS} -Dsonar.scm.revision=$COMMIT_SHA"
102 |
103 | # Add PR-specific args if this is a pull request
104 | WORKFLOW_EVENT="${{ inputs.workflow-event }}"
105 | if [[ "$WORKFLOW_EVENT" == "pull_request" && -n "$PR_NUMBER" ]]; then
106 | SONAR_ARGS="${SONAR_ARGS} -Dsonar.pullrequest.key=$PR_NUMBER"
107 | SONAR_ARGS="${SONAR_ARGS} -Dsonar.pullrequest.branch=$PR_HEAD"
108 | SONAR_ARGS="${SONAR_ARGS} -Dsonar.pullrequest.base=$PR_BASE"
109 | fi
110 |
111 | echo "SONAR_ARGS=$SONAR_ARGS" >> $GITHUB_ENV
112 |
113 | - name: Check for coverage files
114 | run: |
115 | COVERAGE_FILES=$(find . -name "coverage.xml" -type f 2>/dev/null | wc -l)
116 | if [ "$COVERAGE_FILES" -gt 0 ]; then
117 | echo "Coverage Data: Available ($COVERAGE_FILES file(s) found)"
118 | find . -name "coverage.xml" -type f | sed 's/^/│ /'
119 | else
120 | echo "Coverage Data: Not available - exiting"
121 | exit 1
122 | fi
123 |
124 | echo "Running SonarCloud analysis..."
125 |
126 | - name: SonarCloud Scan
127 | uses: SonarSource/sonarqube-scan-action@v6
128 | env:
129 | SONAR_TOKEN: ${{ secrets.CICD_ORG_SONAR_TOKEN_CICD_BOT }}
130 | with:
131 | args: ${{ env.SONAR_ARGS }}
132 |
--------------------------------------------------------------------------------
/docs/guides/python/tox.md:
--------------------------------------------------------------------------------
1 | The Ansible DevTools team uses [tox](https://tox.wiki/) as a generic test runner, even on projects that are not Python based. We do this because npm is quite limited and lacks the ability to document various commands and to expose them in a friendly way.
2 |
3 | Tox uses the concept of default (implicit) test environments and optional ones. If you call tox without any arguments, it will run all the default ones. Most of the time you just want to run a specific one using `tox -e <env_name>`.
4 |
5 | To list the current test environments and their descriptions, just run `tox -av`. The output will look similar to:
6 |
7 | ```text
8 | $ tox -av
9 | default environments:
10 | lint -> Run linting
11 | packaging -> Build package, verify metadata, install package.
12 | test-ui -> Run UI tests (extest, install test)
13 | test-e2e -> Run e2e tests (mocha)
14 |
15 | additional environments:
16 | test-ui-oldest -> Run UI tests (extest, install test) Run UI tests using oldest version of vscode we support
17 | code -> Build extension and installs it in your vscode instance (forced)
18 | deps -> Update dependencies
19 | ```
20 |
21 | The following sections are more aimed towards documenting practices about maintaining tox files across our repositories and CI/CD integration.
22 |
23 | ## envlist should not contain specific python versions
24 |
25 | Instead of listing all currently supported Python versions inside `envlist`, we should only list the generic `py`. This allows a developer to run all tests for their current version of Python with just `tox`.
26 |
27 | On the other hand, we should never use just `tox` or `tox -e py` in CI/CD pipelines, as this would introduce bugs sooner or later when the Python version used for testing changes.
28 |
29 | ```ini
30 | envlist =
31 | lint
32 | py{310,39,38} # <-- BAD, replace with just `py`
33 | ```
34 |
35 | One extra advantage of this approach is that we no longer need to update the config file when the supported Python version matrix changes.
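For contrast, a minimal sketch of the preferred form (the exact environment names vary per project) looks like this; CI/CD pipelines then request explicit factors such as `tox -e py310` instead of relying on the generic `py`:

```ini
envlist =
    lint
    py          # generic, runs against whatever python interpreter is active locally
    packaging
```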
36 |
37 | ## envlist items should be sorted by duration
38 |
39 | In order to fail fast, we want to run quick tests first, like `lint`. The longest test suite should be last.
40 |
41 | ## all envlist entries must have a description
42 |
43 | When you run `tox -av`, you will also see the description of each environment, which can be quite informative for a new contributor. Be sure that all of them have a description, one that is less than ~100 characters, as wrapping them in the terminal makes the whole list harder to read. If you want to add extra details, use comments inside the ini file.
44 |
45 | ## use short and consistent naming for env names
46 |
47 | Tox env names should be short (easy to type) and consistent across projects, so they do not conflict with muscle-memory.
48 |
49 | A tox env name should not be about the tool, as that may change; it should be more about the nature/category/area of the testing.
50 |
51 | Current list of popular names:
52 |
53 | - `py` - unit tests
54 | - `lint` - linting, runs all linters (likely pre-commit)
55 | - `docs` - anything related to building docs (e.g. Sphinx)
56 | - `packaging` - tests related to python packaging
57 | - `deps` - bumping of test dependencies. This should not be included in the default `envlist`; it is expected to be run only manually.
58 |
59 | ## Dependency pinning
60 |
61 | We never pin dependencies directly in the package metadata because we do not want to prevent people from using it with other tools. The general rule is that we keep the dependencies as relaxed as possible, but we do have to add a ceiling from time to time. Still, even a ceiling is discouraged and should be added only when we already know that the new releases are going to break us.
62 |
63 | We cannot really rely on semantic versioning for ceilings, as we already know that there are projects that break even on minor patches and other projects that release major versions often but are still unlikely to break us at all. In the end, the decision should be taken case by case.
64 |
65 | In order to keep our local testing and CI/CD pipelines from randomly getting broken, we pin test dependencies using a single project-wide constraints file named `requirements.txt`. Do not confuse this file with the normal dependencies declared in the project metadata (`setup.cfg`). The reason we use this slightly misleading filename is that it is the only filename recognized by dependabot.
66 |
67 | This file is updated either by manually running the `tox -e deps` job or by dependabot's scheduled runs. The latter creates pull requests for updating each dependency, so we know before we start using a new version and avoid getting our CI/CD broken due to external causes.
68 |
69 | In order to achieve this, we define `PIP_CONSTRAINT=requirements.txt` inside `tox.ini` and also in the https://readthedocs.org/ configuration. This tells pip to always use the versions mentioned there.
70 |
71 | There are a few particular tox environments where we neuter this option by setting `PIP_CONSTRAINT=/dev/null` (see the sketch after this list):
72 |
73 | - `deps` - as we want to allow pip-compile to be able to bump dependencies
74 | - `lint` - as the pre-commit tool has its own pinning logic, which is incompatible with the use of a pip constraints file
75 | - `devel` ones, where we usually use some pre-released or unreleased versions of key dependencies or sister projects. Use of pinning would prevent us from doing this.
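As a rough illustration only (real `tox.ini` files differ between our projects), the pinning described above can be wired up roughly like this:

```ini
[testenv]
# Apply the project-wide constraints file to every regular environment.
setenv =
    PIP_CONSTRAINT = {toxinidir}/requirements.txt

[testenv:deps]
description = Bump test dependencies
# Neutralize the constraints so the dependency bump can raise the pinned versions.
setenv =
    PIP_CONSTRAINT = /dev/null
```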
76 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | hide:
3 | - toc
4 | ---
5 |
6 | # devtools
7 |
8 | This repository is used to share practices, workflows and decisions affecting projects maintained by Ansible DevTools team.
9 |
10 | ## Python DevTools project dependencies
11 |
12 | It should be noted that our vscode extension either depends on the `ansible-dev-tools` python package or directly uses our execution environments.
13 |
14 | ```mermaid
15 | %%{init: {'theme':'neutral', 'themeVariables': { 'edgeLabelBackground': 'transparent'}}}%%
16 | graph LR
17 |
18 | lint(ansible-lint):::pyclass
19 | compat(ansible-compat):::pyclass
20 | navigator(ansible-navigator):::pyclass
21 | adt(ansible-dev-tools):::pyclass;
22 | ade(ansible-dev-environment):::pyclass
23 | creator(ansible-creator):::pyclass;
24 | pytest-ansible(pytest-ansible):::pyclass
25 | tox-ansible(tox-ansible):::pyclass
26 | molecule(molecule):::pyclass
27 | community.molecule(community.molecule):::collectionclass
28 | builder(ansible-builder):::pyclass
29 | runner(ansible-runner):::pyclass
30 | image(community-ansible-dev-tools-image):::containerclass
31 | sign(ansible-sign):::pyclass
32 |
33 | classDef tsclass fill:#f90,stroke:#f90,color:#333
34 | classDef containerclass fill:#060,stroke:#060,color:#fff
35 | classDef collectionclass fill:#5bbdbf,stroke-width:0px
36 | classDef pyclass fill:#09f5,stroke:#09f0,color:#fff
37 | style external color:#0FF5,fill:#fff0,stroke:#0FF5
38 | linkStyle default stroke:grey,text-decoration:none
39 |
40 | subgraph external
41 | builder
42 | runner
43 | sign
44 | end
45 |
46 | adt ==> lint
47 | adt ==> navigator
48 | adt ==> molecule
49 | adt ==> ade
50 | adt ==> creator
51 | adt ==> sign
52 |
53 | lint ==> compat
54 | compat ==. test .==> community.molecule
55 | molecule ==> compat
56 | molecule ==. test .==> community.molecule:::collectionclass
57 |
58 | navigator ==.==> lint
59 | navigator ==.==> image
60 | navigator ==> runner
61 | navigator ==..==> builder
62 |
63 | adt ==> ade;
64 | adt ==> creator
65 | adt ==> pytest-ansible
66 | adt ==> tox-ansible;
67 |
68 | ade ==> builder;
69 |
70 | click adt "https://github.com/ansible/ansible-dev-tools"
71 | click ade "https://github.com/ansible/ansible-dev-environment"
72 | click runner "https://github.com/ansible/ansible-runner"
73 | click builder "https://github.com/ansible/ansible-builder"
74 | click community.molecule "https://github.com/ansible-collections/community.molecule"
75 | click molecule href "https://github.com/ansible/molecule"
76 | click image href "#container-image"
77 | click ws "https://github.com/ansible/ansible-workspace-env-reference-image"
78 | click lint href "https://github.com/ansible/ansible-lint"
79 | click compat href "https://github.com/ansible/ansible-compat"
80 | click navigator href "https://github.com/ansible/ansible-navigator"
81 | click creator href "https://github.com/ansible/ansible-creator"
82 | click tox-ansible href "https://github.com/ansible/tox-ansible"
83 | click pytest-ansible href "https://github.com/ansible/pytest-ansible"
84 |
85 | linkStyle 0,1,2,3,4,5,6,7,8,9 color:darkcyan
86 |
87 | ```
88 |
89 | ## TypeScript repositories
90 |
91 | ```mermaid
92 |
93 | graph TB;
94 |
95 | classDef tsclass fill:#f90,stroke:#f90,color:#333;
96 | classDef containerclass fill:#060,stroke:#060,color:#fff;
97 | classDef thirdpartyclass fill:#9f6,stroke:#9f6,color:#333;
98 |
99 | ansible-backstage-plugins:::tsclass;
100 | vscode-ansible:::tsclass == external ==> vscode-yaml;
101 | vscode-yaml:::tsclass;
102 |
103 | click ansible-backstage-plugins "https://github.com/ansible/ansible-backstage-plugins"
104 | click ansible-dev-environment "https://github.com/ansible/ansible-dev-environment"
105 | click community.molecule "https://github.com/ansible-collections/community.molecule"
106 | click vscode-ansible href "https://github.com/ansible/vscode-ansible"
107 | click vscode-yaml href "https://github.com/redhat-developer/vscode-yaml"
108 | ```
109 |
110 | ## Container Images
111 |
112 | `community-ansible-dev-tools-image` **execution environment** is a development
113 | **container image** that contains the most important tools used in the
114 | development and testing of collections. Still, while we bundle several
115 | collections in it, be warned that **we might remove any included
116 | collection without notice** if it prevents us from
117 | building the container.
118 |
119 | Below is the list of collections that are currently included in the
120 | container images, but this list is subject to change, even on a minor release.
121 | If a collection fails to install or causes installation failures, we will
122 | release the container without it.
123 |
124 | - ansible.posix
125 | - ansible.windows
126 | - awx.awx
127 | - containers.podman
128 | - kubernetes.core
129 | - redhatinsights.insights
130 | - theforeman.foreman
131 |
132 | Some common command line **tools** are also included in order to help developers:
133 |
134 | - git
135 | - podman
136 | - tar
137 | - zsh (default shell)
138 |
139 | ```mermaid
140 | graph TB;
141 |
142 | ee("community-ansible-dev-tools-image
fedora-minimal based container")
143 | adt(ansible-dev-tools)
144 | devspaces("ansible-devspaces
ubi8 based container")
145 | adt(ansible-dev-tools)
146 |
147 | build --> ee;
148 | build --> devspaces;
149 |
150 | subgraph build
151 | collections
152 | adt
153 | tools
154 | end
155 |
156 | click adt "https://github.com/ansible/ansible-dev-tools"
157 | click ee "https://github.com/ansible/ansible-dev-tools/pkgs/container/community-ansible-dev-tools"
158 | click devspaces "https://github.com/ansible/ansible-dev-tools/pkgs/container/ansible-devspaces"
159 |
160 | ```
161 |
--------------------------------------------------------------------------------
/.github/workflows/forum_post.py:
--------------------------------------------------------------------------------
1 | # cspell:ignore devcontainer # noqa: INP001
2 | """A forum poster."""
3 |
4 | from __future__ import annotations
5 |
6 | import argparse
7 | import json
8 | import urllib.request
9 | from urllib.request import Request
10 |
11 |
12 | POST_MD = """Hello everyone,
13 |
14 | We are happy to announce the release of {project_short} {release}.
15 |
16 | # How to get it
17 |
18 | You can install the latest version of all the ansible developer tools by running the following command:
19 |
20 | ```bash
21 | python3 -m pip install -U ansible-dev-tools
22 | ```
23 |
24 | This will install the following developer tools:
25 |
26 | - [ansible-builder](https://ansible.readthedocs.io/projects/builder/): a utility for building Ansible execution environments.
27 | - [ansible-core](https://ansible.readthedocs.io/projects/ansible/): Ansible is a radically simple IT automation platform that makes your applications and systems easier to deploy and maintain. Automate everything from code deployment to network configuration to cloud management, in a language that approaches plain English, using SSH, with no agents to install on remote systems.
28 | - [ansible-creator](https://ansible.readthedocs.io/projects/creator/): a utility for scaffolding Ansible project and content with recommended practices.
29 | - [ansible-dev-environment](https://ansible.readthedocs.io/projects/dev-environment/): a utility for building and managing a virtual environment for Ansible content development.
30 | - [ansible-lint](https://ansible.readthedocs.io/projects/lint/): a utility to identify and correct stylistic errors and anti-patterns in Ansible playbooks and roles.
31 | - [ansible-navigator](https://ansible.readthedocs.io/projects/navigator/): a text-based user interface (TUI) for developing and troubleshooting Ansible content with execution environments.
32 | - [ansible-sign](https://ansible.readthedocs.io/projects/sign/): a utility for signing and verifying Ansible content.
33 | - [molecule](https://ansible.readthedocs.io/projects/molecule/): a functional test runner for Ansible collections, playbooks and roles
34 | - [pytest-ansible](https://ansible.readthedocs.io/projects/pytest-ansible/): a pytest testing framework extension that provides additional functionality for testing Ansible module and plugin Python code.
35 | - [tox-ansible](https://ansible.readthedocs.io/projects/tox-ansible/): an extension to the tox testing utility that provides additional functionality to check Ansible module and plugin Python code under different Python interpreters and Ansible core versions.
36 |
37 | For a single tool, you can install it by running:
38 |
39 | ```bash
40 | python3 -m pip install -U <tool-name>==<version>
41 | ```
42 |
43 | All ansible developer tools are also packaged in an image that you can use as a [VS Code development container](https://code.visualstudio.com/docs/devcontainers/containers). The image is updated shortly after releases of any individual tool.
44 | The [community-dev-tools](https://github.com/ansible/ansible-dev-tools/pkgs/container/community-ansible-dev-tools) image is available on GitHub Container Registry.
45 |
46 | ```
47 | podman run -it ghcr.io/ansible/community-ansible-dev-tools:latest
48 | ```
49 |
50 | Sample `devcontainer.json` files are available in the [ansible-dev-tools](https://github.com/ansible/ansible-dev-tools/tree/main/.devcontainer) repository.
51 |
52 | # Release notes for {project_short} {release}
53 |
54 | {release_notes}
55 |
56 | Release notes for all versions can be found in the [changelog](https://github.com/ansible/{project_short}/releases).
57 |
58 | """
59 |
60 |
61 | class Post:
62 | """A class to post a release on the Ansible forum."""
63 |
64 | def __init__(self, project: str, release: str, forum_api_key: str, forum_user: str) -> None:
65 | """Initialize the Post class.
66 |
67 | Args:
68 | project: The project name.
69 | release: The release version.
70 | forum_api_key: The forum API key.
71 | forum_user: The forum user.
72 | """
73 | self.category_id: str
74 | self.created: str
75 | self.forum_api_key = forum_api_key
76 | self.forum_user = forum_user
77 | self.project = project
78 | self.project_short = project.split("/")[-1]
79 | self.release = release
80 | self.release_notes: str
81 |
82 | def _get_release_notes(self) -> tuple[str, str]:
83 | """Get the release notes for the project."""
84 | release_url = f"https://api.github.com/repos/{self.project}/releases/tags/{self.release}"
85 | with urllib.request.urlopen(release_url) as url: # noqa: S310
86 | data = json.load(url)
87 |
88 | release_notes = data["body"]
89 | created = data["published_at"]
90 | return release_notes, created
91 |
92 | def _get_category_id(self) -> str:
93 | """Get the category ID for the project."""
94 | categories_url = "https://forum.ansible.com/categories.json?include_subcategories=true"
95 | categories_request = Request(categories_url) # noqa: S310
96 | categories_request.add_header("Api-Key", self.forum_api_key)
97 | categories_request.add_header("Api-Username", self.forum_user)
98 | with urllib.request.urlopen(url=categories_request) as url: # noqa: S310
99 | data = json.load(url)
100 | category = next(
101 | c for c in data["category_list"]["categories"] if c["name"] == "News & Announcements"
102 | )
103 | return next(c for c in category["subcategory_list"] if c["name"] == "Ecosystem Releases")[
104 | "id"
105 | ]
106 |
107 | def _prepare_post(self) -> dict[str, str | list[str]]:
108 | post_md = POST_MD.format(
109 | project_short=self.project_short,
110 | release=self.release,
111 | release_notes=self.release_notes,
112 | )
113 | title = f"Release Announcement: {self.project_short} {self.release}"
114 |
115 | return {
116 | "title": title,
117 | "raw": post_md,
118 | "category": self.category_id,
119 | "created_at": self.created,
120 | "tags": ["devtools", "release", self.project_short],
121 | }
122 |
123 | def post(self) -> None:
124 | """Post the release announcement to the forum."""
125 | # Populate release notes and forum category
126 | self.release_notes, self.created = self._get_release_notes()
127 | self.category_id = self._get_category_id()
128 |
129 | payload = self._prepare_post()
130 | data = json.dumps(payload).encode("utf-8")
131 |
132 | url = "https://forum.ansible.com/posts.json"
133 | request = Request(url) # noqa: S310
134 | request.method = "POST"
135 | request.add_header("Api-Key", self.forum_api_key)
136 | request.add_header("Api-Username", self.forum_user)
137 | request.add_header("Content-Type", "application/json")
138 | with urllib.request.urlopen(url=request, data=data): # noqa: S310
139 | print(f"Posted {payload['title']} to the forum.") # noqa: T201
140 |
141 |
142 | def main() -> None:
143 | """Run the Post class."""
144 | parser = argparse.ArgumentParser(
145 | description="Post a release announcement to the Ansible forum.",
146 | )
147 | parser.add_argument("project", help="The project name. e.g. ansible/tox-ansible")
148 | parser.add_argument("release", help="The release version.")
149 | parser.add_argument("forum_api_key", help="The forum API key.")
150 | parser.add_argument("forum_user", help="The forum user.")
151 | args = parser.parse_args()
152 | post = Post(args.project, args.release, args.forum_api_key, args.forum_user)
153 | post.post()
154 |
155 |
156 | if __name__ == "__main__":
157 | main()
158 |
--------------------------------------------------------------------------------
/docs/stats/repos.md:
--------------------------------------------------------------------------------
1 | # Ansible DevTools Repository Status
2 |
3 |
4 |
5 | ## actions
6 |
7 | [CI](https://github.com/ansible/actions/actions/workflows/tox.yml?query=event%3Aschedule)
8 | [coverage](https://codecov.io/github/ansible/actions)
9 | [open PRs](https://github.com/ansible/actions/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
10 |
11 | ## ansible-compat
12 |
13 | [CI](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml?query=event%3Aschedule)
14 | [coverage](https://codecov.io/github/ansible/ansible-compat)
15 | [open PRs](https://github.com/ansible/ansible-compat/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
16 |
17 | ## ansible-content-actions
18 |
19 | [CI](https://github.com/ansible/ansible-content-actions/actions/workflows/ci.yml?query=event%3Aschedule)
20 | [open PRs](https://github.com/ansible/ansible-content-actions/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
21 |
22 |
23 | ## ansible-creator
24 |
25 | [CI](https://github.com/ansible/ansible-creator/actions/workflows/tox.yml?query=event%3Aschedule)
26 | [coverage](https://codecov.io/github/ansible/ansible-creator)
27 | [open PRs](https://github.com/ansible/ansible-creator/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
28 |
29 | ## ansible-dev-environment
30 |
31 | [CI](https://github.com/ansible/ansible-dev-environment/actions/workflows/tox.yml?query=event%3Aschedule)
32 | [coverage](https://codecov.io/github/ansible/ansible-dev-environment)
33 | [open PRs](https://github.com/ansible/ansible-dev-environment/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
34 |
35 | ## ansible-dev-tools
36 |
37 | [CI](https://github.com/ansible/ansible-dev-tools/actions/workflows/tox.yml?query=event%3Aschedule)
38 | [coverage](https://codecov.io/gh/ansible/ansible-dev-tools)
39 | [open PRs](https://github.com/ansible/ansible-dev-tools/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
40 |
41 | ## ansible-lint
42 |
43 | [CI](https://github.com/ansible/ansible-lint/actions/workflows/tox.yml?query=event%3Aschedule)
44 | [coverage](https://codecov.io/github/ansible/ansible-lint)
45 | [open PRs](https://github.com/ansible/ansible-lint/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
46 |
47 | ## ansible-navigator
48 |
49 | [CI](https://github.com/ansible/ansible-navigator/actions/workflows/tox.yml?query=event%3Aschedule)
50 | [coverage](https://codecov.io/github/ansible/ansible-navigator)
51 | [open PRs](https://github.com/ansible/ansible-navigator/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
52 |
53 | ## mkdocs-ansible
54 |
55 | [CI](https://github.com/ansible/mkdocs-ansible/actions/workflows/tox.yml?query=event%3Aschedule)
56 | [coverage](https://codecov.io/github/ansible/mkdocs-ansible)
57 | [open PRs](https://github.com/ansible/mkdocs-ansible/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
58 |
59 | ## molecule
60 | [CI](https://github.com/ansible/molecule/actions/workflows/tox.yml?query=event%3Aschedule)
61 | [coverage](https://codecov.io/github/ansible/molecule)
62 | [open PRs](https://github.com/ansible/molecule/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
63 |
64 | ## pytest-ansible
65 | [CI](https://github.com/ansible/pytest-ansible/actions/workflows/tox.yml?query=event%3Aschedule)
66 | [coverage](https://codecov.io/github/ansible/pytest-ansible)
67 | [open PRs](https://github.com/ansible/pytest-ansible/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
68 |
69 | ## team-devtools
70 | [CI](https://github.com/ansible/team-devtools/actions/workflows/test.yml?query=event%3Aschedule)
71 | [open PRs](https://github.com/ansible/team-devtools/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
72 |
73 | ## tox-ansible
74 | [CI](https://github.com/ansible/tox-ansible/actions/workflows/tox.yml?query=event%3Aschedule)
75 | [coverage](https://codecov.io/github/ansible/tox-ansible)
76 | [open PRs](https://github.com/ansible/tox-ansible/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
77 |
78 | ## vscode-ansible
79 | [CI](https://github.com/ansible/vscode-ansible/actions/workflows/ci.yaml?query=event%3Aschedule)
80 | [coverage](https://codecov.io/github/ansible/vscode-ansible)
81 | [open PRs](https://github.com/ansible/vscode-ansible/pulls?q=sort%3Aupdated-desc+is%3Apr+is%3Aopen+-is%3Adraft)
82 |
83 | ## [ansible-backstage-plugins](https://github.com/ansible/ansible-backstage-plugins)
84 | (private)
85 |
--------------------------------------------------------------------------------
/src/team_devtools/check_platform_constraints.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Validate and enforce platform constraints.
3 |
4 | This script:
5 | 1. Defines platform constraints as constants
6 | 2. Validates dependencies in pyproject.toml against these constraints
7 | 3. Updates renovate.json with allowedVersions rules to prevent automated bumps
8 | """
9 |
10 | import json
11 | import sys
12 | from pathlib import Path
13 |
14 | import tomllib
15 | from packaging.requirements import Requirement
16 | from packaging.specifiers import SpecifierSet
17 | from packaging.version import Version
18 |
19 |
20 | # Platform compatibility constraints for AAP and RHEL
21 | # These represent the MAXIMUM versions we can use to remain compatible
22 | # with downstream platform packages
23 | PLATFORM_CONSTRAINTS = {
24 | "ansible-core": "<2.17", # AAP 2.5/2.6 ships ansible-core 2.16.x
25 | "cffi": "<1.16", # RHEL 8/9 ships cffi 1.15.x
26 | "django": "<4.3", # at 4.2
27 | "importlib-metadata": "<6.1", # at 6.0.1
28 | "jsonschema": "<4.22", # at 4.21.1
29 | "packaging": "<25.0", # galaxy-importer constraint
30 | "pyyaml": "<6.0.2", # at 6.0.1
31 | "python-gnupg": "<0.5.3", # at 0.5.2,
32 | "setuptools": "<65.6", # RHEL 8/9 ships setuptools 65.5.1
33 | }
34 |
35 |
36 | def get_constraints() -> dict[str, SpecifierSet]:
37 | """Get platform constraints as SpecifierSet objects.
38 |
39 | Returns dict like: {"ansible-core": SpecifierSet("<2.17"), "cffi": SpecifierSet("<1.16")}
40 | """
41 | return {name: SpecifierSet(spec) for name, spec in PLATFORM_CONSTRAINTS.items()}
42 |
43 |
44 | def check_dependency_compatibility(dep_str: str, constraints: dict[str, SpecifierSet]) -> list[str]:
45 | """Check if a dependency violates platform constraints.
46 |
47 | Returns list of violation messages.
48 | """
49 | violations = []
50 |
51 | try:
52 | req = Requirement(dep_str)
53 | except Exception: # noqa: BLE001
54 | return violations
55 |
56 | # Check if this package has platform constraints
57 | if req.name not in constraints:
58 | return violations
59 |
60 | platform_specifier = constraints[req.name]
61 |
62 | # Find the minimum version required by the dependency
63 | min_version = None
64 | for spec in req.specifier:
65 | if spec.operator in (">=", ">"):
66 | version = Version(spec.version)
67 | if min_version is None or version > min_version:
68 | min_version = version
69 |
70 | if min_version is None:
71 | return violations
72 |
73 | # Check if minimum version satisfies platform constraints
74 | if min_version not in platform_specifier:
75 | violations.append(
76 | f"❌ {dep_str}\n"
77 | f" Platform constraint: {req.name}{platform_specifier}\n"
78 | f" Your minimum version ({min_version}) violates platform constraint\n"
79 | f" Lower the minimum version to be compatible"
80 | )
81 |
82 | return violations
83 |
84 |
85 | def update_renovate_config(
86 | renovate_path: Path, constraints: dict[str, SpecifierSet]
87 | ) -> tuple[bool, str]:
88 | """Update renovate.json with packageRules for platform constraints.
89 |
90 | Returns (changed, message) tuple.
91 | """
92 | if not renovate_path.exists():
93 | return False, "renovate.json not found"
94 |
95 | with renovate_path.open(encoding="utf-8") as f:
96 | config = json.load(f)
97 |
98 | # Build packageRules for our constraints
99 | new_rules = []
100 | for package, constraint in constraints.items():
101 | new_rules.append(
102 | {
103 | "matchPackageNames": [package],
104 | "allowedVersions": str(constraint),
105 | "description": "Platform compatibility constraint",
106 | }
107 | )
108 |
109 | # Get existing packageRules or create new
110 | existing_rules = config.get("packageRules", [])
111 |
112 | # Remove old auto-generated rules (those with our description or old description)
113 | old_descriptions = {
114 | "Platform compatibility constraint",
115 | "Platform compatibility constraint from .config/platform-constraints.txt",
116 | }
117 | filtered_rules = [
118 | rule for rule in existing_rules if rule.get("description") not in old_descriptions
119 | ]
120 |
121 | # Add new rules
122 | config["packageRules"] = filtered_rules + new_rules
123 |
124 | # Check if anything changed
125 | if config.get("packageRules") == existing_rules:
126 | return False, "renovate.json already up to date"
127 |
128 | # Write back with pretty formatting
129 | with renovate_path.open("w", encoding="utf-8") as f:
130 | json.dump(config, f, indent=2)
131 | f.write("\n")
132 |
133 | return True, f"Updated renovate.json with {len(new_rules)} constraint rule(s)"
134 |
135 |
136 | def check_all_dependencies(pyproject: dict, constraints: dict[str, SpecifierSet]) -> list[str]:
137 | """Check all dependency types in pyproject.toml.
138 |
139 | Args:
140 | pyproject: Parsed pyproject.toml data
141 | constraints: Platform constraints
142 |
143 | Returns:
144 | List of violation messages
145 | """
146 | violations = []
147 |
148 | # Check project.dependencies
149 | dependencies = pyproject.get("project", {}).get("dependencies", [])
150 | for dep in dependencies:
151 | dep_violations = check_dependency_compatibility(dep, constraints)
152 | violations.extend(dep_violations)
153 |
154 | # Check project.optional-dependencies
155 | optional_deps = pyproject.get("project", {}).get("optional-dependencies", {})
156 | for deps in optional_deps.values():
157 | for dep in deps:
158 | dep_violations = check_dependency_compatibility(dep, constraints)
159 | violations.extend(dep_violations)
160 |
161 | # Check dependency-groups (PEP 735)
162 | dep_groups = pyproject.get("dependency-groups", {})
163 | for deps in dep_groups.values():
164 | for dep in deps:
165 | dep_violations = check_dependency_compatibility(dep, constraints)
166 | violations.extend(dep_violations)
167 |
168 | return violations
169 |
170 |
171 | def main() -> int:
172 | """Main entry point."""
173 | # Get constraints from constants
174 | constraints = get_constraints()
175 |
176 | # Check the current working directory's pyproject.toml and renovate.json
177 | cwd = Path.cwd()
178 | pyproject_file = cwd / "pyproject.toml"
179 | renovate_file = cwd / "renovate.json"
180 |
181 | print(f"📋 Platform constraints loaded: {len(constraints)}") # noqa: T201
182 | for package, constraint in constraints.items():
183 | print(f" • {package}{constraint!s}") # noqa: T201
184 | print() # noqa: T201
185 |
186 | # Check pyproject.toml dependencies
187 | violations = []
188 | if pyproject_file.exists():
189 | with pyproject_file.open("rb") as f:
190 | pyproject = tomllib.load(f)
191 | violations = check_all_dependencies(pyproject, constraints)
192 |
193 | # Update renovate.json
194 | changed, message = update_renovate_config(renovate_file, constraints)
195 | if changed:
196 | print(f"✅ {message}") # noqa: T201
197 | else:
198 | print(f"[i] {message}") # noqa: T201
199 | print() # noqa: T201
200 |
201 | # Report violations
202 | if violations:
203 | print("🚫 Platform constraint violations in pyproject.toml:") # noqa: T201
204 | print() # noqa: T201
205 | for violation in violations:
206 | print(violation) # noqa: T201
207 | print() # noqa: T201
208 | return 1
209 |
210 | print("✅ All dependencies compatible with platform constraints") # noqa: T201
211 | return 0
212 |
213 |
214 | if __name__ == "__main__": # pragma: no cover
215 | sys.exit(main())
216 |
--------------------------------------------------------------------------------
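The module docstring above outlines a three-step flow: constant constraints, validation of `pyproject.toml` dependencies, and an update of `renovate.json`. As a minimal sketch of how those helpers compose when called directly (the function names and signatures come from the file above; the requirement string and the `renovate.json` path are illustrative only):

```python
# Sketch only: drive the helpers from check_platform_constraints.py by hand.
from pathlib import Path

from team_devtools.check_platform_constraints import (
    check_dependency_compatibility,
    get_constraints,
    update_renovate_config,
)

constraints = get_constraints()  # e.g. {"ansible-core": SpecifierSet("<2.17"), ...}

# A dependency whose minimum version exceeds the platform maximum is reported.
for violation in check_dependency_compatibility("ansible-core>=2.17.5", constraints):
    print(violation)

# Rewrites packageRules in the given renovate.json so Renovate stops proposing
# bumps beyond the platform constraints; returns whether the file changed.
changed, message = update_renovate_config(Path("renovate.json"), constraints)
print(changed, message)
```

--------------------------------------------------------------------------------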
/pyproject.toml:
--------------------------------------------------------------------------------
1 | #:tombi schema.strict = false
2 |
3 | [project]
4 | name = "team-devtools"
5 | description = "..."
6 | readme = "README.md"
7 | # https://peps.python.org/pep-0621/#readme
8 | requires-python = ">=3.11"
9 | dependencies = ["packaging>=23.2"]
10 | dynamic = ["version"]
11 |
12 | [project.scripts]
13 | check-platform-constraints = "team_devtools.check_platform_constraints:main"
14 |
15 | [dependency-groups]
16 | dev = [
17 | "coverage[toml]>=7.6.9",
18 | "mkdocs-exclude>=1.0.2",
19 | "pytest>=9",
20 | "pytest-instafail>=0.5.0",
21 | "pytest-plus>=0.8.1",
22 | "pytest-sugar>=1.1.1",
23 | "pytest-xdist>=3.8.0",
24 | ]
25 | docs = [
26 | "argparse-manpage>=4.7",
27 | "mkdocs-ansible>=24.12.0",
28 | "mkdocs-exclude>=1.0.2",
29 | ]
30 | lint = ["pre-commit>=3.5.0", "pre-commit-uv>=4.1.4"]
31 | pkg = ["build[virtualenv]", "twine"]
32 |
33 | [build-system]
34 | requires = [
35 | "setuptools_scm[toml] >= 7.0.5", # required for "no-local-version" scheme
36 | "setuptools >= 65.3.0", # required by pyproject+setuptools_scm integration and editable installs
37 | ]
38 | build-backend = "setuptools.build_meta"
39 |
40 | [tool.codespell]
41 | ignore-words-list = ["SME"]
42 |
43 | # Keep this default because the xml/report commands do not know to load it from the config file:
44 | # data_file = ".coverage"
45 | [tool.coverage.paths]
46 | source = ["src", "test", ".tox/*/site-packages"]
47 |
48 | [tool.coverage.report]
49 | exclude_also = ["if TYPE_CHECKING:", "pragma: no cover"]
50 | fail_under = 94
51 | ignore_errors = false
52 | omit = ["test/*"]
53 | show_missing = true
54 | skip_covered = true
55 | skip_empty = true
56 |
57 | [tool.coverage.run]
58 | # branch is more reliable than lines, protects against false positives
59 | branch = true
60 | concurrency = ["multiprocessing", "thread"]
61 | # avoid false-positive warning; we still fail if the coverage level is not met
62 | disable_warnings = ["no-data-collected"]
63 | parallel = true
64 | relative_files = true # needed by sonarcloud
65 | source = ["src"]
66 |
67 | [tool.pytest.ini_options]
68 | addopts = ["--durations=10", "--failed-first", "-p no:pytest_cov"]
69 | norecursedirs = [
70 | "*.egg",
71 | ".cache",
72 | ".config",
73 | ".eggs",
74 | ".git",
75 | ".github",
76 | ".mypy_cache",
77 | ".projects",
78 | ".tox",
79 | "__pycache__",
80 | "build",
81 | "collections",
82 | "dist",
83 | "docs",
84 | "site",
85 | "src/*.egg-info",
86 | ]
87 |
88 | [tool.ruff]
89 | builtins = ["__"]
90 | cache-dir = "./.cache/.ruff"
91 | fix = true
92 | line-length = 100
93 | required-version = ">=0.7.1"
94 | target-version = "py310"
95 |
96 | [tool.ruff.lint]
97 | external = [
98 | "DOC", # pydoclint
99 | ]
100 | ignore = [
101 | "COM812", # conflicts with ISC001 on format
102 | "E501", # line-too-long / rely on black
103 | "ISC001", # conflicts with COM812 on format
104 | "S603", # subprocess
105 | "S607", # subprocess
106 | ]
107 | select = ["ALL"]
108 |
109 | [tool.ruff.lint.flake8-pytest-style]
110 | parametrize-values-type = "tuple"
111 |
112 | [tool.ruff.lint.isort]
113 | known-first-party = ["src"]
114 | lines-after-imports = 2 # Ensures consistency regardless of whether variable or function/class definitions follow the imports
115 |
116 | [tool.ruff.lint.per-file-ignores]
117 | "test/*" = ["S101", "PLC0415"]
118 |
119 | [tool.ruff.lint.pydocstyle]
120 | convention = "google"
121 |
122 | [tool.setuptools_scm]
123 | # To prevent accidental pickup of incomplete version tags such as 'v6'
124 | git_describe_command = [
125 | "git",
126 | "describe",
127 | "--dirty",
128 | "--long",
129 | "--tags",
130 | "--match",
131 | "v*.*",
132 | ]
133 | local_scheme = "no-local-version"
134 | tag_regex = "^(?P<prefix>v)?(?P<version>[0-9.]+)(?P<suffix>.*)?$"
135 | version_file = "src/team_devtools/_version.py"
136 |
137 | [tool.tomlsort]
138 | in_place = true
139 | sort_inline_tables = true
140 | sort_table_keys = true
141 |
142 | [tool.tox]
143 | env_list = ["pkg", "lint", "docs", "py", "devel"]
144 | isolated_build = true
145 | min_version = "4.28.4"
146 | requires = [
147 | "tox>=4.28.4",
148 | "tox-extra >= 2.0.1",
149 | "tox-uv >= 1.28.0",
150 | "setuptools >= 65.3.0", # editable installs
151 | ]
152 |
153 | [tool.tox.env.devel]
154 | commands = [
155 | ["sh", "-c", "uv pip list -q --format=freeze | tr '\\n' ','"],
156 | { extend = true, of = ["tool", "tox", "env_run_base", "commands"], replace = "ref" },
157 | ["git", "restore", "uv.lock"],
158 | ]
159 | description = "Run the tests with newest dependencies (no lock and allowing prereleases)"
160 | runner = "uv-venv-runner"
161 | uv_sync_flags = ["--upgrade", "--prerelease=allow"]
162 | uv_sync_locked = false
163 |
164 | [tool.tox.env.docs]
165 | commands = [
166 | [
167 | "mkdocs",
168 | { default = ["build", "--strict", "--site-dir=_readthedocs/html/"], extend = true, replace = "posargs" },
169 | ],
170 | ]
171 | commands_post = []
172 | commands_pre = []
173 | dependency_groups = ["docs"]
174 | description = "Builds docs"
175 | skip_install = false
176 |
177 | [tool.tox.env.lint]
178 | commands = [["pre-commit", "run", "--all-files", "--show-diff-on-failure"]]
179 | commands_post = []
180 | commands_pre = []
181 | dependency_groups = ["lint"]
182 | description = "Run all linters"
183 | pass_env = ["HOMEPATH", "PROGRAMDATA", "RUFF_OUTPUT_FORMAT"]
184 | runner = "uv-venv-runner"
185 | skip_install = true
186 |
187 | [tool.tox.env.pkg]
188 | commands = [
189 | ["rm", "-rf", "{tox_root}/dist/"],
190 | [
191 | "python",
192 | "-m",
193 | "build",
194 | "--outdir",
195 | "{tox_root}/dist/",
196 | "{tox_root}",
197 | ],
198 | ["sh", "-c", "python -m twine check --strict {tox_root}/dist/*"],
199 | ]
200 | description = "Build package and check metadata"
201 | group = "pkg"
202 |
203 | [tool.tox.env_run_base]
204 | allowlist_externals = ["rm", "sh"]
205 | commands = [
206 | [
207 | "python",
208 | "-c",
209 | "import pathlib; pathlib.Path(\"{env_site_packages_dir}/cov.pth\").write_text(\"import coverage; coverage.process_startup()\")",
210 | ],
211 | [
212 | "coverage",
213 | "run",
214 | "-m",
215 | "pytest",
216 | { default = [
217 | "-ra",
218 | "-n",
219 | "auto",
220 | "--showlocals",
221 | "--doctest-modules",
222 | "--durations=10",
223 | "--junitxml=./junit.xml",
224 | ], extend = true, replace = "posargs" },
225 | ],
226 | ["coverage", "combine", "-q"],
227 | ["coverage", "xml", "-o", "{env_dir}/coverage.xml", "--fail-under=0"],
228 | ["coverage", "lcov", "-q", "--fail-under=0"],
229 | ["coverage", "report"],
230 | ]
231 | commands_post = []
232 | commands_pre = [["coverage", "erase"]]
233 | dependency_groups = ["dev"]
234 | description = """Run the tests: py{py_dot_ver}"""
235 | package = "editable"
236 | pass_env = [
237 | "CI",
238 | "CONTAINER_*",
239 | "CURL_CA_BUNDLE",
240 | "FORCE_COLOR",
241 | "DOCKER_*",
242 | "GITHUB_*",
243 | "HOME",
244 | "LANG",
245 | "LC_*",
246 | "NO_COLOR",
247 | "PRE_COMMIT_HOME",
248 | "PYTEST_*", # allows developer to define their own preferences
249 | "PYTEST_REQPASS", # needed for CI
250 | "PYTHON*", # PYTHONPYCACHEPREFIX, PYTHONIOENCODING, PYTHONBREAKPOINT,...
251 | "PYTHONBREAKPOINT",
252 | "PYTHONIOENCODING",
253 | "PYTHONPYCACHEPREFIX",
254 | "PY_COLORS",
255 | "REQUESTS_CA_BUNDLE",
256 | "RTD_TOKEN",
257 | "SETUPTOOLS_SCM_DEBUG",
258 | "SSH_AUTH_SOCK",
259 | "TERM",
260 | "USER",
261 | "SSL_CERT_FILE", # https proxies
262 | "SSH_AUTH_SOCK", # may be needed by git
263 | "UV_*",
264 | ]
265 | runner = "uv-venv-lock-runner"
266 |
267 | [tool.tox.env_run_base.set_env]
268 | COVERAGE_COMBINED = "{env_dir}/.coverage"
269 | COVERAGE_FILE = "{env:COVERAGE_FILE:{env_dir}/.coverage.{env_name}}"
270 | COVERAGE_PROCESS_START = "{tox_root}/pyproject.toml"
271 | # see https://github.com/tox-dev/tox/issues/2092#issuecomment-2538729079
272 | # see https://github.com/Kozea/CairoSVG/issues/392#issuecomment-2538707712
273 | DYLD_FALLBACK_LIBRARY_PATH = "/opt/homebrew/lib:{env:DYLD_FALLBACK_LIBRARY_PATH}"
274 |
275 | [tool.uv]
276 | default-groups = ["dev", "docs", "lint", "pkg"]
277 | package = true
278 |
--------------------------------------------------------------------------------
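One non-obvious piece of the tox configuration above is the first command in `[tool.tox.env_run_base]`, which writes a `cov.pth` file so that coverage is also collected in subprocesses. A minimal sketch of the same mechanism; the directory argument is an assumption for illustration, since tox substitutes `{env_site_packages_dir}` at run time:

```python
# Sketch of the subprocess-coverage bootstrap used by env_run_base above.
from pathlib import Path


def enable_subprocess_coverage(site_packages: Path) -> None:
    # Lines in a .pth file that start with "import" are executed at interpreter
    # startup, so every Python process launched inside the tox env (including
    # pytest-xdist workers) starts coverage, provided COVERAGE_PROCESS_START
    # points at a config file (here: pyproject.toml, per env_run_base.set_env).
    (site_packages / "cov.pth").write_text(
        "import coverage; coverage.process_startup()"
    )
```

--------------------------------------------------------------------------------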
/docs/guides/releases.md:
--------------------------------------------------------------------------------
1 | # Releases
2 |
3 | ## Overview
4 |
5 | The projects maintained by the Ansible dev tools team have a target release frequency of one month. Some projects may have more than one release per month, depending on project velocity or the frequency of bug fixes and feature additions.
6 |
7 | One person within the devtools team acts as **release manager** each month and reviews each project to confirm that a release was made for that month.
8 |
9 | ## Checklist
10 |
11 | - [ ] If a particular project's CI tests are failing, the release manager will coordinate with the project's SME to clear the block using the [#ansible-devtools] Slack channel.
12 | - [ ] If a project already has a release within the current month, a new release is not necessary unless there are merged PRs and release notes that warrant one.
13 | - [ ] A release should be made only if it contains at least one change that affects the deliverable.
14 |
15 | ## Release order
16 |
17 | ### Python projects
18 |
19 | Stage 1: release the following first, if needed:
20 |
21 | - [ansible-compat](https://github.com/ansible/ansible-compat/releases)
22 |
23 | Stage 2: release the following projects, in no particular order:
24 |
25 | - [ansible-creator](https://github.com/ansible/ansible-creator/releases)
26 | - [ansible-dev-environment](https://github.com/ansible/ansible-dev-environment/releases)
27 | - [ansible-lint](https://github.com/ansible/ansible-lint/releases)
28 | - [ansible-navigator](https://github.com/ansible/ansible-navigator/releases)
29 | - [molecule](https://github.com/ansible/molecule/releases)
30 | - [pytest-ansible](https://github.com/ansible/pytest-ansible/releases)
31 | - [tox-ansible](https://github.com/ansible/tox-ansible/releases)
32 |
33 | ### Update galaxy-importer before downstream release
34 |
35 | - [galaxy-importer](https://github.com/ansible/galaxy-importer)
36 | - Update the ansible-lint version in [setup.cfg](https://github.com/ansible/galaxy-importer/blob/master/setup.cfg) and open a PR. Before doing this, ensure the ansible-lint version is confirmed for the downstream release. Ask the Hub team to review the PR in either the [#ansible-galaxy-internal] or [#wg-hub-delivery] Slack channel.
37 | - Notify Partner Engineering about the ansible-lint version update in importer in the [#ansible-partners] Slack channel using `@ansible-pe`.
38 | - Ask the Hub team to make a new release of galaxy-importer.
39 | - Add the newly released importer version to the downstream packages list to notify PDE of the change.
40 |
41 | ### ADT Release
42 |
43 | Finally, after running Dependabot so the release notes include the dependency updates:
44 |
45 | - [ansible-dev-tools](https://github.com/ansible/ansible-dev-tools/releases)
46 |
47 | This releases both a Python package and a container image. Validate both to ensure each reflects the latest releases.
48 |
49 | - [ansible-dev-tools on pypi](https://pypi.org/project/ansible-dev-tools/#history)
50 | - [ansible-dev-tools image](https://github.com/ansible/ansible-dev-tools/pkgs/container/community-ansible-dev-tools)
51 |
52 | ### vscode-ansible
53 |
54 | Our [vscode-ansible](https://github.com/ansible/vscode-ansible/releases) extension needs to be released after the ADT package because it uses both the Python packages and the container image. Releasing it while only the Python packages have been updated results in testing against older versions when the execution environment is used.
55 |
56 | ### Update DevSpaces image
57 |
58 | Whenever the upstream `ansible-devspaces` container is released, the image SHA in the `devfile.yaml` of the [ansible-devspaces-demo](https://github.com/redhat-developer-demos/ansible-devspaces-demo) repository must be updated. Verify that the automated pull request for this update was created correctly.
59 |
60 | ## Schedule
61 |
62 | Releases should be made on the first Wednesday of the month, but can be made the following Wednesday if necessary. This document should be updated with a pull request after the releases are complete.
63 |
64 | ### 2025-12
65 |
66 | Release manager: @cidrblock
67 |
68 | Releases:
69 |
70 | - ansible-compat - [v25.12.0](https://github.com/ansible/ansible-compat/releases/tag/v25.12.0)
71 | - ansible-creator - [v25.12.0](https://github.com/ansible/ansible-creator/releases/tag/v25.12.0)
72 | - ansible-dev-environment - [v25.12.2](https://github.com/ansible/ansible-dev-environment/releases/tag/v25.12.2)
73 | - ansible-dev-tools - [v25.12.0](https://github.com/ansible/ansible-dev-tools/releases/tag/v25.12.0)
74 | - ansible-lint - [v25.12.0](https://github.com/ansible/ansible-lint/releases/tag/v25.12.0)
75 | - ansible-navigator - [v25.12.0](https://github.com/ansible/ansible-navigator/releases/tag/v25.12.0)
76 | - molecule - [v25.12.0](https://github.com/ansible/molecule/releases/tag/v25.12.0)
77 | - pytest-ansible - [v25.12.0](https://github.com/ansible/pytest-ansible/releases/tag/v25.12.0)
78 | - tox-ansible - [v25.12.0](https://github.com/ansible/tox-ansible/releases/tag/v25.12.0)
79 |
80 | ### 2025-05
81 |
82 | Release manager: @shatakshiiii
83 |
84 | Releases:
85 |
86 | - ansible-compat - [v25.5.0](https://github.com/ansible/ansible-compat/releases/tag/v25.5.0)
87 | - ansible-dev-environment - [v25.5.0](https://github.com/ansible/ansible-dev-environment/releases/tag/v25.5.0)
88 | - ansible-lint - [v25.5.0](https://github.com/ansible/ansible-lint/releases/tag/v25.5.0)
89 | - ansible-creator - [v25.5.0](https://github.com/ansible/ansible-creator/releases/tag/v25.5.0)
90 | - molecule - [v25.5.0](https://github.com/ansible/molecule/releases/tag/v25.5.0)
91 | - pytest-ansible - [v25.5.0](https://github.com/ansible/pytest-ansible/releases/tag/v25.5.0)
92 | - ansible-navigator - [v25.5.0](https://github.com/ansible/ansible-navigator/releases/tag/v25.5.0)
93 | - tox-ansible - [v25.5.0](https://github.com/ansible/tox-ansible/releases/tag/v25.5.0)
94 | - ansible-dev-tools - [v25.5.2](https://github.com/ansible/ansible-dev-tools/releases/tag/v25.5.2)
95 |
96 | ### 2025-04
97 |
98 | Release manager: @abhikdps
99 |
100 | Releases:
101 |
102 | - ansible-creator - [v25.4.1](https://github.com/ansible/ansible-creator/releases/tag/v25.4.1)
103 | - ansible-dev-environment - [v25.4.0](https://github.com/ansible/ansible-dev-environment/releases/tag/v25.4.0)
104 | - ansible-dev-tools - [v25.4.1](https://github.com/ansible/ansible-dev-tools/releases/tag/v25.4.1)
105 | - ansible-lint - [v25.4.0](https://github.com/ansible/ansible-lint/releases/tag/v25.4.0)
106 | - ansible-navigator - [v25.4.1](https://github.com/ansible/ansible-navigator/releases/tag/v25.4.1)
107 | - molecule [v25.4.0](https://github.com/ansible/molecule/releases/tag/v25.4.0)
108 | - pytest-ansible - [v25.4.1](https://github.com/ansible/pytest-ansible/releases/tag/v25.4.1)
109 | - tox-ansible [v25.4.0](https://github.com/ansible/tox-ansible/releases/tag/v25.4.0)
110 |
111 | ### 2025-03
112 |
113 | Release manager: @alisonlhart
114 |
115 | Completed date:
116 |
117 | Releases:
118 |
119 | - ansible-creator [v25.3.1](https://github.com/ansible/ansible-creator/releases/tag/v25.3.1)
120 |
121 | ### 2025-01
122 |
123 | Release manager: @audgirka
124 |
125 | Completed date:
126 |
127 | Releases:
128 |
129 | - ansible-creator [v25.0.0](https://github.com/ansible/ansible-creator/releases/tag/v25.0.0)
130 | - ansible-dev-environment [25.1.0](https://github.com/ansible/ansible-dev-environment/releases/tag/v25.1.0)
131 | - ansible-dev-tools [v25.1.0](https://github.com/ansible/ansible-dev-tools/releases/tag/v25.1.0)
132 | - ansible-lint [v25.1.0](https://github.com/ansible/ansible-lint/releases/tag/v25.1.0)
133 | - ansible-navigator [v25.1.0](https://github.com/ansible/ansible-navigator/releases/tag/v25.1.0)
134 | - molecule [25.1.0](https://github.com/ansible/molecule/releases/tag/v25.1.0)
135 | - pytest-ansible [v25.1.0](https://github.com/ansible/pytest-ansible/releases/tag/v25.1.0)
136 | - tox-ansible [v25.1.0](https://github.com/ansible/tox-ansible/releases/tag/v25.1.0)
137 |
138 | ### 2024-12
139 |
140 | Release manager: @shatakshiiii
141 |
142 | Completed date: 2024-12-17
143 |
144 | Notes: All projects were released.
145 |
146 | ### 2024-11
147 |
148 | Release manager:
149 |
150 | Completed date:
151 |
152 | Notes:
153 |
154 | ### 2024-10
155 |
156 | Release manager: @audgirka
157 |
158 | Completed date:
159 |
160 | Notes:
161 |
162 | - ansible-compat [v24.10.0](https://github.com/ansible/ansible-compat/releases/tag/v24.10.0)
163 | - ansible-lint [v24.10.0](https://github.com/ansible/ansible-lint/releases/tag/v24.10.0)
164 | - tox-ansible [v24.10.0](https://github.com/ansible/tox-ansible/releases/tag/v24.10.0)
165 | - ansible-navigator [v24.10.0](https://github.com/ansible/ansible-navigator/releases/tag/v24.10.0)
166 | - ansible-creator [v24.11.0](https://github.com/ansible/ansible-creator/releases/tag/v24.11.0)
167 | - ansible-dev-tools [v24.11.0](https://github.com/ansible/ansible-dev-tools/releases/tag/v24.11.0)
168 |
169 | ### 2024-09
170 |
171 | Release manager: @shatakshiiii
172 |
173 | Completed date: 2024-09-18
174 |
175 | Notes:
176 |
177 | ### 2024-08
178 |
179 | Release manager: @Qalthos
180 |
181 | Completed date:
182 |
183 | Notes:
184 |
185 | - ansible-navigator [24.8.0](https://github.com/ansible/ansible-navigator/releases/tag/v24.8.0) released 2024-08-13
186 | - pytest-ansible [24.8.0](https://github.com/ansible/pytest-ansible/releases/tag/v24.8.0) released 2024-08-16
187 | - tox-ansible [24.8.0](https://github.com/ansible/tox-ansible/releases/tag/v24.8.0) released 2024-08-16
188 | - molecule [24.8.0](https://github.com/ansible/molecule/releases/tag/v24.8.0) released 2024-08-16
189 | - ansible-compat [24.8.0](https://github.com/ansible/ansible-compat/releases/tag/v24.8.0) released 2024-08-19
190 |
191 | ### 2024-07
192 |
193 | Release manager: @alisonlhart
194 |
195 | Completed date: 2024-07-18
196 |
197 | [#ansible-partners]: https://redhat.enterprise.slack.com/archives/CE3UL7F8V
198 | [#ansible-galaxy-internal]: https://redhat.enterprise.slack.com/archives/CBPKRHHG9
199 | [#wg-hub-delivery]: https://redhat.enterprise.slack.com/archives/C07BMJL2X42
200 | [#ansible-devtools]: https://redhat.enterprise.slack.com/archives/C01NQV614EA
201 |
--------------------------------------------------------------------------------
/.github/workflows/tox.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: tox
3 |
4 | on:
5 | workflow_call:
6 | inputs:
7 | default_python:
8 | default: "3.12"
9 | description: Default python version for other jobs
10 | required: false
11 | type: string
12 | environment:
13 | default: ""
14 | description: Which GHA environment to use
15 | required: false
16 | type: string
17 | jobs_producing_coverage:
18 | default: 8
19 | description: Number of jobs producing coverage.xml files (deprecated, no longer used)
20 | required: false
21 | type: number
22 | min_python:
23 | default: "3.10"
24 | description: Minimum python version for matrix generation
25 | required: false
26 | type: string
27 | max_python:
28 | default: "3.14"
29 | description: Maximum python version for matrix generation
30 | required: false
31 | type: string
32 | other_names:
33 | default: |
34 | docs
35 | lint
36 | pkg
37 | py313-milestone
38 | py314-milestone
39 | description: Implicit names for the matrix
40 | required: false
41 | type: string
42 | other_names_also:
43 | default: ""
44 | description: Additional names for the matrix
45 | required: false
46 | type: string
47 | platforms:
48 | default: linux,macos
49 | description: Comma-separated list of platforms to test on; supported values are linux, macos, windows.
50 | required: false
51 | type: string
52 | run_pre:
53 | default: ""
54 | description: Preparatory command to run before test commands.
55 | required: false
56 | type: string
57 | run_post:
58 | default: ""
59 | description: Command to run after test commands.
60 | required: false
61 | type: string
62 | skip_explode:
63 | default: "0"
64 | description: Set it to "1" to skip creating jobs for each python version.
65 | type: string
66 | node-version-file:
67 | default: ""
68 | description: Path to the Node.js version file (e.g., `.tool-versions`)
69 | required: false
70 | type: string
71 | log_paths:
72 | default: |
73 | .tox/**/pyvenv.cfg
74 | .tox/**/coverage.xml
75 | .tox/**/log/
76 | !**/.DS_Store
77 | !**/venv/
78 | !**/popen-gw0/
79 | description: Paths to log files to upload as artifacts.
80 | required: false
81 | type: string
82 | # keep permissions at top level because this is a reusable workflow
83 | permissions:
84 | checks: read
85 | contents: read
86 | id-token: write
87 | packages: write # some tox environments might produce containers
88 | pull-requests: write # allow codenotify to comment on pull-request
89 | env:
90 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # might be needed by tox commands
91 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
92 | FORCE_COLOR: 1 # tox, pytest, ansible-lint
93 | PY_COLORS: 1
94 |
95 | jobs:
96 | prepare:
97 | name: prepare
98 | runs-on: ubuntu-24.04
99 | environment: ${{ inputs.environment }}
100 | outputs:
101 | matrix: ${{ steps.generate_matrix.outputs.matrix }}
102 | steps:
103 | - name: Determine matrix
104 | id: generate_matrix
105 | uses: ansible/actions/matrix@main
106 | with:
107 | min_python: ${{ inputs.min_python }}
108 | max_python: ${{ inputs.max_python }}
109 | default_python: ${{ inputs.default_python }}
110 | other_names: |
111 | ${{ inputs.other_names }}
112 | ${{ inputs.other_names_also }}
113 | skip_explode: ${{ inputs.skip_explode }}
114 | platforms: ${{ inputs.platforms }}
115 |
116 | test:
117 | name: ${{ matrix.name }}
118 | runs-on: ${{ matrix.os || 'ubuntu-24.04' }}
119 | environment: ${{ inputs.environment }}
120 | needs:
121 | - prepare
122 | defaults:
123 | run:
124 | shell: ${{ matrix.shell || 'bash'}}
125 | strategy:
126 | fail-fast: false
127 | matrix: ${{ fromJson(needs.prepare.outputs.matrix) }}
128 |
129 | steps:
130 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
131 | with:
132 | fetch-depth: 0 # needed by setuptools-scm
133 | submodules: true
134 |
135 | - name: Set pre-commit cache
136 | uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4
137 | if: ${{ matrix.name == 'lint' }}
138 | with:
139 | path: |
140 | ~/.cache/pre-commit
141 | key: pre-commit-${{ matrix.name }}-${{ hashFiles('.pre-commit-config.yaml') }}
142 |
143 | - name: Install the latest version of uv
144 | uses: astral-sh/setup-uv@v7
145 | with:
146 | python-version: ${{ matrix.uv_python_version || '3.12' }}
147 |
148 | - name: Install and validate python build tools
149 | run: |
150 | set -exuo pipefail
151 | python3 --version
152 | which -a python3
153 | which -a uv
154 | uv --version
155 | uv tool install --with tox-uv tox
156 | uv tool install pip
157 | uv tool install coverage
158 | pip --version
159 | tox --version
160 | coverage --version
161 |
162 | - name: Set up Node.js
163 | if: ${{ inputs.node-version-file != '' }}
164 | uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
165 | with:
166 | node-version-file: ${{ inputs.node-version-file }}
167 |
168 | - name: Run pre
169 | if: ${{ inputs.run_pre }}
170 | run: ${{ inputs.run_pre }}
171 |
172 | - name: Prepare path for uv
173 | run: |
174 | echo "$HOME/.local/bin" >> "$GITHUB_PATH"
175 |
176 | - name: Install mise
177 | if: ${{ matrix.mise == 'true' }}
178 | run: |
179 | set -exuo pipefail
180 | type mise >/dev/null || {
181 | curl -s https://mise.run | MISE_QUIET=1 sh
182 | echo "$HOME/.local/share/mise/bin" >> $GITHUB_PATH
183 | echo "$HOME/.local/share/mise/shims" >> $GITHUB_PATH
184 | }
185 | mise install
186 |
187 | - name: Ensure mise python3 is used by default
188 | if: ${{ matrix.mise == 'true' }}
189 | run: |
190 | set -exuo pipefail
191 | if [[ "$(which python3)" != */mise/* ]]; then
192 | echo "python3 is not the mise python3"
193 | exit 1
194 | fi
195 |
196 | - run: ${{ matrix.command }}
197 |
198 | - run: ${{ matrix.command2 }}
199 | if: ${{ matrix.command2 }}
200 |
201 | - run: ${{ matrix.command3 }}
202 | if: ${{ matrix.command3 }}
203 |
204 | - run: ${{ matrix.command4 }}
205 | if: ${{ matrix.command4 }}
206 |
207 | - run: ${{ matrix.command5 }}
208 | if: ${{ matrix.command5 }}
209 |
210 | - name: Run post
211 | if: ${{ inputs.run_post }}
212 | run: ${{ inputs.run_post }}
213 |
214 | - name: Archive logs and coverage data
215 | uses: coactions/upload-artifact@ec1957e16e4ecd304d3a115907ccb4ba5f636e9d # v4
216 | with:
217 | name: logs-${{ matrix.name }}.zip
218 | include-hidden-files: true
219 | if-no-files-found: error
220 | path: ${{ inputs.log_paths }}
221 |
222 | - name: Upload coverage data to codecov.io
223 | id: upload-coverage
224 | if: ${{ !cancelled() && hashFiles('**/coverage.xml') != '' }}
225 | uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
226 | with:
227 | name: ${{ matrix.name }}
228 | fail_ci_if_error: true
229 | disable_search: true
230 | plugins: noop
231 | files: "**/coverage.xml"
232 | # Only one flag is allowed since https://github.com/codecov/gazebo/pull/3375
233 | # os is not needed because it is detected automatically
234 | flags: ${{ runner.arch }}-${{ matrix.python_version }}
235 | use_oidc: ${{ !(github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork) }}
236 |
237 | - name: Upload test results to codecov.io
238 | id: upload-test-results
239 | if: ${{ !cancelled() && hashFiles('junit.xml') != '' }}
240 | uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1
241 | with:
242 | fail_ci_if_error: true
243 | name: ${{ matrix.name }}
244 | # unable to use wildcards yet due to https://github.com/codecov/test-results-action/issues/110
245 | flags: ${{ matrix.python_version }},${{ matrix.os }}
246 | use_oidc: ${{ !(github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork) }}
247 |
248 | - name: Report failure if git reports dirty status
249 | run: |
250 | # See https://github.com/codecov/codecov-action/issues/1851
251 | rm -rf codecov codecov.SHA256SUM codecov.SHA256SUM.sig
252 | if [[ -n $(git status -s) ]]; then
253 | # shellcheck disable=SC2016
254 | echo -n '::error file=git-status::'
255 | printf '### Failed as git reported modified and/or untracked files\n```\n%s\n```\n' "$(git status -s)" | tee -a "$GITHUB_STEP_SUMMARY"
256 | exit 99
257 | fi
258 |
259 | check:
260 | if: always()
261 | environment: ${{ inputs.environment }}
262 | needs:
263 | - test
264 | runs-on: ubuntu-24.04
265 |
266 | steps:
267 | - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
268 |
269 | - name: Install the latest version of uv
270 | uses: astral-sh/setup-uv@v7
271 |
272 | - name: Save PR number
273 | if: github.event_name == 'pull_request'
274 | run: |
275 | echo "PR Number: ${{ github.event.pull_request.number }}" > pr_number.txt
276 | cat pr_number.txt
277 |
278 | - name: Upload PR number artifact
279 | if: github.event_name == 'pull_request'
280 | uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
281 | with:
282 | name: pr_number
283 | path: pr_number.txt
284 |
285 | - name: Merge logs into a single archive
286 | # Should do nothing if job is retried
287 | uses: actions/upload-artifact/merge@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
288 | with:
289 | name: logs.zip
290 | pattern: logs-*.zip
291 | delete-merged: true
292 | include-hidden-files: true
293 | separate-directories: true
294 |
295 | - name: Decide whether the needed jobs succeeded or failed
296 | id: alls-green
297 | uses: re-actors/alls-green@release/v1
298 | with:
299 | jobs: ${{ toJSON(needs) }}
300 |
301 | # See: https://github.com/sourcegraph/codenotify/issues/32
302 | # - name: Notify repository owners about changes affecting them
303 | # uses: sourcegraph/codenotify@54e4320f0d93f162a371d8d9dc1fb11018199746 # v0.6.4
304 | # env:
305 | # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
306 | # # https://github.com/sourcegraph/codenotify/issues/19
307 | # continue-on-error: true
308 |
309 | - name: Send CI failure notification
310 | if: failure() && github.ref == 'refs/heads/main'
311 | env:
312 | SLACK_WEBHOOK_URL: ${{ secrets.DEVTOOLS_CI_SLACK_URL }}
313 | run: |
314 | if [ -n "$SLACK_WEBHOOK_URL" ]; then
315 | curl -X POST -H 'Content-type: application/json' \
316 | --data "{\"text\":\" Python tests failed in jobs: test for '${GITHUB_REPOSITORY}' (tox.yml). Check logs: '${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}'\"}" \
317 | $SLACK_WEBHOOK_URL
318 | fi
319 |
--------------------------------------------------------------------------------
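The `prepare` job above delegates matrix generation to `ansible/actions/matrix`. Purely as an illustration of how the `min_python`, `max_python`, `other_names`, and `skip_explode` inputs combine into job names such as `lint` or `py314`, here is a rough sketch; it is not that action's actual implementation, and the function name and return shape are assumptions made for the example:

```python
# Illustrative only: NOT the code of ansible/actions/matrix.
def build_matrix(
    min_python: str,
    max_python: str,
    other_names: list[str],
    skip_explode: bool = False,
) -> list[dict[str, str]]:
    # Implicit entries such as "docs", "lint", "pkg" are always included.
    entries = [{"name": name} for name in other_names]
    if not skip_explode:
        # Explode one "pyXYZ" entry per minor version between min and max.
        lo, hi = int(min_python.split(".")[1]), int(max_python.split(".")[1])
        entries += [{"name": f"py3{minor}"} for minor in range(lo, hi + 1)]
    return entries


# build_matrix("3.10", "3.14", ["docs", "lint", "pkg"]) would yield entries for
# docs, lint, pkg, py310, py311, py312, py313 and py314.
```

--------------------------------------------------------------------------------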
/test/test_check_platform_constraints.py:
--------------------------------------------------------------------------------
1 | """Tests for check-platform-constraints script."""
2 |
3 | import json
4 | from pathlib import Path
5 |
6 | import pytest
7 | from packaging.specifiers import SpecifierSet
8 |
9 | from team_devtools.check_platform_constraints import (
10 | PLATFORM_CONSTRAINTS,
11 | check_dependency_compatibility,
12 | get_constraints,
13 | update_renovate_config,
14 | )
15 |
16 |
17 | # Constants
18 | EXPECTED_RULE_COUNT = 2
19 | EXPECTED_CONSTRAINT_COUNT = 9
20 |
21 |
22 | def test_get_constraints() -> None:
23 | """Test getting constraints from constants."""
24 | result = get_constraints()
25 |
26 | assert len(result) == EXPECTED_CONSTRAINT_COUNT
27 | assert "ansible-core" in result
28 | assert "cffi" in result
29 | assert "setuptools" in result
30 | assert "packaging" in result
31 | assert str(result["ansible-core"]) == "<2.17"
32 | assert str(result["cffi"]) == "<1.16"
33 | assert str(result["setuptools"]) == "<65.6"
34 | assert str(result["packaging"]) == "<25.0"
35 |
36 |
37 | def test_platform_constraints_constant() -> None:
38 | """Test that PLATFORM_CONSTRAINTS constant is properly defined."""
39 | assert isinstance(PLATFORM_CONSTRAINTS, dict)
40 | assert len(PLATFORM_CONSTRAINTS) == EXPECTED_CONSTRAINT_COUNT
41 | assert PLATFORM_CONSTRAINTS["ansible-core"] == "<2.17"
42 | assert PLATFORM_CONSTRAINTS["cffi"] == "<1.16"
43 | assert PLATFORM_CONSTRAINTS["setuptools"] == "<65.6"
44 | assert PLATFORM_CONSTRAINTS["packaging"] == "<25.0"
45 |
46 |
47 | def test_check_dependency_compatibility_no_violations() -> None:
48 | """Test dependency that doesn't violate constraints."""
49 | constraints = {"ansible-core": SpecifierSet("<2.17")}
50 | dep = "ansible-core>=2.16.14"
51 |
52 | violations = check_dependency_compatibility(dep, constraints)
53 |
54 | assert violations == []
55 |
56 |
57 | def test_check_dependency_compatibility_with_violation() -> None:
58 | """Test dependency that violates platform constraint."""
59 | constraints = {"ansible-core": SpecifierSet("<2.17")}
60 | dep = "ansible-core>=2.17.10"
61 |
62 | violations = check_dependency_compatibility(dep, constraints)
63 |
64 | assert len(violations) == 1
65 | assert "ansible-core>=2.17.10" in violations[0]
66 | assert "2.17.10" in violations[0]
67 |
68 |
69 | def test_check_dependency_compatibility_unrelated_package() -> None:
70 | """Test dependency for package not in constraints."""
71 | constraints = {"ansible-core": SpecifierSet("<2.17")}
72 | dep = "pytest>=8.0.0"
73 |
74 | violations = check_dependency_compatibility(dep, constraints)
75 |
76 | assert violations == []
77 |
78 |
79 | def test_check_dependency_compatibility_multiple_constraints() -> None:
80 | """Test with multiple platform constraints."""
81 | constraints = {
82 | "ansible-core": SpecifierSet("<2.17"),
83 | "cffi": SpecifierSet("<1.17"),
84 | }
85 | dep = "cffi>=1.15.1"
86 |
87 | violations = check_dependency_compatibility(dep, constraints)
88 |
89 | assert violations == []
90 |
91 |
92 | def test_check_dependency_compatibility_edge_case_equal_version() -> None:
93 | """Test when minimum version equals platform maximum."""
94 | constraints = {"ansible-core": SpecifierSet("<2.17")}
95 | dep = "ansible-core>=2.17.0"
96 |
97 | violations = check_dependency_compatibility(dep, constraints)
98 |
99 | assert len(violations) == 1
100 |
101 |
102 | def test_update_renovate_config_new_rules(tmp_path: Path) -> None:
103 | """Test adding new rules to renovate.json."""
104 | renovate_file = tmp_path / "renovate.json"
105 | renovate_file.write_text(
106 | json.dumps(
107 | {
108 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
109 | "extends": ["config:base"],
110 | },
111 | indent=2,
112 | )
113 | )
114 |
115 | constraints = {
116 | "ansible-core": SpecifierSet("<2.17"),
117 | "cffi": SpecifierSet("<1.17"),
118 | }
119 |
120 | changed, message = update_renovate_config(renovate_file, constraints)
121 |
122 | assert changed is True
123 | assert "2 constraint rule(s)" in message
124 |
125 | # Verify the file was updated
126 | config = json.loads(renovate_file.read_text())
127 | assert "packageRules" in config
128 | assert len(config["packageRules"]) == EXPECTED_RULE_COUNT
129 | assert config["packageRules"][0]["matchPackageNames"] == ["ansible-core"]
130 | assert config["packageRules"][0]["allowedVersions"] == "<2.17"
131 |
132 |
133 | def test_update_renovate_config_preserves_existing_rules(tmp_path: Path) -> None:
134 | """Test that existing non-platform rules are preserved."""
135 | renovate_file = tmp_path / "renovate.json"
136 | renovate_file.write_text(
137 | json.dumps(
138 | {
139 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
140 | "packageRules": [
141 | {
142 | "matchPackageNames": ["pytest"],
143 | "allowedVersions": "<9.0",
144 | "description": "Some other rule",
145 | },
146 | ],
147 | },
148 | indent=2,
149 | )
150 | )
151 |
152 | constraints = {"ansible-core": "<2.17"}
153 |
154 | changed, _message = update_renovate_config(renovate_file, constraints)
155 |
156 | assert changed is True
157 |
158 | config = json.loads(renovate_file.read_text())
159 | assert len(config["packageRules"]) == EXPECTED_RULE_COUNT
160 | # Original rule preserved
161 | assert config["packageRules"][0]["matchPackageNames"] == ["pytest"]
162 | # New rule added
163 | assert config["packageRules"][1]["matchPackageNames"] == ["ansible-core"]
164 |
165 |
166 | def test_update_renovate_config_replaces_old_platform_rules(tmp_path: Path) -> None:
167 | """Test that old platform constraint rules are replaced."""
168 | renovate_file = tmp_path / "renovate.json"
169 | renovate_file.write_text(
170 | json.dumps(
171 | {
172 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
173 | "packageRules": [
174 | {
175 | "matchPackageNames": ["ansible-core"],
176 | "allowedVersions": "<2.16",
177 | "description": "Platform compatibility constraint",
178 | },
179 | ],
180 | },
181 | indent=2,
182 | )
183 | )
184 |
185 | constraints = {"ansible-core": SpecifierSet("<2.17")}
186 |
187 | changed, _message = update_renovate_config(renovate_file, constraints)
188 |
189 | assert changed is True
190 |
191 | config = json.loads(renovate_file.read_text())
192 | assert len(config["packageRules"]) == 1
193 | assert config["packageRules"][0]["allowedVersions"] == "<2.17"
194 |
195 |
196 | def test_update_renovate_config_nonexistent_file(tmp_path: Path) -> None:
197 | """Test handling of nonexistent renovate.json."""
198 | renovate_file = tmp_path / "renovate.json"
199 | constraints = {"ansible-core": SpecifierSet("<2.17")}
200 |
201 | changed, message = update_renovate_config(renovate_file, constraints)
202 |
203 | assert changed is False
204 | assert "not found" in message
205 |
206 |
207 | def test_update_renovate_config_no_changes_needed(tmp_path: Path) -> None:
208 | """Test when renovate.json already has correct rules."""
209 | constraints = {"ansible-core": SpecifierSet("<2.17")}
210 |
211 | renovate_file = tmp_path / "renovate.json"
212 | renovate_file.write_text(
213 | json.dumps(
214 | {
215 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
216 | "packageRules": [
217 | {
218 | "matchPackageNames": ["ansible-core"],
219 | "allowedVersions": "<2.17",
220 | "description": "Platform compatibility constraint",
221 | },
222 | ],
223 | },
224 | indent=2,
225 | )
226 | )
227 |
228 | changed, message = update_renovate_config(renovate_file, constraints)
229 |
230 | assert changed is False
231 | assert "already up to date" in message
232 |
233 |
234 | def test_get_constraints_returns_specifier_sets() -> None:
235 | """Test that get_constraints returns SpecifierSet objects."""
236 | result = get_constraints()
237 |
238 | for name, spec in result.items():
239 | assert isinstance(spec, SpecifierSet)
240 | assert isinstance(name, str)
241 |
242 |
243 | def test_check_dependency_compatibility_malformed_dependency() -> None:
244 | """Test handling of malformed dependency string."""
245 | constraints = {"ansible-core": SpecifierSet("<2.17")}
246 | dep = "not a valid dependency string!!!"
247 |
248 | violations = check_dependency_compatibility(dep, constraints)
249 |
250 | # Should handle gracefully and return no violations
251 | assert violations == []
252 |
253 |
254 | def test_check_dependency_compatibility_no_minimum_version() -> None:
255 | """Test dependency with no minimum version specifier."""
256 | constraints = {"ansible-core": SpecifierSet("<2.17")}
257 | dep = "ansible-core<2.18" # Only upper bound, no minimum
258 |
259 | violations = check_dependency_compatibility(dep, constraints)
260 |
261 | # Should return no violations since there's no minimum to check
262 | assert violations == []
263 |
264 |
265 | def test_check_dependency_compatibility_greater_than_operator() -> None:
266 | """Test dependency with > operator instead of >=."""
267 | constraints = {"ansible-core": SpecifierSet("<2.17")}
268 | dep = "ansible-core>2.17.0"
269 |
270 | violations = check_dependency_compatibility(dep, constraints)
271 |
272 | # Should detect violation with > operator
273 | assert len(violations) == 1
274 |
275 |
276 | def test_check_dependency_compatibility_multiple_version_specs() -> None:
277 | """Test dependency with multiple version specifiers to check max version selection."""
278 | constraints = {"ansible-core": SpecifierSet("<2.17")}
279 | # Multiple >= specs - should pick the higher one
280 | dep = "ansible-core>=2.15.0,>=2.16.5"
281 |
282 | violations = check_dependency_compatibility(dep, constraints)
283 |
284 | # Should use 2.16.5 as minimum (higher version)
285 | assert violations == []
286 |
287 | # Now with a violation
288 | dep2 = "ansible-core>=2.16.0,>=2.17.5"
289 | violations2 = check_dependency_compatibility(dep2, constraints)
290 | assert len(violations2) == 1
291 |
292 | # Test where second version is lower (covers the branch where version <= min_version)
293 | dep3 = "ansible-core>=2.16.5,>=2.15.0"
294 | violations3 = check_dependency_compatibility(dep3, constraints)
295 | # Should still use 2.16.5 as the minimum (ignores lower version)
296 | assert violations3 == []
297 |
298 |
299 | def test_main_with_no_pyproject(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
300 | """Test main function when pyproject.toml doesn't exist."""
301 | monkeypatch.chdir(tmp_path)
302 |
303 | from team_devtools import check_platform_constraints
304 |
305 | result = check_platform_constraints.main()
306 |
307 | # Should succeed with no violations since no dependencies to check
308 | assert result == 0
309 |
310 |
311 | def test_main_with_violations(
312 | tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
313 | ) -> None:
314 | """Test main function when violations are found."""
315 | pyproject_file = tmp_path / "pyproject.toml"
316 | pyproject_file.write_text('[project]\ndependencies = ["ansible-core>=2.17.5"]\n')
317 |
318 | renovate_file = tmp_path / "renovate.json"
319 | renovate_file.write_text('{"packageRules": []}\n')
320 |
321 | monkeypatch.chdir(tmp_path)
322 |
323 | from team_devtools import check_platform_constraints
324 |
325 | result = check_platform_constraints.main()
326 |
327 | assert result == 1
328 | captured = capsys.readouterr()
329 | assert "constraint violations" in captured.out.lower()
330 |
331 |
332 | def test_main_no_violations(
333 | tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
334 | ) -> None:
335 | """Test main function when no violations are found."""
336 | pyproject_file = tmp_path / "pyproject.toml"
337 | pyproject_file.write_text('[project]\ndependencies = ["ansible-core>=2.16.0"]\n')
338 |
339 | renovate_file = tmp_path / "renovate.json"
340 | renovate_file.write_text('{"packageRules": []}\n')
341 |
342 | monkeypatch.chdir(tmp_path)
343 |
344 | from team_devtools import check_platform_constraints
345 |
346 | result = check_platform_constraints.main()
347 |
348 | assert result == 0
349 | captured = capsys.readouterr()
350 | assert "compatible with platform constraints" in captured.out.lower()
351 |
352 |
353 | def test_main_no_pyproject_or_renovate(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
354 | """Test main function when neither pyproject.toml nor renovate.json exist."""
355 | monkeypatch.chdir(tmp_path)
356 |
357 | from team_devtools import check_platform_constraints
358 |
359 | result = check_platform_constraints.main()
360 |
361 | assert result == 0
362 |
363 |
364 | def test_main_renovate_no_changes(
365 | tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
366 | ) -> None:
367 | """Test main function when renovate.json already has correct rules."""
368 | pyproject_file = tmp_path / "pyproject.toml"
369 | pyproject_file.write_text('[project]\ndependencies = ["ansible-core>=2.16.0"]\n')
370 |
371 | renovate_file = tmp_path / "renovate.json"
372 | renovate_file.write_text(
373 | json.dumps(
374 | {
375 | "packageRules": [
376 | {
377 | "matchPackageNames": ["ansible-core"],
378 | "allowedVersions": "<2.17",
379 | "description": "Platform compatibility constraint",
380 | },
381 | {
382 | "matchPackageNames": ["cffi"],
383 | "allowedVersions": "<1.16",
384 | "description": "Platform compatibility constraint",
385 | },
386 | {
387 | "matchPackageNames": ["setuptools"],
388 | "allowedVersions": "<65.6",
389 | "description": "Platform compatibility constraint",
390 | },
391 | {
392 | "matchPackageNames": ["packaging"],
393 | "allowedVersions": "<25.0",
394 | "description": "Platform compatibility constraint",
395 | },
396 | ],
397 | }
398 | )
399 | )
400 |
401 | monkeypatch.chdir(tmp_path)
402 |
403 | from team_devtools import check_platform_constraints
404 |
405 | result = check_platform_constraints.main()
406 |
407 | assert result == 0
408 | captured = capsys.readouterr()
409 | # The rules are being recreated, not detected as already there
410 | assert "Updated renovate.json" in captured.out or "already up to date" in captured.out
411 |
412 |
413 | def test_main_checks_optional_dependencies(
414 | tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
415 | ) -> None:
416 | """Test main function checks optional-dependencies."""
417 | pyproject_file = tmp_path / "pyproject.toml"
418 | pyproject_file.write_text(
419 | """
420 | [project]
421 | dependencies = ["ansible-core>=2.16.0"]
422 |
423 | [project.optional-dependencies]
424 | server = ["django>=5.0"]
425 | """
426 | )
427 |
428 | renovate_file = tmp_path / "renovate.json"
429 | renovate_file.write_text('{"packageRules": []}\n')
430 |
431 | monkeypatch.chdir(tmp_path)
432 |
433 | from team_devtools import check_platform_constraints
434 |
435 | result = check_platform_constraints.main()
436 |
437 | # Should fail because django>=5.0 violates django<4.3 constraint
438 | assert result == 1
439 | captured = capsys.readouterr()
440 | assert "django" in captured.out.lower()
441 |
442 |
443 | def test_main_checks_dependency_groups(
444 | tmp_path: Path, monkeypatch: pytest.MonkeyPatch, capsys: pytest.CaptureFixture[str]
445 | ) -> None:
446 | """Test main function checks dependency-groups."""
447 | pyproject_file = tmp_path / "pyproject.toml"
448 | pyproject_file.write_text(
449 | """
450 | [project]
451 | dependencies = ["ansible-core>=2.16.0"]
452 |
453 | [dependency-groups]
454 | dev = ["setuptools>=70.0"]
455 | """
456 | )
457 |
458 | renovate_file = tmp_path / "renovate.json"
459 | renovate_file.write_text('{"packageRules": []}\n')
460 |
461 | monkeypatch.chdir(tmp_path)
462 |
463 | from team_devtools import check_platform_constraints
464 |
465 | result = check_platform_constraints.main()
466 |
467 | # Should fail because setuptools>=70.0 violates setuptools<65.6 constraint
468 | assert result == 1
469 | captured = capsys.readouterr()
470 | assert "setuptools" in captured.out.lower()
471 |
472 |
473 | def test_main_script_entry_point(tmp_path: Path) -> None:
474 | """Test the if __name__ == '__main__' entry point by running script directly."""
475 | renovate_file = tmp_path / "renovate.json"
476 | renovate_file.write_text('{"packageRules": []}\n')
477 |
478 | # Copy the script to temp directory
479 | import shutil
480 | import subprocess
481 | import sys
482 |
483 | src_script = (
484 | Path(__file__).parent.parent / "src" / "team_devtools" / "check_platform_constraints.py"
485 | )
486 | dest_script = tmp_path / "check_platform_constraints.py"
487 | shutil.copy(src_script, dest_script)
488 |
489 | # Run the script using subprocess
490 | result = subprocess.run(
491 | [sys.executable, str(dest_script)],
492 | check=False,
493 | capture_output=True,
494 | text=True,
495 | cwd=str(tmp_path),
496 | )
497 |
498 | assert result.returncode == 0
499 |
--------------------------------------------------------------------------------