├── .darglint ├── .flake8 ├── .github ├── actions │ ├── ansible_aws_test_provider │ │ ├── action.yaml │ │ └── create_aws_session.py │ ├── ansible_azure_test_provider │ │ ├── action.yaml │ │ └── create_session.py │ ├── ansible_test_integration │ │ └── action.yml │ ├── ansible_test_splitter │ │ ├── REAME.md │ │ ├── action.yml │ │ ├── list_changed_common.py │ │ ├── list_changed_targets.py │ │ └── test_list_changed_targets.py │ ├── ansible_validate_changelog │ │ ├── action.yml │ │ └── validate_changelog.py │ ├── build_install_collection │ │ └── action.yml │ ├── changelog_evaluator │ │ └── action.yml │ ├── changelog_labeller │ │ └── action.yml │ ├── checkout_dependency │ │ ├── README.md │ │ ├── action.yml │ │ ├── resolve_dependency.py │ │ └── test_resolve_dependency.py │ ├── commit_to_pullrequest │ │ └── action.yml │ ├── create_pullrequest │ │ ├── action.yml │ │ └── run.py │ ├── identify_collection │ │ └── action.yml │ └── tox │ │ ├── action.yml │ │ └── install_packages.py ├── dependabot.yml └── workflows │ ├── ansible-lint.yml │ ├── backport-labeller.yml │ ├── changelog.yml │ ├── coverage_network_devices.yml │ ├── galaxy_importer.yml │ ├── integration.yml │ ├── integration_simple.yml │ ├── release-branch.yml │ ├── release-tag.yml │ ├── safe-to-test.yml │ ├── sanity.yml │ ├── tox-linters.yml │ ├── tox.yml │ ├── unit_galaxy.yml │ ├── unit_source.yml │ └── update_aws_variables.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode └── settings.json ├── LICENSE ├── README.md ├── mypy.ini ├── pyproject.toml └── scripts ├── create_github_release.py ├── update_aws_boto_constraints.py └── update_aws_user_agent.py /.darglint: -------------------------------------------------------------------------------- 1 | [darglint] 2 | # NOTE: All `darglint` styles except for `sphinx` hit ridiculously low 3 | # NOTE: performance on some of the in-project Python modules. 
4 | # Refs:
5 | # * https://github.com/terrencepreilly/darglint/issues/186
6 | docstring_style = sphinx
7 | strictness = full
8 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 |
3 | builtins = _
4 |
5 | # Print the total number of errors:
6 | count = true
7 |
8 | # Don't even try to analyze these:
9 | # Feel free to add as needed, flake8 has no automatic way
10 | # to leverage the gitignore file
11 | extend-exclude =
12 |   # Cache files of MyPy
13 |   .mypy_cache,
14 |   # Cache files of pytest
15 |   .pytest_cache,
16 |   # Occasional virtualenv dir
17 |   .venv,
18 |   # VS Code
19 |   .vscode,
20 |   # Adjacent venv
21 |   venv
22 |
23 | # IMPORTANT: avoid using the ignore option, always use extend-ignore instead
24 | # Completely and unconditionally ignore the following errors:
25 | extend-ignore =
26 |   # Safeguard neutering of flake8-quotes: https://github.com/zheller/flake8-quotes/issues/105
27 |   Q,
28 |   # whitespace before ':' conflicts with black: https://github.com/psf/black/issues/315
29 |   E203,
30 |   # duplicate of pylint W0611 (unused-import)
31 |   F401,
32 |   # duplicate of pylint E0602 (undefined-variable)
33 |   F821,
34 |   # duplicate of pylint W0612 (unused-variable)
35 |   F841,
36 |
37 | # Reasonable compromise:
38 | max-line-length = 100
39 |
40 | # Allow certain violations in certain files:
41 | # Please keep both sections of this list sorted, as it will be easier for others to find and add entries in the future
42 | per-file-ignores =
43 |   # The following ignores have been researched and should be considered permanent.
44 |   # Each should be preceded by an explanation of each of the error codes.
45 |   # If other ignores are added for a specific file in the section following this,
46 |   # these will need to be added to that line as well.
47 |
48 |   # EX000: Example, don't do this
49 |   # scripts/example.py EX000
50 |
51 |   # The following were present during the initial implementation.
52 |   # They are expected to be fixed and unignored over time.
53 |
54 |   # EX000: Example, don't do this
55 |   # scripts/example.py EX000
56 |
57 | # Count the number of occurrences of each error/warning code and print a report:
58 | statistics = true
59 |
--------------------------------------------------------------------------------
/.github/actions/ansible_aws_test_provider/action.yaml:
--------------------------------------------------------------------------------
1 | name: ansible-aws-test-provider
2 | description: create file cloud-config-aws.ini in order to run ansible-test
3 | inputs:
4 |   collection_path:
5 |     description: Path to the AWS collection to create file in.
6 | required: true 7 | ansible_core_ci_key: 8 | description: ansible core ci key 9 | required: true 10 | stage: 11 | description: session stage 12 | default: "prod" 13 | session_id: 14 | description: aws session identifier 15 | default: ${{ github.head_ref }} 16 | 17 | outputs: 18 | configuration_file: 19 | description: aws session details 20 | value: ${{ inputs.collection_path }}/tests/integration/cloud-config-aws.ini 21 | 22 | runs: 23 | using: composite 24 | steps: 25 | - name: Set up Python '3.12' 26 | uses: actions/setup-python@v4 27 | with: 28 | python-version: "3.12" 29 | 30 | - name: install python required modules 31 | run: pip install requests 32 | shell: bash 33 | 34 | - name: create aws session file 35 | run: | 36 | python3 ${{ github.action_path }}/create_aws_session.py 37 | shell: bash 38 | env: 39 | ANSIBLE_CORE_CI_KEY: ${{ inputs.ansible_core_ci_key }} 40 | ANSIBLE_CORE_CI_STAGE: "prod" 41 | ANSIBLE_TEST_CLOUD_CONFIG_FILE: "${{ inputs.collection_path }}/tests/integration/cloud-config-aws.ini" 42 | -------------------------------------------------------------------------------- /.github/actions/ansible_aws_test_provider/create_aws_session.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """Script to request new aws session using ansible_core_ci_key.""" 3 | 4 | import json 5 | import logging 6 | import os 7 | import random 8 | import sys 9 | 10 | import requests 11 | 12 | 13 | FORMAT = "[%(asctime)s] - %(message)s" 14 | logging.basicConfig(format=FORMAT) 15 | logger = logging.getLogger("create_aws_session") 16 | logger.setLevel(logging.DEBUG) 17 | 18 | 19 | def main() -> None: 20 | """Request new aws session credentials. 21 | 22 | :raises ValueError: when ANSIBLE_CORE_CI_KEY environment variable is missing or empty 23 | """ 24 | ansible_core_ci_key = os.environ.get("ANSIBLE_CORE_CI_KEY") or "" 25 | ansible_core_ci_stage = os.environ.get("ANSIBLE_CORE_CI_STAGE") or "prod" 26 | headers = {"Content-Type": "application/json"} 27 | data = { 28 | "config": {"platform": "aws", "version": "sts"}, 29 | "auth": { 30 | "remote": { 31 | "key": ansible_core_ci_key, 32 | "nonce": None, 33 | } 34 | }, 35 | "threshold": 1, 36 | } 37 | if ansible_core_ci_key == "": 38 | logger.error("Empty or missing environment variable 'ANSIBLE_CORE_CI_KEY'") 39 | raise ValueError("ANSIBLE_CORE_CI_KEY environment variable is empty or missing") 40 | logger.info("data -> %s", json.dumps(data).replace(ansible_core_ci_key, "*******")) 41 | session_id = "".join(random.choice("0123456789abcdef") for _ in range(32)) 42 | endpoint_url = ( 43 | f"https://ansible-core-ci.testing.ansible.com/{ansible_core_ci_stage}/aws/{session_id}" 44 | ) 45 | logger.info("Endpoint URL -> '%s'", endpoint_url) 46 | response = requests.put(endpoint_url, data=json.dumps(data), headers=headers, timeout=10) 47 | logger.info("Status: [%d]", response.status_code) 48 | if response.status_code != 200: 49 | logger.info("Response: %s", response.json()) 50 | logger.error("Request failed with [%s]", response.json().get("errorMessage")) 51 | sys.exit(1) 52 | 53 | # create ansible-test credential file 54 | credentials = response.json().get("aws").get("credentials") 55 | cloud_config_file = os.environ.get("ANSIBLE_TEST_CLOUD_CONFIG_FILE") or "cloud-config-aws.ini" 56 | access_key = credentials.get("access_key") 57 | secret_key = credentials.get("secret_key") 58 | session_token = credentials.get("session_token") 59 | aws_credentials = [ 60 | "[default]", 61 | f"aws_access_key: 
{access_key}", 62 | f"aws_secret_key: {secret_key}", 63 | f"security_token: {session_token}", 64 | "aws_region: us-east-1", 65 | "ec2_access_key: {{ aws_access_key }}", 66 | "ec2_secret_key: {{ aws_secret_key }}", 67 | "ec2_region: {{ aws_region }}", 68 | ] 69 | logger.info("writing aws credentials into file => %s", cloud_config_file) 70 | with open(cloud_config_file, mode="w", encoding="utf-8") as file_writer: 71 | file_writer.write("\n".join(aws_credentials)) 72 | 73 | 74 | if __name__ == "__main__": 75 | main() 76 | -------------------------------------------------------------------------------- /.github/actions/ansible_azure_test_provider/action.yaml: -------------------------------------------------------------------------------- 1 | name: ansible-azure-test-provider 2 | description: create file cloud-config-azure.ini in order to run ansible-test 3 | inputs: 4 | collection_path: 5 | description: Path to the collection to create file in. 6 | required: true 7 | ansible_core_ci_key: 8 | description: ansible core ci key 9 | required: true 10 | stage: 11 | description: session stage 12 | default: "prod" 13 | session_id: 14 | description: Azure session identifier 15 | default: ${{ github.head_ref }} 16 | 17 | outputs: 18 | configuration_file: 19 | description: Azure session details 20 | value: ${{ inputs.collection_path }}/tests/integration/cloud-config-azure.ini 21 | 22 | runs: 23 | using: composite 24 | steps: 25 | - name: Set up Python '3.12' 26 | uses: actions/setup-python@v4 27 | with: 28 | python-version: "3.12" 29 | 30 | - name: install python required modules 31 | run: pip install requests 32 | shell: bash 33 | 34 | - name: Generate SSH Key 35 | run: ssh-keygen -t rsa -N "" -f ~/.ssh/id_rsa 36 | shell: bash 37 | 38 | - name: create Azure session file 39 | run: | 40 | python3 ${{ github.action_path }}/create_session.py 41 | shell: bash 42 | env: 43 | ANSIBLE_CORE_CI_KEY: ${{ inputs.ansible_core_ci_key }} 44 | ANSIBLE_CORE_CI_STAGE: "prod" 45 | ANSIBLE_TEST_CLOUD_CONFIG_FILE: "${{ inputs.collection_path }}/tests/integration/cloud-config-azure.ini" 46 | -------------------------------------------------------------------------------- /.github/actions/ansible_azure_test_provider/create_session.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """Script to request new azure session using ansible_core_ci_key.""" 3 | 4 | import json 5 | import os 6 | import secrets 7 | import sys 8 | 9 | from pathlib import PosixPath 10 | 11 | import requests 12 | 13 | 14 | def main() -> None: 15 | """Request new azure session credentials.""" 16 | try: 17 | ansible_core_ci_key = os.environ["ANSIBLE_CORE_CI_KEY"] 18 | except KeyError: 19 | sys.stderr.write("Missing mandatory environment variable ANSIBLE_CORE_CI_KEY.\n") 20 | sys.exit(1) 21 | ansible_core_ci_stage = os.environ.get("ANSIBLE_CORE_CI_STAGE") or "prod" 22 | headers = {"Content-Type": "application/json"} 23 | ansible_ssh_public_key_path = os.environ.get( 24 | "ANSIBLE_TEST_SSH_PUBLIC_KEY_PATH" 25 | ) or os.path.expanduser("~/.ssh/id_rsa.pub") 26 | 27 | data = { 28 | "config": { 29 | "platform": "azure", 30 | "version": "", 31 | "architecture": "", 32 | "public_key": PosixPath(ansible_ssh_public_key_path).read_text(encoding="utf-8"), 33 | }, 34 | "auth": { 35 | "remote": { 36 | "key": ansible_core_ci_key, 37 | "nonce": None, 38 | } 39 | }, 40 | } 41 | session_id = "".join(secrets.choice("0123456789abcdef") for i in range(32)) 42 | endpoint_url = ( 43 | 
f"https://ansible-core-ci.testing.ansible.com/{ansible_core_ci_stage}/azure/{session_id}" 44 | ) 45 | response = requests.put(endpoint_url, data=json.dumps(data), headers=headers, timeout=30) 46 | if response.status_code != 200: 47 | sys.stderr.write( 48 | f"HTTP Status Code error - Expected (200) Received ({response.status_code})" 49 | ) 50 | sys.exit(1) 51 | 52 | # create ansible-test credential file 53 | credentials = response.json().get("azure") 54 | cloud_config_file = os.environ.get("ANSIBLE_TEST_CLOUD_CONFIG_FILE") or "cloud-config-azure.ini" 55 | cloud_config_content = [ 56 | "[default]", 57 | f"AZURE_CLIENT_ID: {credentials.get('clientId')}", 58 | f"AZURE_SECRET: {credentials.get('clientSecret')}", 59 | f"AZURE_SUBSCRIPTION_ID: {credentials.get('subscriptionId')}", 60 | f"AZURE_TENANT: {credentials.get('tenantId')}", 61 | f"RESOURCE_GROUP: {credentials.get('resourceGroupNames')[0]}", 62 | f"RESOURCE_GROUP_SECONDARY: {credentials.get('resourceGroupNames')[1]}", 63 | ] 64 | with open(cloud_config_file, mode="w", encoding="utf-8") as file_writer: 65 | file_writer.write("\n".join(cloud_config_content)) 66 | 67 | 68 | if __name__ == "__main__": 69 | main() 70 | -------------------------------------------------------------------------------- /.github/actions/ansible_test_integration/action.yml: -------------------------------------------------------------------------------- 1 | name: ansible test integration 2 | description: configure cloud environment and run ansible-test integration tests 3 | inputs: 4 | collection_path: 5 | description: Relative path where to run `ansible-test integration` command. 6 | required: true 7 | python_version: 8 | description: Python version to use to run integration tests 9 | required: true 10 | ansible_version: 11 | description: ansible-core version to use to run integration tests 12 | required: true 13 | ansible_test_targets: 14 | description: Integration tests targets 15 | required: false 16 | ansible_test_environment: 17 | description: list of environment variables to set when running ansible-test 18 | required: false 19 | ansible_test_requirement_files: 20 | description: Requirements files containing python dependencies to run integration. 21 | default: "requirements.txt test-requirements.txt" 22 | ansible_test_constraint_files: 23 | description: Collection python constraints files. 
24 |     default: ""
25 |
26 | runs:
27 |   using: composite
28 |   steps:
29 |     - name: Set up Python ${{ inputs.python_version }}
30 |       uses: actions/setup-python@v4
31 |       with:
32 |         python-version: ${{ inputs.python_version }}
33 |
34 |     - name: Install wheel now for faster builds
35 |       run: python3 -m pip install wheel --upgrade
36 |       shell: bash
37 |
38 |     - name: Install ansible-core (${{ inputs.ansible_version }})
39 |       run: python3 -m pip install https://github.com/ansible/ansible/archive/${{ inputs.ansible_version }}.tar.gz --disable-pip-version-check
40 |       shell: bash
41 |
42 |     - name: Disable selinux with selinux_please_lie_to_me
43 |       run: |
44 |         python3 -m pip uninstall -y selinux
45 |         python3 -m pip install selinux_please_lie_to_me
46 |       shell: bash
47 |
48 |     - name: Create requirements files argument
49 |       id: requirements
50 |       run: |
51 |         set -eux
52 |         REQ_ARG=""
53 |         for item in $(echo ${FILES_LIST} | tr ' ' '\n'); do
54 |           REQ_ARG="-r ${item} ${REQ_ARG}"
55 |         done
56 |         echo "argument=${REQ_ARG}" >> $GITHUB_OUTPUT
57 |       shell: bash
58 |       env:
59 |         FILES_LIST: ${{ inputs.ansible_test_requirement_files }}
60 |       if: inputs.ansible_test_requirement_files != ''
61 |
62 |     - name: Create constraints files argument
63 |       id: constraints
64 |       run: |
65 |         set -eux
66 |         CONSTRAINTS_ARG=""
67 |         for item in $(echo ${CONSTRAINTS_FILES} | tr ' ' '\n'); do
68 |           CONSTRAINTS_ARG="-c ${item} ${CONSTRAINTS_ARG}" # pip consumes constraints files via -c, not -r
69 |         done
70 |         echo "argument=${CONSTRAINTS_ARG}" >> $GITHUB_OUTPUT
71 |       shell: bash
72 |       env:
73 |         CONSTRAINTS_FILES: ${{ inputs.ansible_test_constraint_files }}
74 |       if: inputs.ansible_test_constraint_files != ''
75 |
76 |     - name: Install collection python requirements
77 |       run: python3 -m pip install ${{ steps.requirements.outputs.argument }} ${{ steps.constraints.outputs.argument }}
78 |       shell: bash
79 |       working-directory: ${{ inputs.collection_path }}
80 |       if: ${{ (steps.requirements.outputs.argument != '') || (steps.constraints.outputs.argument != '') }}
81 |
82 |     - name: Set environment variables
83 |       run: echo "${{ inputs.ansible_test_environment }}" >> $GITHUB_ENV
84 |       shell: bash
85 |       if: inputs.ansible_test_environment != ''
86 |
87 |     - name: Run integration tests
88 |       run: >-
89 |         ansible-test integration
90 |         --diff
91 |         --no-temp-workdir
92 |         --color
93 |         --skip-tags False
94 |         --retry-on-error
95 |         --continue-on-error
96 |         --python ${{ inputs.python_version }}
97 |         -v
98 |         ${{ inputs.ansible_test_targets }}
99 |       shell: bash
100 |       working-directory: ${{ inputs.collection_path }}
--------------------------------------------------------------------------------
/.github/actions/ansible_test_splitter/REAME.md:
--------------------------------------------------------------------------------
1 | # ansible_test_splitter
2 |
3 | This action identifies the targets impacted by the changes on a pull request and splits them across a number of jobs defined by the user.
4 |
5 | ## Usage
6 |
7 |
8 |
9 | ```yaml
10 | - uses: ansible-network/github_actions/.github/actions/ansible_test_splitter@main
11 |   with:
12 |     # Paths of the collections to test
13 |     collections_to_test: |
14 |       path_to_collection_1
15 |       path_to_collection_2
16 |       (...)
17 |       path_to_collection_n
18 |
19 |     # The total number of jobs to share the targets across
20 |     total_jobs: 5
21 | ```
22 |
23 | The action output is a variable `test_targets` containing a list of chunks for each collection, with the targets for each chunk.
24 | e.g: `community.aws-1:dynamodb_table;community.aws-2:elb_target;community.aws-3:msk_cluster-auth;community.aws-4:secretsmanager_secret;community.aws-5:redshift,ec2_transit_gateway_vpc_attachment`
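A follow-up job can deserialize the companion outputs `test_jobs` (a JSON list of chunk names) and `test_targets_json` (a JSON map of chunk name to targets) with `fromJSON()` to fan the chunks out over a matrix. A minimal, hypothetical sketch (job names, collection paths and the echo step are illustrative, not part of the action):

```yaml
jobs:
  splitter:
    runs-on: ubuntu-latest
    outputs:
      test_targets_json: ${{ steps.splitter.outputs.test_targets_json }}
      test_jobs: ${{ steps.splitter.outputs.test_jobs }}
    steps:
      - id: splitter
        uses: ansible-network/github_actions/.github/actions/ansible_test_splitter@main
        with:
          collections_to_test: path_to_collection_1
          total_jobs: 3

  integration:
    needs: splitter
    if: needs.splitter.outputs.test_jobs != '[]'
    runs-on: ubuntu-latest
    strategy:
      matrix:
        workflow-id: ${{ fromJSON(needs.splitter.outputs.test_jobs) }}
    steps:
      - name: Show the targets assigned to this chunk
        run: echo "${{ fromJSON(needs.splitter.outputs.test_targets_json)[matrix.workflow-id] }}"
```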
25 |
26 |
27 |
28 | ## Relationship between plugins/roles and targets
29 |
30 | This action reads the elements to test from the `plugins` and `roles` directories and the corresponding tests from the `tests/integration/targets` directory. Here are more details on the relationship between plugins/roles and integration test targets:
31 |
32 | - `modules`, the test target should have the same name as the module or define the module name in the `aliases` file
33 |
34 |   _Example_:
35 |
36 |   ```
37 |   |___plugins/modules/my_module.py
38 |   |___tests
39 |       |___integration
40 |           |___targets
41 |               |___my_module
42 |               |___another_test
43 |                   |___aliases (contains the line my_module)
44 |   ```
45 |
46 |   For any change on `plugins/modules/my_module.py`, this action will produce `my_module` and `another_test` as impacted targets.
47 |
48 | - `roles`, the test target should define the role name with the prefix `role/` in the `aliases` file.
49 |
50 |   _Example_:
51 |
52 |   ```
53 |   |___roles/some_role
54 |   |___tests
55 |       |___integration
56 |           |___targets
57 |               |___test_of_some_role
58 |                   |___aliases (contains the line role/some_role)
59 |   ```
60 |
61 |   For any change on `roles/some_role`, this action will produce `test_of_some_role` as the impacted target.
62 |
63 | - For any other plugin (inventory, connection, module_utils, plugin_utils, lookup), the test target should have the same name as the plugin or define the plugin name prefixed by the plugin type and an underscore (e.g. **inventory_myinventory**) in the `aliases` file.
64 |
65 |   _Example_:
66 |
67 |   ```
68 |   |___plugins/lookup/random.py
69 |   |___tests
70 |       |___integration
71 |           |___targets
72 |               |___lookup_random
73 |               |___test_random
74 |                   |___aliases (contains the line lookup_random)
75 |   ```
76 |
77 |   For any change on `plugins/lookup/random.py`, this action will produce `lookup_random` and `test_random` as impacted targets.
78 |
79 | ## Debugging
80 |
81 | - Set the label `test-all-the-targets` on the pull request to run the full test suite instead of only the impacted targets.
82 | - Use `TargetsToTest=collection1:target01,target02;collection2:target03,target4` in the pull request description to run a specific list of targets.
83 |   _Example_: Suppose you need to test the following targets for a pull request:
84 |
85 |   ```yaml
86 |   - collection1: some_test_1 some_test_2
87 |   - collection2: another_test
88 |   ```
89 |
90 |   The pull request description should then contain the following line: `TargetsToTest=collection1:some_test_1,some_test_2;collection2:another_test`.
91 |
--------------------------------------------------------------------------------
/.github/actions/ansible_test_splitter/action.yml:
--------------------------------------------------------------------------------
1 | name: Cloud integration test splitter
2 | description: Evaluate which targets need to be tested.
3 |
4 | inputs:
5 |   collections_to_test:
6 |     description: |
7 |       Paths of the collections to test.
8 |       Provide as a comma- or newline-separated list of collection paths; the base branch to compare against is taken from the `base_ref` input.
9 |       e.g: 'repo_path_1,repo_path_2'
10 |     required: true
11 |   total_jobs:
12 |     description: The total number of jobs to share targets across
13 |     required: false
14 |     default: "3"
15 |   base_ref:
16 |     description: The git base branch to compare with.
17 |     required: false
18 | outputs:
19 |   test_targets:
20 |     description: The list of targets to test, as a concatenated string
21 |     value: ${{ steps.splitter.outputs.test_targets }}
22 |   test_targets_json:
23 |     description: The list of targets to test, as a JSON string
24 |     value: ${{ steps.splitter.outputs.test_targets_json }}
25 |   test_jobs:
26 |     description: The list of generated job keys
27 |     value: ${{ steps.splitter.outputs.test_jobs }}
28 |
29 | runs:
30 |   using: composite
31 |   steps:
32 |     - name: setup python
33 |       uses: actions/setup-python@v4
34 |       with:
35 |         python-version: "3.12"
36 |
37 |     - name: Install python required libraries
38 |       run: pip install -U pyyaml
39 |       shell: bash
40 |
41 |     - name: Set variable to test all targets
42 |       run: echo "ANSIBLE_TEST_ALL_THE_TARGETS=true" >> "$GITHUB_ENV"
43 |       shell: bash
44 |       if: ${{ (contains(github.event.pull_request.labels.*.name, 'test-all-the-targets')) }}
45 |
46 |     - name: Evaluate targets to test
47 |       id: splitter
48 |       run: >-
49 |         python ${{ github.action_path }}/list_changed_targets.py
50 |       env:
51 |         COLLECTIONS_TO_TEST: "${{ inputs.collections_to_test }}"
52 |         TOTAL_JOBS: "${{ inputs.total_jobs }}"
53 |         PULL_REQUEST_BODY: "${{ github.event.pull_request.body }}"
54 |         PULL_REQUEST_BASE_REF: "${{ inputs.base_ref || github.event.pull_request.base.ref }}"
55 |       shell: bash
56 |
--------------------------------------------------------------------------------
/.github/actions/ansible_test_splitter/list_changed_common.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Common helpers for the list_changed_targets executable."""
3 |
4 | import ast
5 | import json
6 | import os
7 | import re
8 | import subprocess
9 |
10 | from collections import defaultdict
11 | from collections.abc import Generator
12 | from pathlib import PosixPath
13 | from typing import Any
14 | from typing import Dict
15 | from typing import List
16 | from typing import Optional
17 |
18 | import yaml
19 |
20 |
21 | def read_collection_name(collection_path: PosixPath) -> str:
22 |     """Read the collection namespace and name from galaxy.yml.
23 |
24 |     :param collection_path: path to the collection
25 |     :returns: collection name as string
26 |     """
27 |     with (collection_path / "galaxy.yml").open() as file_handler:
28 |         content = yaml.safe_load(file_handler)
29 |         return f'{content["namespace"]}.{content["name"]}'
30 |
31 |
32 | def run_command(command: str, chdir: Optional[PosixPath]) -> str:
33 |     """Run a shell command using subprocess.
34 |
35 |     :param command: command to execute
36 |     :param chdir: directory to place in before running the command
37 |     :returns: command output
38 |     """
39 |     with subprocess.Popen(
40 |         command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=chdir
41 |     ) as proc:
42 |         out, _ = proc.communicate()
43 |         return out.decode()
44 |
45 |
46 | def list_pyimport(prefix: str, subdir: str, module_content: str) -> Generator[str, None, None]:
47 |     """Yield the Python imports found in a module.
48 |
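    Example (behaviour pinned by test_list_pyimport below): with
    prefix="ansible_collections.amazon.aws.plugins." and subdir="modules",
    the statement ``from ..module_utils.core import AnsibleAWSModule``
    (relative import, level 2) yields
    "ansible_collections.amazon.aws.plugins.module_utils.core",
    while an absolute ``import time`` is yielded unchanged as "time".
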
49 |     :param prefix: full collection import prefix, e.g. "ansible_collections.amazon.aws.plugins."
50 |     :param subdir: plugins subdirectory holding the module, e.g. "modules"
51 |     :param module_content: module content
52 |     :yields: python module import
53 |     """
54 |     root = ast.parse(module_content)
55 |     for node in ast.walk(root):
56 |         if isinstance(node, ast.Import):
57 |             yield node.names[0].name
58 |         elif isinstance(node, ast.ImportFrom):
59 |             if node.level == 1:
60 |                 current_prefix = f"{prefix}{subdir}."
61 |             elif node.level == 2:
62 |                 current_prefix = f"{prefix}"
63 |             else:
64 |                 current_prefix = ""
65 |             yield f"{current_prefix}{node.module}"
66 |
67 |
68 | def build_import_tree(
69 |     import_path: PosixPath, module_collection_name: str, all_collections_names: list[str]
70 | ) -> tuple[dict[str, list[Any]], dict[str, list[Any]]]:
71 |     """Generate import dependencies for the modules and the module_utils.
72 |
73 |     Say we have the following input:
74 |
75 |     modules: ec2_mod1
76 |         import a_py_mod
77 |         import ansible.basic
78 |     modules: ec2_mod2
79 |         import another_py_mod
80 |         import ansible_collections.amazon.aws.plugins.module_utils.core
81 |     modules: ec2_mod3
82 |         import ansible_collections.amazon.aws.plugins.module_utils.tagging
83 |         import ansible_collections.amazon.aws.plugins.module_utils.waiters
84 |
85 |     module_utils: waiters
86 |         import some_py_mod
87 |         import ansible_collections.amazon.aws.plugins.module_utils.core
88 |     module_utils: tagging
89 |         import some_py_tricky_mod
90 |         import ansible_collections.amazon.aws.plugins.module_utils.core
91 |     module_utils: core
92 |         import some_py_fancy_mod
93 |
94 |     This will generate the following dicts (listing only the imports that are part of this collection):
95 |
96 |     modules_import
97 |     {
98 |         "ec2_mod1": [],
99 |         "ec2_mod2": [
100 |             "ansible_collections.amazon.aws.plugins.module_utils.core",
101 |         ],
102 |         "ec2_mod3": [
103 |             "ansible_collections.amazon.aws.plugins.module_utils.tagging",
104 |             "ansible_collections.amazon.aws.plugins.module_utils.waiters"
105 |         ],
106 |     }
107 |
108 |     utils_import
109 |     {
110 |         "ansible_collections.amazon.aws.plugins.module_utils.core": [
111 |             "ansible_collections.amazon.aws.plugins.module_utils.waiters",
112 |             "ansible_collections.amazon.aws.plugins.module_utils.tagging"
113 |         ]
114 |     }
115 |
116 |     :param all_collections_names: collections names
117 |     :param module_collection_name: current collection name
118 |     :param import_path: the collection path to scan
119 |     :returns: tuple of modules and utils imports
120 |     """
121 |     modules_import = defaultdict(list)  # type: Dict[str, List[Any]]
122 |     prefix = f"ansible_collections.{module_collection_name}.plugins."
123 |     all_prefixes = [f"ansible_collections.{n}.plugins."
for n in all_collections_names] 124 | utils_to_visit = [] 125 | for mod in import_path.glob("plugins/modules/*"): 126 | for i in list_pyimport(prefix, "modules", mod.read_text()): 127 | if any(i.startswith(p) for p in all_prefixes) and i not in modules_import[mod.stem]: 128 | modules_import[mod.stem].append(i) 129 | if i not in utils_to_visit: 130 | utils_to_visit.append(i) 131 | 132 | utils_import = defaultdict(list) # type: Dict[str, List[Any]] 133 | visited = [] 134 | while utils_to_visit: 135 | utils = utils_to_visit.pop() 136 | if utils in visited: 137 | continue 138 | visited.append(utils) 139 | try: 140 | utils_path = import_path / PosixPath( 141 | utils.replace(f"ansible_collections.{module_collection_name}.", "").replace( 142 | ".", "/" 143 | ) 144 | + ".py" 145 | ) 146 | for i in list_pyimport(prefix, "module_utils", utils_path.read_text()): 147 | if i.startswith(prefix) and i not in utils_import[utils]: 148 | utils_import[utils].append(i) 149 | if i not in visited: 150 | utils_to_visit.append(i) 151 | except Exception: # pylint: disable=broad-except 152 | pass 153 | return modules_import, utils_import 154 | 155 | 156 | class WhatHaveChanged: 157 | """A class to store information about changes for a specific collection.""" 158 | 159 | def __init__(self, change_path: PosixPath, base_ref: str) -> None: 160 | """Class constructor. 161 | 162 | :param change_path: path to the change 163 | :param base_ref: pull request base reference 164 | """ 165 | assert isinstance(change_path, PosixPath) 166 | self.collection_path = change_path 167 | self.base_ref = base_ref 168 | self.collection_name = read_collection_name(change_path) 169 | self.files = [] # type: List[PosixPath] 170 | 171 | def changed_files(self) -> list[PosixPath]: 172 | """List of changed files. 173 | 174 | :returns: a list of pathlib.PosixPath 175 | """ 176 | if not self.files: 177 | changed_files_cmd = f"git diff origin/{self.base_ref} --name-only" 178 | print(f"Command for changed files => {changed_files_cmd}") 179 | stdout = run_command(command=changed_files_cmd, chdir=self.collection_path) 180 | self.files = [PosixPath(p) for p in stdout.split("\n") if p] 181 | return self.files 182 | 183 | def targets(self) -> Generator[str, None, None]: 184 | """List the test targets impacted by the change. 185 | 186 | :yields: targets impacted by this change 187 | """ 188 | for change in self.changed_files(): 189 | if str(change).startswith("tests/integration/targets/"): 190 | # These are a special case, we only care that 'something' changed in that test 191 | yield str(change).replace("tests/integration/targets/", "").split("/", maxsplit=1)[ 192 | 0 193 | ] 194 | 195 | def _path_matches(self, base_path: str) -> Generator[PosixPath, None, None]: 196 | """Simplest case, just a file name. 197 | 198 | :param base_path: path of the module 199 | :yields: path to a change file 200 | """ 201 | for changed_file in self.changed_files(): 202 | if str(changed_file).startswith(base_path): 203 | yield PosixPath(changed_file) 204 | 205 | def connection(self) -> Generator[PosixPath, None, None]: 206 | """List the connection plugins impacted by the change. 207 | 208 | :yields: path to a connection plugin change 209 | """ 210 | yield from self._path_matches("plugins/connection/") 211 | 212 | def inventory(self) -> Generator[PosixPath, None, None]: 213 | """List the inventory plugins impacted by the change. 
214 | 215 | :yields: path to an inventory plugin change 216 | """ 217 | yield from self._path_matches("plugins/inventory/") 218 | 219 | def lookup(self) -> Generator[PosixPath, None, None]: 220 | """List the lookup plugins impacted by the change. 221 | 222 | :yields: path to a connection lookup change 223 | """ 224 | yield from self._path_matches("plugins/lookup/") 225 | 226 | def modules(self) -> Generator[PosixPath, None, None]: 227 | """List the modules impacted by the change. 228 | 229 | :yields: path to a module plugin change 230 | """ 231 | yield from self._path_matches("plugins/modules/") 232 | 233 | def roles(self) -> Generator[str, None, None]: 234 | """List the roles impacted by the change. 235 | 236 | :yields: path to a role change 237 | """ 238 | for changed_file in self.changed_files(): 239 | if str(changed_file).startswith("roles/"): 240 | yield str(changed_file).split("/", maxsplit=2)[1] 241 | 242 | def _util_matches( 243 | self, base_path: str, import_path: str 244 | ) -> Generator[tuple[PosixPath, str], None, None]: 245 | """List matching utils files. 246 | 247 | :param base_path: path of the module or plugin util 248 | :param import_path: path of the import library 249 | :yields: path to a module or plugin utils change 250 | """ 251 | # We care about the file, but we also need to find what potential side effects would be for 252 | # our change 253 | base_name = f"ansible_collections.{self.collection_name}.plugins.{import_path}." 254 | for util_change in self.changed_files(): 255 | if str(util_change).startswith(base_path): 256 | yield ( 257 | PosixPath(util_change), 258 | f"{base_name}{util_change.stem}", 259 | ) 260 | 261 | def module_utils(self) -> Generator[tuple[PosixPath, str], None, None]: 262 | """List the Python modules impacted by the change. 263 | 264 | :yields: path to a module util change 265 | """ 266 | yield from self._util_matches("plugins/module_utils/", "module_utils") 267 | 268 | def plugin_utils(self) -> Generator[tuple[PosixPath, str], None, None]: 269 | """List the Python modules impacted by the change. 270 | 271 | :yields: path to a plugin util change 272 | """ 273 | yield from self._util_matches("plugins/plugin_utils/", "plugin_utils") 274 | 275 | 276 | class Target: 277 | """A class to store information about a specific target.""" 278 | 279 | def __init__(self, target_path: PosixPath) -> None: 280 | """Class constructor. 281 | 282 | :param target_path: path to the target 283 | """ 284 | self.path = target_path 285 | self.lines = [line.split("#")[0] for line in target_path.read_text().split("\n") if line] 286 | self.name = target_path.parent.name 287 | self.exec_time = 0 288 | 289 | def is_alias_of(self, name: str) -> bool: 290 | """Test alias target. 291 | 292 | :param name: the name of the source target 293 | :returns: whether target is an alias or not 294 | """ 295 | return name in self.lines or self.name == name 296 | 297 | def is_unstable(self) -> bool: 298 | """Test unstable target. 299 | 300 | :returns: whether target is unstable or not 301 | """ 302 | if "unstable" in self.lines: 303 | return True 304 | return False 305 | 306 | def is_disabled(self) -> bool: 307 | """Test disabled target. 308 | 309 | :returns: whether target is disabled or not 310 | """ 311 | if "disabled" in self.lines: 312 | return True 313 | return False 314 | 315 | def is_slow(self) -> bool: 316 | """Test slow target. 
317 |
318 |         :returns: whether target is slow or not
319 |         """
320 |         # NOTE: Should be replaced by time=3000
321 |         if "slow" in self.lines or "# reason: slow" in self.lines:
322 |             return True
323 |         return False
324 |
325 |     def is_ignored(self) -> bool:
326 |         """Test whether the target should be ignored.
327 |
328 |         :returns: whether target is set as ignored or not
329 |         """
330 |         ignore = {"unsupported", "disabled", "unstable", "hidden"}
331 |         return not ignore.isdisjoint(set(self.lines))
332 |
333 |     def execution_time(self) -> int:
334 |         """Retrieve the execution time of a target.
335 |
336 |         :returns: execution time of the target
337 |         """
338 |         if self.exec_time:
339 |             return self.exec_time
340 |
341 |         self.exec_time = 3000 if self.is_slow() else 180
342 |         for line in self.lines:
343 |             if match := re.match(r"^time=([0-9]+)s\S*$", line):
344 |                 self.exec_time = int(match.group(1))
345 |             elif match := re.match(r"^time=([0-9]+)m\S*$", line):
346 |                 self.exec_time = int(match.group(1)) * 60
347 |             elif match := re.match(r"^time=([0-9]+)\S*$", line):
348 |                 self.exec_time = int(match.group(1))
349 |
350 |         return self.exec_time
351 |
352 |
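# A short illustration of how an aliases file is interpreted, one keyword per
# line (hypothetical file; rules as implemented by Target above):
#
#     cloud/aws    -> kept as a plain alias line
#     slow         -> is_slow() is True; the default execution time becomes 3000s
#     time=300     -> execution_time() returns 300 ("s"/"m" suffixes are supported,
#                     and an explicit time= always overrides the slow default)
#     disabled     -> is_disabled() and is_ignored() are True
#     # comment    -> anything after '#' is stripped before matching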
353 | class Collection:
354 |     """A class storing collection information."""
355 |
356 |     def __init__(self, collection_path: PosixPath) -> None:
357 |         """Class Constructor.
358 |
359 |         :param collection_path: path to the collection
360 |         """
361 |         self.collection_path = collection_path
362 |         self._my_test_plan = []  # type: List[Target]
363 |         self.collection_name = read_collection_name(collection_path)  # type: str
364 |         self.modules_import = {}  # type: Dict[str, List[Any]]
365 |         self.utils_import = {}  # type: Dict[str, List[Any]]
366 |         self.test_groups = []  # type: List[Dict[str, Any]]
367 |
368 |     @property
369 |     def test_plan_names(self) -> list[str]:
370 |         """Return the list of target names in the test plan.
371 |
372 |         :returns: a list of test plan names
373 |         """
374 |         return [t.name for t in self._my_test_plan]
375 |
376 |     @property
377 |     def test_plan(self) -> list[Target]:
378 |         """Get protected attribute _my_test_plan.
379 |
380 |         :returns: a list of test plan objects
381 |         """
382 |         return self._my_test_plan
383 |
384 |     def targets(self) -> Generator[Target, None, None]:
385 |         """List collection targets.
386 |
387 |         :yields: a collection target
388 |         """
389 |         for alias in self.collection_path.glob("tests/integration/targets/*/aliases"):
390 |             yield Target(alias)
391 |
392 |     def _is_target_already_added(self, target_name: str) -> bool:
393 |         """Return true if the target is already part of the test plan.
394 |
395 |         :param target_name: target name being checked
396 |         :returns: whether the target is already part of the test plan or not
397 |         """
398 |         for target_src in self._my_test_plan:
399 |             if target_src.is_alias_of(target_name):
400 |                 return True
401 |         return False
402 |
403 |     def add_target_to_plan(self, target_name: str, is_direct: bool = True) -> None:
404 |         """Add a specific target to the test plan.
405 |
406 |         :param target_name: target name being added
407 |         :param is_direct: whether the target was directly impacted or pulled in as a dependency
408 |         """
409 |         if not self._is_target_already_added(target_name):
410 |             for plan_target in self.targets():
411 |                 if plan_target.is_disabled():
412 |                     continue
413 |                 # For indirect targets we want to skip "ignored" tests
414 |                 if not is_direct and plan_target.is_ignored():
415 |                     continue
416 |                 if plan_target.is_alias_of(target_name):
417 |                     self._my_test_plan.append(plan_target)
418 |
419 |     def cover_all(self) -> None:
420 |         """Cover all the targets available."""
421 |         for cover_target in self.targets():
422 |             self.add_target_to_plan(cover_target.name, is_direct=False)
423 |
424 |     def cover_module_utils(self, pymodule: str, names: list[str]) -> None:
425 |         """Track the targets to run following a change to a module_utils file.
426 |
427 |         :param pymodule: python path of the changed module_utils
428 |         :param names: names of all the collections under test
429 |         """
430 |         if not self.modules_import or not self.utils_import:  # import trees are built lazily
431 |             self.modules_import, self.utils_import = build_import_tree(
432 |                 self.collection_path, self.collection_name, names
433 |             )
434 |
435 |         u_candidates = [pymodule]
436 |         # also add as candidates every module_utils that is imported by another module_utils
437 |         u_candidates += [
438 |             import_lib for _, imports in self.utils_import.items() for import_lib in imports
439 |         ]
440 |
441 |         for mod, mod_imports in self.modules_import.items():
442 |             if any(util in mod_imports for util in u_candidates):
443 |                 self.add_target_to_plan(mod, is_direct=False)
444 |
445 |     def slow_targets_to_test(self) -> list[str]:
446 |         """List collection slow targets.
447 |
448 |         :returns: list of slow targets
449 |         """
450 |         return sorted(list({t.name for t in self.test_plan if t.is_slow()}))
451 |
452 |     def regular_targets_to_test(self) -> list[str]:
453 |         """List regular targets to test.
454 |
455 |         :returns: list of regular targets
456 |         """
457 |         return sorted(list({t.name for t in self._my_test_plan if not t.is_slow()}))
458 |
459 |
460 | class ElGrandeSeparator:
461 |     """A class to build output for the targets to test."""
462 |
463 |     def __init__(self, collections_items: list[Collection], number_jobs: int) -> None:
464 |         """Class constructor.
465 |
466 |         :param collections_items: list of collections being tested
467 |         :param number_jobs: number of jobs to share targets on
468 |         """
469 |         self.collections = collections_items
470 |         self.total_jobs = number_jobs
471 |         self.targets_per_slot = 10
472 |
473 |     def output(self) -> dict[str, str]:
474 |         """Produce output for the targets to test.
475 |
476 |         :returns: a dict with the raw string, JSON map and job list forms of the plan
477 |         """
478 |         batches = []
479 |         for col in self.collections:
480 |             slots = [f"{col.collection_name}-{i+1}" for i in range(self.total_jobs)]
481 |             for batch in self.build_up_batches(slots, col):
482 |                 batches.append(batch)
483 |         raw_string = ";".join([f"{x}:{','.join(y)}" for x, y in batches])
484 |         raw_json = json.dumps({x: " ".join(y) for x, y in batches})
485 |         jobs = json.dumps([x for x, _ in batches])
486 |         return {"raw": raw_string, "raw_json": raw_json, "jobs": jobs}
487 |
488 |     def build_up_batches(
489 |         self, slots: list[str], my_collection: Collection
490 |     ) -> Generator[tuple[str, list[str]], None, None]:
491 |         """Build up batches.
492 |
493 |         :param slots: list of slots
494 |         :param my_collection: collection containing the list of targets
495 |         :yields: batches
496 |         """
497 |         if not my_collection.test_groups:
498 |             sorted_targets = sorted(
499 |                 my_collection.test_plan, key=lambda x: x.execution_time(), reverse=True
500 |             )
501 |             my_collection.test_groups = [{"total": 0, "targets": []} for _ in range(len(slots))]
502 |             my_collection.test_groups = equal_share(sorted_targets, len(slots))
503 |
504 |         for group in my_collection.test_groups:
505 |             if group["targets"] == []:
506 |                 continue
507 |             my_slot = slots.pop(0)
508 |             yield (my_slot, group["targets"])
509 |
510 |
511 | def make_unique(data: list[str]) -> list[str]:
512 |     """Remove duplicate items from a list of strings, preserving order.
513 |
514 |     :param data: input list of strings
515 |     :returns: A list containing unique items
516 |     """
517 |     tmp = []
518 |     for i in data:
519 |         if i not in tmp:
520 |             tmp.append(i)
521 |     return tmp
522 |
523 |
524 | def equal_share(targets: list[Target], nbchunks: int) -> list[dict[str, Any]]:
525 |     """Split a list of targets into chunks of roughly equal total duration.
526 |
527 |     :param targets: The list of targets to share
528 |     :param nbchunks: The number of chunks to share targets into
529 |     :returns: A list of dictionaries, each with a set of targets and their total duration
530 |     """
531 |     total_data = [0 for _ in range(nbchunks)]
532 |     targets_data = [[] for _ in range(nbchunks)]  # type: List[List[str]]
533 |
534 |     for my_target in targets:
535 |         index = total_data.index(min(total_data))
536 |         total_data[index] += my_target.execution_time()
537 |         targets_data[index].append(my_target.name)
538 |
539 |     return [{"total": total_data[i], "targets": targets_data[i]} for i in range(nbchunks)]
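# Worked example of the greedy split above (mirrors test_splitter_with_time):
# with targets a=3000s, b=3000s, d=300s, c=18s (already sorted by descending
# execution time by build_up_batches) and nbchunks=3, each target goes to the
# chunk with the smallest running total:
#   a -> chunk 0 (total 3000)
#   b -> chunk 1 (total 3000)
#   d -> chunk 2 (total 300)
#   c -> chunk 2 (total 318)
# yielding [{"total": 3000, "targets": ["a"]}, {"total": 3000, "targets": ["b"]},
#           {"total": 318, "targets": ["d", "c"]}].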
540 |
541 |
542 | def read_test_all_the_targets() -> bool:
543 |     """Test if all targets should be executed.
544 |
545 |     :returns: whether the full suite should be run or not
546 |     """
547 |     test_all = os.environ.get("ANSIBLE_TEST_ALL_THE_TARGETS", "")
548 |     test_all_the_targets = False
549 |     if test_all and test_all.lower() == "true":
550 |         test_all_the_targets = True
551 |     return test_all_the_targets
552 |
553 |
554 | def read_total_jobs() -> int:
555 |     """Read the number of jobs to divide targets into.
556 |
557 |     :returns: total jobs as an integer
558 |     """
559 |     default_value = "3"
560 |     total_jobs = os.environ.get("TOTAL_JOBS", default_value)
561 |     try:
562 |         result = int(total_jobs)
563 |     except ValueError:
564 |         result = int(default_value)
565 |     return result
566 |
567 |
568 | def read_targets_to_test() -> dict[str, list[str]]:
569 |     """Determine the specific targets to test based on a TargetsToTest flag in the pull request body.
570 |
571 |     :returns: list of targets to test per collection
572 |     """
573 |     targets_to_test = {}
574 |     body = os.environ.get("PULL_REQUEST_BODY", "")
575 |     regex = re.compile(r"^TargetsToTest=([\w\.\:,;]+)", re.MULTILINE | re.IGNORECASE)
576 |     match = regex.search(body)
577 |     if match:
578 |         for item in match.group(1).split(";"):
579 |             if not item:
580 |                 continue
581 |             elements = item.split(":")
582 |             targets_to_test[elements[0]] = elements[1].split(",")
583 |     return targets_to_test
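# Example (mirrors test_read_targets_to_test): a pull request body containing
# the line
#   TargetsToTest=collection1:target_01,target_02;collection2:target_2
# is parsed (case-insensitively) into
#   {"collection1": ["target_01", "target_02"], "collection2": ["target_2"]}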
584 |
585 |
586 | def read_collections_to_test() -> list[PosixPath]:
587 |     """Read the collection paths to test from the COLLECTIONS_TO_TEST environment variable.
588 |
589 |     :returns: a list of collection paths
590 |     """
591 |     return [
592 |         PosixPath(path)
593 |         for path in os.environ.get("COLLECTIONS_TO_TEST", "").replace("\n", ",").split(",")
594 |         if path.strip()
595 |     ]
--------------------------------------------------------------------------------
/.github/actions/ansible_test_splitter/list_changed_targets.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Script to list the targets to test for a pull request."""
3 |
4 | import json
5 | import os
6 |
7 | from pathlib import PosixPath
8 | from typing import Dict
9 | from typing import List
10 | from typing import Union
11 |
12 | from list_changed_common import Collection
13 | from list_changed_common import ElGrandeSeparator
14 | from list_changed_common import WhatHaveChanged
15 | from list_changed_common import make_unique
16 | from list_changed_common import read_collections_to_test
17 | from list_changed_common import read_targets_to_test
18 | from list_changed_common import read_test_all_the_targets
19 | from list_changed_common import read_total_jobs
20 |
21 |
22 | class ListChangedTargets:
23 |     """A class used to list the targets impacted by the changes of a pull request."""
24 |
25 |     def __init__(self) -> None:
26 |         """Class constructor."""
27 |         self.collections_to_test = read_collections_to_test()
28 |         self.total_jobs = read_total_jobs()
29 |
30 |         self.test_all_the_targets = read_test_all_the_targets()
31 |         self.targets_to_test = read_targets_to_test()
32 |         self.base_ref = os.environ.get("PULL_REQUEST_BASE_REF", "")
33 |
34 |     def make_change_targets_to_test(self, collections: list[Collection]) -> dict[str, list[str]]:
35 |         """Build the test plan from an explicit list of targets to test.
36 |
37 |         :param collections: list of collections being tested
38 |         :returns: list of targets per collection
39 |         """
40 |         changes = {}
41 |         for collection in collections:
42 |             name = collection.collection_name
43 |             if name in self.targets_to_test:
44 |                 for target in self.targets_to_test[name]:
45 |                     collection.add_target_to_plan(target)
46 |             changes[name] = collection.test_plan_names
47 |
48 |         return changes
49 |
50 |     def make_change_for_all_targets(self, collections: list[Collection]) -> dict[str, list[str]]:
51 |         """Build the test plan for the full test suite.
52 |
53 |         :param collections: list of collections being tested
54 |         :returns: list of all targets per collection
55 |         """
56 |         changes = {}
57 |         for collection in collections:
58 |             collection.cover_all()
59 |             changes[collection.collection_name] = collection.test_plan_names
60 |
61 |         return changes
62 |
63 |     def make_changed_targets(self, collections: list[Collection]) -> dict[str, list[str]]:
64 |         """Build the test plan from the files changed in the pull request.
65 |
66 |         :param collections: list of collections being tested
67 |         :returns: list of targets per collection
68 |         """
69 |         listed_changes = {}  # type: Dict[str, Dict[str, List[str]]]
70 |         collections_names = [collection.collection_name for collection in collections]
71 |
72 |         def _add_changed_target(
73 |             name: str, ref_path: Union[PosixPath, str], plugin_type: str
74 |         ) -> None:
75 |             if plugin_type == "targets":
76 |                 file_name, plugin_file_name = str(ref_path), str(ref_path)
77 |             elif plugin_type == "modules":
78 |                 file_name = PosixPath(ref_path).stem
79 |                 plugin_file_name = file_name
80 |             elif plugin_type == "roles":
81 |                 file_name = str(ref_path)
82 |                 plugin_file_name = f"role/{ref_path}"
83 |             else:
84 |                 file_name = PosixPath(ref_path).stem
85 |                 plugin_file_name = f"{plugin_type}_{PosixPath(ref_path).stem}"
86 |             listed_changes[name][plugin_type].append(file_name)
87 |             for collection in collections:
88 |                 collection.add_target_to_plan(plugin_file_name)
89 |
90 |         for whc in [WhatHaveChanged(path, self.base_ref) for path in self.collections_to_test]:
91 |             print(f"changed file for collection [{whc.collection_name}] => {whc.changed_files()}")
92 |             listed_changes[whc.collection_name] = {
93 |                 "modules": [],
94 |                 "inventory": [],
95 |                 "connection": [],
96 |                 "module_utils": [],
97 |                 "plugin_utils": [],
98 |                 "lookup": [],
99 |                 "targets": [],
100 |                 "roles": [],
101 |             }
102 |             for path in whc.modules():
103 |                 _add_changed_target(whc.collection_name, path, "modules")
104 |             for path in whc.inventory():
105 |                 _add_changed_target(whc.collection_name, path, "inventory")
106 |             for path in whc.connection():
107 |                 _add_changed_target(whc.collection_name, path, "connection")
108 |             for path, pymod in whc.module_utils():
109 |                 _add_changed_target(whc.collection_name, path, "module_utils")
110 |                 for collection in collections:
111 |                     collection.cover_module_utils(pymod, collections_names)
112 |             for path, pymod in whc.plugin_utils():
113 |                 _add_changed_target(whc.collection_name, path, "plugin_utils")
114 |                 for collection in collections:
115 |                     collection.cover_module_utils(pymod, collections_names)
116 |             for path in whc.lookup():
117 |                 _add_changed_target(whc.collection_name, path, "lookup")
118 |             for target in whc.targets():
119 |                 _add_changed_target(whc.collection_name, target, "targets")
120 |             for role in whc.roles():
121 |                 _add_changed_target(whc.collection_name, role, "roles")
122 |
123 |         print("----------- Listed Changes -----------\n", json.dumps(listed_changes, indent=2))
124 |         return {x: make_unique(y["targets"]) for x, y in listed_changes.items()}
125 |
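    # End-to-end illustration (output formats as produced by
    # ElGrandeSeparator.output; values hypothetical): with TOTAL_JOBS=2 and a
    # plan where the community.aws targets "a" and "b" end up in separate
    # chunks, main() appends to $GITHUB_OUTPUT:
    #   test_targets=community.aws-1:a;community.aws-2:b
    #   test_targets_json={"community.aws-1": "a", "community.aws-2": "b"}
    #   test_jobs=["community.aws-1", "community.aws-2"]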
126 |     def run(self) -> dict[str, str]:
127 |         """List the changes and divide the targets into chunks.
128 |
129 |         :returns: the resulting chunks of targets, as raw, JSON and job-list strings
130 |         """
131 |         collections = [Collection(p) for p in self.collections_to_test]
132 |
133 |         if self.targets_to_test:
134 |             changes = self.make_change_targets_to_test(collections)
135 |         elif self.test_all_the_targets:
136 |             changes = self.make_change_for_all_targets(collections)
137 |         else:
138 |             changes = self.make_changed_targets(collections)
139 |
140 |         print("----------- Changes -----------\n", json.dumps(changes, indent=2))
141 |         egs = ElGrandeSeparator(collections, self.total_jobs)
142 |         return egs.output()
143 |
144 |
145 | def write_variable_to_github_output(name: str, value: str) -> None:
146 |     """Write a variable to GITHUB_OUTPUT.
147 | 148 | :param name: variable name to write into GITHUB_OUTPUT 149 | :param value: variable content 150 | """ 151 | github_output_file = os.environ.get("GITHUB_OUTPUT") or "" 152 | if github_output_file: 153 | with open(github_output_file, "a", encoding="utf-8") as file_write: 154 | file_write.write(f"{name}={value}\n") 155 | 156 | 157 | def main() -> None: 158 | """Perform main process of the module.""" 159 | result = ListChangedTargets().run() 160 | print("----------- change targets result -----------\n", json.dumps(result, indent=2)) 161 | write_variable_to_github_output("test_targets", result.get("raw", "")) 162 | write_variable_to_github_output("test_targets_json", result.get("raw_json", "")) 163 | write_variable_to_github_output("test_jobs", result.get("jobs", "[]")) 164 | 165 | 166 | if __name__ == "__main__": 167 | main() 168 | -------------------------------------------------------------------------------- /.github/actions/ansible_test_splitter/test_list_changed_targets.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Contains tests cases for list_changed_common and list_changed_targets modules.""" 3 | 4 | import io 5 | 6 | from pathlib import PosixPath 7 | from typing import Any 8 | from unittest.mock import ANY 9 | from unittest.mock import MagicMock 10 | from unittest.mock import patch 11 | 12 | import pytest 13 | 14 | from list_changed_common import Collection 15 | from list_changed_common import ElGrandeSeparator 16 | from list_changed_common import WhatHaveChanged 17 | from list_changed_common import list_pyimport 18 | from list_changed_common import make_unique 19 | from list_changed_common import read_collection_name 20 | from list_changed_common import read_collections_to_test 21 | from list_changed_common import read_targets_to_test 22 | from list_changed_common import read_test_all_the_targets 23 | from list_changed_common import read_total_jobs 24 | 25 | 26 | MY_MODULE = """ 27 | from ..module_utils.core import AnsibleAWSModule 28 | from ipaddress import ipaddress 29 | import time 30 | import botocore.exceptions 31 | """ 32 | 33 | MY_MODULE_2 = """ 34 | import ansible_collections.kubernetes.core.plugins.module_utils.k8sdynamicclient 35 | 36 | def main(): 37 | mutually_exclusive = [ 38 | ("resource_definition", "src"), 39 | ] 40 | module = AnsibleModule( 41 | argument_spec=argspec(), 42 | ) 43 | from ansible_collections.kubernetes.core.plugins.module_utils.common import ( 44 | K8sAnsibleMixin, 45 | get_api_client, 46 | ) 47 | 48 | k8s_ansible_mixin = K8sAnsibleMixin(module) 49 | """ 50 | 51 | MY_MODULE_3 = """ 52 | from .modules import AnsibleAWSModule 53 | from ipaddress import ipaddress 54 | import time 55 | import botocore.exceptions 56 | """ 57 | 58 | 59 | def test_read_collection_name() -> None: 60 | """Test read_collection_name method.""" 61 | m_galaxy_file = MagicMock() 62 | m_galaxy_file.open = lambda: io.BytesIO(b"name: b\nnamespace: a\n") 63 | m_path = MagicMock() 64 | m_path.__truediv__.return_value = m_galaxy_file 65 | assert read_collection_name(m_path) == "a.b" 66 | 67 | 68 | def test_list_pyimport() -> None: 69 | """Test list_pyimport.""" 70 | assert list(list_pyimport("ansible_collections.amazon.aws.plugins.", "modules", MY_MODULE)) == [ 71 | "ansible_collections.amazon.aws.plugins.module_utils.core", 72 | "ipaddress", 73 | "time", 74 | "botocore.exceptions", 75 | ] 76 | 77 | assert list( 78 | list_pyimport("ansible_collections.kubernetes.core.plugins.", "modules", MY_MODULE_2) 79 
| ) == [ 80 | "ansible_collections.kubernetes.core.plugins.module_utils.k8sdynamicclient", 81 | "ansible_collections.kubernetes.core.plugins.module_utils.common", 82 | ] 83 | 84 | assert list( 85 | list_pyimport("ansible_collections.amazon.aws.plugins.", "module_utils", MY_MODULE_3) 86 | ) == [ 87 | "ansible_collections.amazon.aws.plugins.module_utils.modules", 88 | "ipaddress", 89 | "time", 90 | "botocore.exceptions", 91 | ] 92 | 93 | 94 | @patch("list_changed_common.read_collection_name") 95 | def test_what_changed_files(m_read_collection_name: MagicMock) -> None: 96 | """Test changes from WhatHaveChanged class. 97 | 98 | :param m_read_collection_name: read_collection mock method 99 | """ 100 | m_read_collection_name.return_value = "a.b" 101 | whc = WhatHaveChanged(PosixPath("a"), "b") 102 | whc.files = [ 103 | PosixPath("tests/something"), 104 | PosixPath("plugins/module_utils/core.py"), 105 | PosixPath("plugins/plugin_utils/base.py"), 106 | PosixPath("plugins/connection/aws_ssm.py"), 107 | PosixPath("plugins/modules/ec2.py"), 108 | PosixPath("plugins/lookup/aws_test.py"), 109 | PosixPath("tests/integration/targets/k8s_target_1/action.yaml"), 110 | PosixPath("tests/integration/targets/k8s_target_2/file.txt"), 111 | PosixPath("tests/integration/targets/k8s_target_3/tasks/main.yaml"), 112 | ] 113 | assert list(whc.modules()) == [PosixPath("plugins/modules/ec2.py")] 114 | assert list(whc.plugin_utils()) == [ 115 | ( 116 | PosixPath("plugins/plugin_utils/base.py"), 117 | "ansible_collections.a.b.plugins.plugin_utils.base", 118 | ) 119 | ] 120 | assert list(whc.module_utils()) == [ 121 | ( 122 | PosixPath("plugins/module_utils/core.py"), 123 | "ansible_collections.a.b.plugins.module_utils.core", 124 | ) 125 | ] 126 | assert list(whc.lookup()) == [PosixPath("plugins/lookup/aws_test.py")] 127 | assert list(whc.targets()) == [ 128 | "k8s_target_1", 129 | "k8s_target_2", 130 | "k8s_target_3", 131 | ] 132 | assert list(whc.connection()) == [PosixPath("plugins/connection/aws_ssm.py")] 133 | 134 | 135 | def build_collection(aliases: list[Any]) -> Collection: 136 | """Build Collection. 137 | 138 | :param aliases: aliases 139 | :returns: Mock collection 140 | """ 141 | with patch("list_changed_common.read_collection_name") as m_read_collection_name: 142 | m_read_collection_name.return_value = "some.collection" 143 | mycollection = Collection(PosixPath("nowhere")) 144 | m_c_path = MagicMock() 145 | mycollection.collection_path = m_c_path 146 | m_c_path.glob.return_value = aliases 147 | return mycollection 148 | 149 | 150 | def build_alias(name: str, text: str) -> MagicMock: 151 | """Build target alias. 
152 | 153 | :param name: collection name 154 | :param text: alias file content 155 | :returns: Mock target 156 | """ 157 | m_alias_file = MagicMock() 158 | m_alias_file.read_text.return_value = text 159 | m_alias_file.parent.name = name 160 | return m_alias_file 161 | 162 | 163 | def test_c_targets() -> None: 164 | """Test add targets method from Collection class.""" 165 | mycollection = build_collection([]) 166 | assert not list(mycollection.targets()) 167 | 168 | mycollection = build_collection([build_alias("a", "ec2\n")]) 169 | assert len(list(mycollection.targets())) == 1 170 | assert list(mycollection.targets())[0].name == "a" 171 | assert list(mycollection.targets())[0].is_alias_of("ec2") 172 | 173 | mycollection = build_collection([build_alias("a", "#ec2\n")]) 174 | assert len(list(mycollection.targets())) == 1 175 | assert list(mycollection.targets())[0].name == "a" 176 | assert list(mycollection.targets())[0].execution_time() == 180 177 | 178 | mycollection = build_collection([build_alias("a", "time=30\n")]) 179 | assert len(list(mycollection.targets())) == 1 180 | assert list(mycollection.targets())[0].name == "a" 181 | assert list(mycollection.targets())[0].execution_time() == 30 182 | 183 | 184 | def test_2_targets_for_one_module() -> None: 185 | """Test 2 targets.""" 186 | collection = build_collection( 187 | [build_alias("a", "ec2_instance\n"), build_alias("b", "ec2_instance\n")] 188 | ) 189 | assert collection.regular_targets_to_test() == [] 190 | collection.add_target_to_plan("ec2_instance") 191 | assert collection.regular_targets_to_test() == ["a", "b"] 192 | 193 | 194 | @patch("list_changed_common.read_collection_name") 195 | def test_c_disabled_unstable(m_read_collection_name: MagicMock) -> None: 196 | """Test disable/unstable targets. 197 | 198 | :param m_read_collection_name: read_collection_name patched method 199 | """ 200 | m_read_collection_name.return_value = "some.collection" 201 | collection = Collection(PosixPath("nowhere")) 202 | m_c_path = MagicMock() 203 | collection.collection_path = m_c_path 204 | m_c_path.glob.return_value = [ 205 | build_alias("a", "disabled\n"), 206 | build_alias("b", "unstable\n"), 207 | ] 208 | 209 | # all, we should ignore the disabled,unstable jobs 210 | collection.cover_all() 211 | assert len(collection.regular_targets_to_test()) == 0 212 | # if the module is targets, we continue to ignore the disabled 213 | collection.add_target_to_plan("a") 214 | assert len(collection.regular_targets_to_test()) == 0 215 | # unstable targets should not be triggered if they were pulled in as a dependency 216 | collection.add_target_to_plan("b", is_direct=False) 217 | assert len(collection.regular_targets_to_test()) == 0 218 | # but the unstable is ok when directly triggered 219 | collection.add_target_to_plan("b") 220 | assert len(collection.regular_targets_to_test()) == 1 221 | 222 | 223 | @patch("list_changed_common.read_collection_name") 224 | def test_c_slow_regular_targets(m_read_collection_name: MagicMock) -> None: 225 | """Test targets* methods from Collection class. 
226 | 227 | :param m_read_collection_name: read_collection_name patched method 228 | """ 229 | m_read_collection_name.return_value = "some.collection" 230 | collection = build_collection( 231 | [ 232 | build_alias("tortue", "slow\nec2\n#s3\n"), 233 | build_alias("lapin", "notslow\ncarrot\n\n"), 234 | ] 235 | ) 236 | 237 | collection.cover_all() 238 | assert len(list(collection.targets())) == 2 239 | assert list(collection.targets())[0].is_slow() 240 | assert not list(collection.targets())[1].is_slow() 241 | assert len(collection.slow_targets_to_test()) == 1 242 | 243 | 244 | def test_c_inventory_targets() -> None: 245 | """Test targets methods from Collection class.""" 246 | col = build_collection( 247 | [ 248 | build_alias("inventory_tortue", "slow\nec2\n#s3\n"), 249 | build_alias("lapin", "notslow\ninventory_carrot\n\n"), 250 | ] 251 | ) 252 | col.cover_all() 253 | assert len(list(col.targets())) == 2 254 | assert list(col.targets())[0].is_slow() 255 | assert not list(col.targets())[1].is_slow() 256 | assert len(col.slow_targets_to_test()) == 1 257 | 258 | 259 | @patch("list_changed_common.read_collection_name") 260 | def test_c_with_cover(m_read_collection_name: MagicMock) -> None: 261 | """Test add_target_to_plan method from Collection class. 262 | 263 | :param m_read_collection_name: read_collection_name patched method 264 | """ 265 | m_read_collection_name.return_value = "some.collection" 266 | collection = Collection(PosixPath("nowhere")) 267 | m_c_path = MagicMock() 268 | collection.collection_path = m_c_path 269 | 270 | m_c_path.glob.return_value = [ 271 | build_alias("tortue", "slow\nec2\n#s3\n"), 272 | build_alias("lapin", "carrot\n\n"), 273 | ] 274 | collection.add_target_to_plan("ec2") 275 | assert len(collection.slow_targets_to_test()) == 1 276 | assert collection.regular_targets_to_test() == [] 277 | 278 | 279 | def test_splitter_with_time() -> None: 280 | """Test splitter method from class ElGrandeSeparator.""" 281 | collection_1 = build_collection( 282 | [ 283 | build_alias("a", "time=50m\n"), 284 | build_alias("b", "time=10m\n"), 285 | build_alias("c", "time=180\n"), 286 | build_alias("d", "time=140s \n"), 287 | build_alias("e", "time=70\n"), 288 | ] 289 | ) 290 | collection_1.cover_all() 291 | egs = ElGrandeSeparator([collection_1], ANY) 292 | result = list(egs.build_up_batches([f"slot{i}" for i in range(2)], collection_1)) 293 | assert result == [ 294 | ("slot0", ["a"]), 295 | ("slot1", ["b", "c", "d", "e"]), 296 | ] 297 | 298 | collection_2 = build_collection( 299 | [ 300 | build_alias("a", "time=50m\n"), 301 | build_alias("b", "time=50m\n"), 302 | build_alias("c", "time=18\n"), 303 | build_alias("d", "time=5m\n"), 304 | ] 305 | ) 306 | collection_2.cover_all() 307 | egs = ElGrandeSeparator([collection_2], ANY) 308 | result = list(egs.build_up_batches([f"slot{i}" for i in range(3)], collection_2)) 309 | assert result == [("slot0", ["a"]), ("slot1", ["b"]), ("slot2", ["d", "c"])] 310 | 311 | 312 | @patch("list_changed_common.read_collection_name") 313 | @patch("list_changed_common.run_command") 314 | def test_what_changed_git_call(m_run_command: MagicMock, m_read_collection_name: MagicMock) -> None: 315 | """Test changed_files method from WhatHaveChanged class. 
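`test_splitter_with_time` above fully determines the batching strategy: targets are handed out longest first, each one going to the currently least loaded slot. The greedy sketch below reproduces both expected results; the real `ElGrandeSeparator.build_up_batches` lives in `list_changed_common.py`, and the durations here are the test's `time=` values converted to seconds.

```python
# Greedy "longest processing time first" packing; a sketch of the
# behaviour asserted above, not the actual implementation.
from typing import Iterator


def build_up_batches_sketch(
    slots: list[str], durations: dict[str, int]
) -> Iterator[tuple[str, list[str]]]:
    load = {slot: 0 for slot in slots}
    batches: dict[str, list[str]] = {slot: [] for slot in slots}
    # Assign the longest-running targets first, always to the least
    # loaded slot, so the per-slot total durations stay balanced.
    for name, duration in sorted(durations.items(), key=lambda kv: -kv[1]):
        slot = min(slots, key=lambda s: load[s])
        load[slot] += duration
        batches[slot].append(name)
    for slot in slots:
        if batches[slot]:
            yield slot, batches[slot]


# Matches the first expectation above ("time=50m" -> 3000s, "time=10m" -> 600s, ...):
got = list(build_up_batches_sketch(
    ["slot0", "slot1"], {"a": 3000, "b": 600, "c": 180, "d": 140, "e": 70}
))
assert got == [("slot0", ["a"]), ("slot1", ["b", "c", "d", "e"])]
```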
316 |
317 | :param m_run_command: run_command patched method
318 | :param m_read_collection_name: read_collection_name patched method
319 | """
320 | m_run_command.return_value = "plugins/modules/foo.py\n"
321 | m_read_collection_name.return_value = "a.b"
322 |
323 | whc = WhatHaveChanged(PosixPath("a"), "stable-2.1")
324 | assert whc.changed_files() == [PosixPath("plugins/modules/foo.py")]
325 |
326 | m_run_command.assert_called_with(
327 | command="git diff origin/stable-2.1 --name-only",
328 | chdir=PosixPath("a"),
329 | )
330 |
331 |
332 | def test_make_unique() -> None:
333 | """Test make_unique function."""
334 | assert make_unique(["a", "b", "a"]) == ["a", "b"]
335 | assert make_unique(["a", "b"]) == ["a", "b"]
336 |
337 |
338 | def test_read_test_all_the_targets(monkeypatch: pytest.MonkeyPatch) -> None:
339 | """Test read_test_all_the_targets function.
340 |
341 | :param monkeypatch: monkey patch
342 | """
343 | # default value when environment variable is not defined
344 | assert read_test_all_the_targets() is False
345 |
346 | # ANSIBLE_TEST_ALL_THE_TARGETS -> 'any'
347 | monkeypatch.setenv("ANSIBLE_TEST_ALL_THE_TARGETS", "any")
348 | assert read_test_all_the_targets() is False
349 |
350 | # ANSIBLE_TEST_ALL_THE_TARGETS -> 'TRUE'
351 | monkeypatch.setenv("ANSIBLE_TEST_ALL_THE_TARGETS", "TRUE")
352 | assert read_test_all_the_targets() is True
353 |
354 | # ANSIBLE_TEST_ALL_THE_TARGETS -> 'True'
355 | monkeypatch.setenv("ANSIBLE_TEST_ALL_THE_TARGETS", "True")
356 | assert read_test_all_the_targets() is True
357 |
358 |
359 | def test_read_total_jobs(monkeypatch: pytest.MonkeyPatch) -> None:
360 | """Test read_total_jobs function.
361 |
362 | :param monkeypatch: monkey patch
363 | """
364 | # default value when environment variable is not defined
365 | assert read_total_jobs() == 3
366 |
367 | # TOTAL_JOBS -> 'any'
368 | monkeypatch.setenv("TOTAL_JOBS", "any")
369 | assert read_total_jobs() == 3
370 |
371 | # TOTAL_JOBS -> '07'
372 | monkeypatch.setenv("TOTAL_JOBS", "07")
373 | assert read_total_jobs() == 7
374 |
375 | # TOTAL_JOBS -> '5'
376 | monkeypatch.setenv("TOTAL_JOBS", "5")
377 | assert read_total_jobs() == 5
378 |
379 |
380 | def test_read_targets_to_test(monkeypatch: pytest.MonkeyPatch) -> None:
381 | """Test read_targets_to_test function.
382 |
383 | :param monkeypatch: monkey patch
384 | """
385 | # default value when environment variable is not defined
386 | assert not read_targets_to_test()
387 |
388 | body = "No target to test set here"
389 | monkeypatch.setenv("PULL_REQUEST_BODY", body)
390 | assert not read_targets_to_test()
391 |
392 | body = (
393 | "This is the first line of my pull request description\n"
394 | "TargetsToTest=collection1:target_01,target_02;collection2:target_2"
395 | )
396 | monkeypatch.setenv("PULL_REQUEST_BODY", body)
397 |
398 | assert read_targets_to_test() == {
399 | "collection1": ["target_01", "target_02"],
400 | "collection2": ["target_2"],
401 | }
402 |
403 | body = (
404 | "This is the first line of my pull request description\n"
405 | "TARGETSTOTEST=collection1:target_01,target_02;collection2:target_2;"
406 | )
407 | monkeypatch.setenv("PULL_REQUEST_BODY", body)
408 | assert read_targets_to_test() == {
409 | "collection1": ["target_01", "target_02"],
410 | "collection2": ["target_2"],
411 | }
412 |
413 |
414 | def test_read_collections_to_test(monkeypatch: pytest.MonkeyPatch) -> None:
415 | """Test read_collections_to_test function.
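The two `TargetsToTest` cases above also fix the pull request body grammar: a single line of the form `TargetsToTest=collection1:t1,t2;collection2:t3`, matched case-insensitively, with a trailing `;` tolerated. A minimal parser sketch consistent with those assertions (the real `read_targets_to_test` lives in `list_changed_common.py`; the regex here is an assumption):

```python
# Sketch of the PULL_REQUEST_BODY parsing described by the tests above.
import os
import re


def read_targets_to_test_sketch() -> dict[str, list[str]]:
    body = os.environ.get("PULL_REQUEST_BODY", "")
    # Case-insensitive: matches "TargetsToTest=" as well as "TARGETSTOTEST=".
    match = re.search(r"^targetstotest=(.*)$", body, re.IGNORECASE | re.MULTILINE)
    if not match:
        return {}
    result: dict[str, list[str]] = {}
    # "col1:t1,t2;col2:t3;" -> {"col1": ["t1", "t2"], "col2": ["t3"]}
    for chunk in match.group(1).split(";"):
        if ":" not in chunk:
            continue  # skips the empty chunk left by a trailing ";"
        collection, targets = chunk.split(":", 1)
        result[collection] = [t for t in targets.split(",") if t]
    return result
```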
416 | 417 | :param monkeypatch: monkey patch 418 | """ 419 | collection_to_test = "col1,col2\n ,col3" 420 | monkeypatch.setenv("COLLECTIONS_TO_TEST", collection_to_test) 421 | assert read_collections_to_test() == [PosixPath("col1"), PosixPath("col2"), PosixPath("col3")] 422 | -------------------------------------------------------------------------------- /.github/actions/ansible_validate_changelog/action.yml: -------------------------------------------------------------------------------- 1 | name: ansible_validate_changelog 2 | description: Ensure a valid changelog has been added to the pull request. 3 | 4 | inputs: 5 | path: 6 | description: | 7 | Path to the collection to validate changelog from. 8 | required: false 9 | default: "." 10 | base_ref: 11 | description: The pull request base ref. 12 | required: false 13 | default: ${{ github.event.pull_request.base.ref }} 14 | custom_paths: 15 | description: | 16 | A comma-separated list of custom paths from which any modified file 17 | will require a changelog. 18 | default: "" 19 | 20 | runs: 21 | using: composite 22 | steps: 23 | - name: Setup python 24 | uses: actions/setup-python@v4 25 | with: 26 | python-version: "3.12" 27 | 28 | - name: Install python dependencies 29 | run: | 30 | pip install -U pyyaml 31 | shell: bash 32 | 33 | - name: Validate changelog using custom paths 34 | run: >- 35 | python3 ${{ github.action_path }}/validate_changelog.py 36 | --ref ${{ inputs.base_ref }} 37 | --custom-paths ${{ inputs.custom_paths }} 38 | shell: bash 39 | working-directory: ${{ inputs.path }} 40 | if: inputs.custom_paths != '' 41 | 42 | - name: Validate changelog 43 | run: >- 44 | python3 ${{ github.action_path }}/validate_changelog.py 45 | --ref ${{ inputs.base_ref }} 46 | shell: bash 47 | working-directory: ${{ inputs.path }} 48 | if: inputs.custom_paths == '' 49 | -------------------------------------------------------------------------------- /.github/actions/ansible_validate_changelog/validate_changelog.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """Script to check if a PR has a correct changelog fragment.""" 3 | 4 | import argparse 5 | import logging 6 | import re 7 | import subprocess 8 | import sys 9 | 10 | from collections import defaultdict 11 | from pathlib import Path 12 | from typing import Optional 13 | 14 | import yaml 15 | 16 | 17 | FORMAT = "[%(asctime)s] - %(message)s" 18 | logging.basicConfig(format=FORMAT) 19 | logger = logging.getLogger("validate_changelog") 20 | logger.setLevel(logging.DEBUG) 21 | 22 | 23 | def is_changelog_file(ref: str) -> bool: 24 | """Check if a file is a changelog fragment. 25 | 26 | :param ref: the file to be checked 27 | :returns: True if file is a changelog fragment else False 28 | """ 29 | match = re.match(r"^changelogs/fragments/(.*)\.(yaml|yml)$", ref) 30 | return bool(match) 31 | 32 | 33 | def is_module_or_plugin(ref: str) -> bool: 34 | """Check if a file is a module or plugin. 
35 | 36 | :param ref: the file to be checked 37 | :returns: True if file is a module or plugin else False 38 | """ 39 | prefix_list = ( 40 | "plugins/modules", 41 | "plugins/module_utils", 42 | "plugins/plugin_utils", 43 | "plugins/action", 44 | "plugins/inventory", 45 | "plugins/lookup", 46 | "plugins/filter", 47 | "plugins/connection", 48 | "plugins/become", 49 | "plugins/cache", 50 | "plugins/callback", 51 | "plugins/cliconf", 52 | "plugins/httpapi", 53 | "plugins/netconf", 54 | "plugins/shell", 55 | "plugins/strategy", 56 | "plugins/terminal", 57 | "plugins/test", 58 | "plugins/vars", 59 | "roles/", 60 | "playbooks/", 61 | "meta/runtime.yml", 62 | ) 63 | return ref.startswith(prefix_list) 64 | 65 | 66 | def is_documentation_file(ref: str) -> bool: 67 | """Check if a file is a documentation file. 68 | 69 | :param ref: the file to be checked 70 | :returns: True if file is a documentation file else False 71 | """ 72 | prefix_list = ( 73 | "docs/", 74 | "plugins/doc_fragments", 75 | "examples", 76 | ) 77 | return ref.startswith(prefix_list) 78 | 79 | 80 | def is_release_pr(changes: dict[str, list[str]]) -> bool: 81 | """Determine whether the changeset looks like a release. 82 | 83 | :param changes: A dictionary keyed on change status (A, M, D, etc.) of lists of changed files 84 | :returns: True if the changes match a collection release else False 85 | """ 86 | # Should only have Deleted and Modified files. 87 | if not set(changes.keys()).issubset(("D", "M")): 88 | return False 89 | 90 | # All deletions should be of changelog files 91 | if not all(is_changelog_file(x) for x in changes["D"]): 92 | return False 93 | 94 | # A collection release should only change these files 95 | if not set(changes["M"]).issubset(("CHANGELOG.rst", "changelogs/changelog.yaml", "galaxy.yml")): 96 | return False 97 | 98 | return True 99 | 100 | 101 | def is_changelog_needed(changes: dict[str, list[str]], custom_paths: Optional[list[str]]) -> bool: 102 | """Determine whether a changelog fragment is necessary. 103 | 104 | :param changes: A dictionary keyed on change status (A, M, D, etc.) of lists of changed files 105 | :param custom_paths: additional paths to check changes in 106 | :returns: True if a changelog fragment is not required for this PR else False 107 | """ 108 | # Changes to existing plugins or modules require a changelog 109 | # Changelog entries are not needed for new plugins or modules 110 | # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#generating-changelogs 111 | modifications = changes["M"] + changes["D"] 112 | if any(is_module_or_plugin(x) for x in modifications): 113 | return True 114 | # check any file from the custom paths 115 | if custom_paths: 116 | for ref in modifications + changes["A"]: 117 | if any(ref.startswith(x) for x in custom_paths): 118 | return True 119 | 120 | return False 121 | 122 | 123 | def is_valid_changelog_format(path: str) -> bool: 124 | """Check if changelog fragment is formatted properly. 
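To make the predicates above concrete, here is a worked example of the changeset shape a collection release produces, using the functions defined above. The fragment filename is made up, and the dict mirrors the `defaultdict` produced by `list_files()` further down.

```python
# Worked example for is_release_pr() / is_changelog_needed() above.
from collections import defaultdict

changes: dict[str, list[str]] = defaultdict(list)
changes["D"] = ["changelogs/fragments/20230101-fix-foo.yml"]  # made-up name
changes["M"] = ["CHANGELOG.rst", "changelogs/changelog.yaml", "galaxy.yml"]

print(is_release_pr(changes))  # True: only release artifacts were touched
print(is_changelog_needed(changes, None))  # False: no module/plugin modified
```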
125 | 126 | :param path: the file to be checked 127 | :returns: True if the file passes validation else False 128 | """ 129 | try: 130 | config = Path("changelogs/config.yaml") 131 | with open(config, "rb") as config_file: 132 | changelog_config = yaml.safe_load(config_file) 133 | changes_type = tuple(item[0] for item in changelog_config["sections"]) 134 | changes_type += (changelog_config["trivial_section_name"],) 135 | logger.info("Found the following changelog sections: %s", changes_type) 136 | except (OSError, yaml.YAMLError) as exc: 137 | logger.info("Failed to read changelog config, using default sections instead: %s", exc) 138 | # https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelogs.rst#changelog-fragment-categories 139 | changes_type = ( 140 | "release_summary", 141 | "breaking_changes", 142 | "major_changes", 143 | "minor_changes", 144 | "removed_features", 145 | "deprecated_features", 146 | "security_fixes", 147 | "bugfixes", 148 | "known_issues", 149 | "trivial", 150 | ) 151 | 152 | try: 153 | with open(path, "rb") as file_desc: 154 | result = list(yaml.safe_load_all(file_desc)) 155 | 156 | for section in result: 157 | for key in section.keys(): 158 | if key not in changes_type: 159 | msg = f"{key} from {path} is not a valid changelog type" 160 | logger.error(msg) 161 | return False 162 | if not isinstance(section[key], list): 163 | logger.error( 164 | "Changelog section %s from file %s must be a list, '%s' found instead.", 165 | key, 166 | path, 167 | type(section[key]), 168 | ) 169 | return False 170 | return True 171 | except (OSError, yaml.YAMLError) as exc: 172 | msg = f"yaml loading error for file {path} -> {exc}" 173 | logger.error(msg) 174 | return False 175 | 176 | 177 | def run_command(cmd: str) -> tuple[int, str, str]: 178 | """Run a command and return the response. 179 | 180 | :param cmd: The command to run 181 | :returns: A tuple of (return code, stdout, stderr) 182 | """ 183 | with subprocess.Popen( 184 | cmd, 185 | stdout=subprocess.PIPE, 186 | stderr=subprocess.PIPE, 187 | shell=True, 188 | encoding="utf-8", 189 | ) as proc: 190 | out, err = proc.communicate() 191 | return proc.returncode, out, err 192 | 193 | 194 | def list_files(ref: str) -> dict[str, list[str]]: 195 | """List all files changed since ref, grouped by change status. 196 | 197 | :param ref: The git ref to compare to 198 | :returns: A dictionary keyed on change status (A, M, D, etc.) of lists of changed files 199 | :raises ValueError: If the file gathering command fails 200 | """ 201 | command = "git diff origin/" + ref + " --name-status" 202 | logger.info("Executing -> %s", command) 203 | ret_code, stdout, stderr = run_command(command) 204 | if ret_code != 0: 205 | raise ValueError(stderr) 206 | 207 | changes: dict[str, list[str]] = defaultdict(list) 208 | for file in stdout.split("\n"): 209 | file_attr = file.split("\t") 210 | if len(file_attr) == 2: 211 | changes[file_attr[0]].append(file_attr[1]) 212 | logger.info("changes -> %s", changes) 213 | return changes 214 | 215 | 216 | def main(ref: str, custom_paths: Optional[list[str]]) -> None: 217 | """Run the script. 
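For reference, a fragment that passes the validation above could look like the following. This is an illustrative example only, using two of the default antsibull-changelog sections; the path, module name, and entry texts are made up.

```yaml
# changelogs/fragments/20230102-ec2_instance-fixes.yml (illustrative path)
# Every top-level key must be a known section name and must map to a list.
bugfixes:
  - ec2_instance - fix an illustrative crash when no tags are set.
minor_changes:
  - ec2_instance - add an illustrative wait_timeout option.
```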
218 |
219 | :param ref: The pull request base ref
220 | :param custom_paths: additional paths to check changes in
221 | """
222 | changes = list_files(ref)
223 | if changes:
224 | if is_release_pr(changes):
225 | logger.info("This PR looks like a release!")
226 | sys.exit(0)
227 |
228 | changelog = [x for x in changes["A"] if is_changelog_file(x)]
229 | logger.info("changelog files -> %s", changelog)
230 | if not changelog:
231 | if is_changelog_needed(changes, custom_paths):
232 | logger.error(
233 | "Missing changelog fragment. A fragment is required unless the"
234 | " PR only adds new modules or plugins, or contains"
235 | " only documentation changes."
236 | )
237 | sys.exit(1)
238 | logger.info(
239 | "Changelog not required as the PR adds new modules and/or"
240 | " plugins or contains only documentation changes."
241 | )
242 | else:
243 | invalid_changelog_files = [x for x in changelog if not is_valid_changelog_format(x)]
244 | if invalid_changelog_files:
245 | logger.error(
246 | "The following changelog files are not valid -> %s",
247 | invalid_changelog_files,
248 | )
249 | sys.exit(1)
250 | sys.exit(0)
251 |
252 |
253 | def comma_separated_list(arg: str) -> list[str]:
254 | """Parse a string into a list of strings.
255 |
256 | :param arg: The string list to parse
257 | :returns: A list of strings
258 | """
259 | return [x for x in arg.split(",") if x]
260 |
261 |
262 | if __name__ == "__main__":
263 | parser = argparse.ArgumentParser(description="Validate changelog file from new commit")
264 | parser.add_argument("--ref", required=True, help="Pull request base ref")
265 | parser.add_argument("--custom-paths", type=comma_separated_list)
266 |
267 | args = parser.parse_args()
268 | main(args.ref, args.custom_paths)
269 | -------------------------------------------------------------------------------- /.github/actions/build_install_collection/action.yml: --------------------------------------------------------------------------------
1 | name: Build and install the collection
2 | description: Build and install the collection
3 |
4 | inputs:
5 | install_python_dependencies:
6 | description: "Install collection python dependencies"
7 | required: true
8 | source_path:
9 | description: "Path to the collection source"
10 | required: true
11 | collection_path:
12 | description: |
13 | The final collection path
14 | If not set, this will be determined by the action
15 | default: ""
16 | tar_file:
17 | description: |
18 | The collection tarball when built
19 | If not set, this will be determined by the action
20 | default: ""
21 | ansible_version:
22 | description: |
23 | Ansible Core version from the workflow.
24 | required: false 25 | default: "" 26 | outputs: 27 | collection_path: 28 | description: The final collection path 29 | value: ${{ inputs.collection_path || steps.identify.outputs.collection_path }} 30 | 31 | runs: 32 | using: composite 33 | steps: 34 | - name: Show the galaxy.yml 35 | run: cat galaxy.yml 36 | shell: bash 37 | working-directory: ${{ inputs.source_path }} 38 | 39 | - name: check if bindep.txt exists 40 | id: bindep_check 41 | shell: bash 42 | run: | 43 | if test -f "bindep.txt"; then 44 | echo "file_exists=true" >> $GITHUB_OUTPUT 45 | else 46 | echo "file_exists=false" >> $GITHUB_OUTPUT 47 | fi 48 | working-directory: ${{ inputs.source_path }} 49 | 50 | - name: Install bindep from pypi 51 | run: sudo python3 -m pip install bindep 52 | shell: bash 53 | if: steps.bindep_check.outputs.file_exists == 'true' 54 | 55 | - name: Install missing system packages using bindep.txt 56 | run: bindep test | tail -n +2 | xargs sudo apt-get -o Debug::pkgProblemResolver=true -o Debug::Acquire::http=true install -y || exit 0 57 | shell: bash 58 | working-directory: ${{ inputs.source_path }} 59 | if: steps.bindep_check.outputs.file_exists == 'true' 60 | 61 | - name: Check for missing system packages using bindep.txt 62 | run: bindep test 63 | shell: bash 64 | working-directory: ${{ inputs.source_path }} 65 | if: steps.bindep_check.outputs.file_exists == 'true' 66 | 67 | - name: Install collection python requirements 68 | if: ${{ inputs.install_python_dependencies == 'true' }} 69 | 70 | run: python3 -m pip install -r requirements.txt -r test-requirements.txt 71 | shell: bash 72 | working-directory: ${{ inputs.source_path }} 73 | 74 | - name: identify collection (final installation path and tarball name) 75 | id: identify 76 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 77 | with: 78 | source_path: ${{ inputs.source_path }} 79 | if: ${{ (inputs.collection_path == '') || (inputs.tar_file == '') }} 80 | 81 | - name: Build collection 82 | run: ansible-galaxy collection build -vvv 83 | shell: bash 84 | working-directory: ${{ inputs.source_path }} 85 | 86 | - name: Install collection and dependencies (with --pre flag) 87 | run: ansible-galaxy collection install ./${{ steps.identify.outputs.tar_file || inputs.tar_file }} --pre -p /home/runner/collections 88 | shell: bash 89 | working-directory: ${{ inputs.source_path }} 90 | if: ${{ inputs.ansible_version != 'stable-2.9' }} 91 | 92 | - name: Install collection and dependencies (without --pre flag) 93 | run: ansible-galaxy collection install ./${{ steps.identify.outputs.tar_file || inputs.tar_file }} -p /home/runner/collections 94 | shell: bash 95 | working-directory: ${{ inputs.source_path }} 96 | if: ${{ inputs.ansible_version == 'stable-2.9' }} 97 | 98 | - name: Copy the galaxy.yml from source to destination, needed for pytest-ansible-units 99 | run: cp galaxy.yml ${{ steps.identify.outputs.collection_path || inputs.collection_path }}/galaxy.yml 100 | shell: bash 101 | working-directory: ${{ inputs.source_path }} 102 | -------------------------------------------------------------------------------- /.github/actions/changelog_evaluator/action.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Changelog based PR evaluator 3 | author: Mark Chappell (tremble) 4 | branding: 5 | icon: git-branch 6 | color: gray-dark 7 | description: | 8 | This action evaluates the contents of changelog fragments in "changelogs/fragments/" to assess 9 | which branches it may be 
appropriate to backport a change to.
10 |
11 | A PR is evaluated as needing a "major release" if it includes at least one of "major_changes",
12 | "breaking_changes", or "removed_features".
13 |
14 | A PR is evaluated as needing a "minor release" if it includes at least one of "minor_changes" or
15 | "deprecated_features".
16 |
17 | A PR is evaluated as being a "bugfix" PR if it includes at least one of "bugfixes" or
18 | "security_fixes".
19 |
20 | The output values of this action are "bash-ian" booleans ("0" == True, anything else == False)
21 |
22 | outputs:
23 | major_release:
24 | description: Whether the changelogs indicate that a major release would be needed.
25 | value: ${{ steps.evaluate.outputs.major }}
26 | minor_release:
27 | description: Whether the changelogs indicate that a minor release would be needed.
28 | value: ${{ steps.evaluate.outputs.minor }}
29 | bugfix_release:
30 | description: Whether the changelogs indicate that the PR includes bugfixes.
31 | value: ${{ steps.evaluate.outputs.bugfix }}
32 |
33 | runs:
34 | using: composite
35 | steps:
36 | - uses: actions/checkout@v2
37 | id: checkout
38 | with:
39 | ref: refs/pull/${{ github.event.number }}/merge
40 | - name: Fetch change types from changelog fragments
41 | id: evaluate
42 | shell: bash {0}
43 | run: |
44 | gh pr -R "${GITHUB_REPOSITORY}" diff "${{ github.event.pull_request.number }}" --name-only | \
45 | grep -E '^changelogs/fragments/' | \
46 | while read -r line
47 | do cat "${line}" | \
48 | python -c 'import sys, yaml; change = yaml.safe_load(sys.stdin.read()) ; print("\n".join(change.keys()));' \
49 | | tee -a all-changelog-types
50 | done
51 | # Beware, these are bash-ian booleans: "true == 0"
52 | grep -qE '(release_summary|breaking_changes|major_changes|removed_features)' all-changelog-types ; echo "major=${?}" >>${GITHUB_OUTPUT}
53 | grep -qE '(deprecated_features|minor_changes)' all-changelog-types ; echo "minor=${?}" >>${GITHUB_OUTPUT}
54 | grep -qE '(bugfixes|security_fixes)' all-changelog-types ; echo "bugfix=${?}" >>${GITHUB_OUTPUT}
55 | env:
56 | GH_TOKEN: ${{ github.token }}
57 | -------------------------------------------------------------------------------- /.github/actions/changelog_labeller/action.yml: --------------------------------------------------------------------------------
1 | ---
2 | name: Apply backporting labels
3 | author: Mark Chappell (tremble)
4 | branding:
5 | icon: git-branch
6 | color: gray-dark
7 | description: |
8 | Applies backporting labels to a PR.
9 |
10 | inputs:
11 | purge_labels:
12 | description: Whether to purge existing labels
13 | required: false
14 | type: boolean
15 | default: false
16 | purge_prefix:
17 | description: The prefix used when purging labels
18 | required: false
19 | type: string
20 | default: "backport-"
21 | label_to_add:
22 | description: The label(s) to be applied to the PR
23 | type: string
24 | required: true
25 |
26 | runs:
27 | using: composite
28 | steps:
29 | - name: Strip existing labels and add new labels
30 | id: label-strip-add
31 | # If breaking_changes or major_changes are pushed, then we always apply do_not_backport
32 | # and strip any existing backport-* labels
33 | if: ${{ fromJSON(inputs.purge_labels) }}
34 | shell: bash {0}
35 | run: |
36 | # If this includes breaking changes, then set the do_not_backport label and remove all
37 | # labels starting with "backport-".
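As an illustration of how the evaluator and labeller chain together, a caller's workflow step might look like the sketch below. This is hypothetical wiring only: the step name, the `evaluate` step id, and the label value are made up, while the action path follows this repository's layout and the "bash-ian" boolean convention described above.

```yaml
# Hypothetical workflow excerpt; "evaluate" refers to an earlier step
# that used the changelog_evaluator action above.
- name: Label bugfix PRs for backporting
  # bash-ian boolean: "0" means the evaluator found bugfix changelog entries
  if: steps.evaluate.outputs.bugfix_release == '0'
  uses: ansible-network/github_actions/.github/actions/changelog_labeller@main
  with:
    label_to_add: backport-2 # made-up label
    purge_labels: "false"
```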
38 | CURRENT_LABELS=$(
39 | gh pr -R "${GITHUB_REPOSITORY}" view "${{ github.event.pull_request.number }}" \
40 | --json labels \
41 | --jq '[.labels[] | select(.name | startswith("${{ inputs.purge_prefix }}"))] | map(.name) | join(",")'
42 | )
43 | echo "Apply '${{ inputs.label_to_add }}' (remove '${CURRENT_LABELS}')"
44 | if [[ -n ${CURRENT_LABELS} ]] ; then
45 | gh pr -R "${GITHUB_REPOSITORY}" edit "${{ github.event.pull_request.number }}" \
46 | --add-label ${{ inputs.label_to_add }} \
47 | --remove-label "${CURRENT_LABELS}"
48 | else
49 | gh pr -R "${GITHUB_REPOSITORY}" edit "${{ github.event.pull_request.number }}" \
50 | --add-label ${{ inputs.label_to_add }}
51 | fi
52 | env:
53 | GH_TOKEN: ${{ github.token }}
54 |
55 | - name: Apply labels
56 | id: label-add
57 | if: ${{ ! fromJSON(inputs.purge_labels) }}
58 | shell: bash {0}
59 | run: |
60 | echo "Apply '${{ inputs.label_to_add }}'"
61 | gh pr -R "${GITHUB_REPOSITORY}" edit "${{ github.event.pull_request.number }}" \
62 | --add-label ${{ inputs.label_to_add }}
63 | env:
64 | GH_TOKEN: ${{ github.token }}
65 | -------------------------------------------------------------------------------- /.github/actions/checkout_dependency/README.md: --------------------------------------------------------------------------------
1 | # checkout_dependency
2 |
3 | This action checks out your repository under the specified destination directory using the actions/checkout action. Add `Depends-On: repository/pull/xx` to the pull request body to override the reference to check out.
4 |
5 | # Usage
6 |
7 |
8 |
9 | ```yaml
10 | - uses: ansible-network/github_actions/.github/actions/checkout_dependency@main
11 | with:
12 | # Repository name with owner. For example, ansible-collections/kubernetes.core
13 | repository: ""
14 |
15 | # The branch, tag, or SHA to checkout when the pull request body does not
16 | # contain any override for this repository.
17 | ref: ""
18 |
19 | # Relative path under $GITHUB_WORKSPACE to place the repository
20 | path: ""
21 |
22 | # Number of commits to fetch. 0 indicates all history for all branches and tags.
23 | fetch-depth: "1"
24 | ```
25 |
26 |
27 |
28 | # Depending on other PRs
29 |
30 | The pull request body should contain the following sequence:
31 |
32 | ```
33 | Depends-On: repository/pull/xx
34 | ```
35 |
36 | # Scenarios
37 |
38 | - [checkout pull request 12345 from repository my_org/my_collection](#Checkout-depending-pull-request)
39 |
40 | ## Checkout depending pull request
41 |
42 | Github action step:
43 |
44 | ```yaml
45 | - uses: ansible-network/github_actions/.github/actions/checkout_dependency@main
46 | with:
47 | repository: my_org/my_collection
48 | ref: main
49 | path: /path/to/checkout/repository
50 | ```
51 |
52 | Pull request body:
53 |
54 | ```text
55 | Depends-On: https://github.com/my_org/my_collection/pull/12345
56 | ```
57 | -------------------------------------------------------------------------------- /.github/actions/checkout_dependency/action.yml: --------------------------------------------------------------------------------
1 | name: "Checkout Dependency"
2 | description: |
3 | checkout repository and override commit based on keyword 'depends-on' from pull request message
4 | inputs:
5 | repository:
6 | description: "Repository name with owner.
For example, ansible-collections/cloud.common" 7 | default: ${{ github.repository }} 8 | ref: 9 | description: "The default branch, tag or SHA to checkout if no reference (using depends-on) is found" 10 | path: 11 | description: "Relative path under $GITHUB_WORKSPACE to place the repository" 12 | fetch-depth: 13 | description: "Number of commits to fetch. 0 indicates all history for all branches and tags." 14 | default: "1" 15 | runs: 16 | using: composite 17 | steps: 18 | - name: Set up Python '3.12' 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: "3.12" 22 | 23 | - name: install PyGithub 24 | run: | 25 | pip install -U PyGithub 26 | shell: bash 27 | 28 | - id: resolve-dependency 29 | shell: bash 30 | run: | 31 | python ${{ github.action_path }}/resolve_dependency.py 32 | env: 33 | RESOLVE_REF_PR_BODY: ${{ github.event.pull_request.body }} 34 | RESOLVE_REF_REPOSITORY: ${{ inputs.repository }} 35 | 36 | - name: Display reference to checkout 37 | run: echo "Resolved reference -> '${RESOLVED_REF}'" 38 | shell: bash 39 | env: 40 | RESOLVED_REF: ${{ steps.resolve-dependency.outputs.merge_commit_sha }} 41 | 42 | - name: checkout repository 43 | uses: actions/checkout@v3 44 | with: 45 | repository: ${{ inputs.repository }} 46 | path: ${{ inputs.path }} 47 | ref: ${{ steps.resolve-dependency.outputs.merge_commit_sha || inputs.ref }} 48 | fetch-depth: ${{ inputs.fetch-depth }} 49 | -------------------------------------------------------------------------------- /.github/actions/checkout_dependency/resolve_dependency.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """Script to check if a depends-on pull request has been defined into pull request body.""" 3 | 4 | import logging 5 | import os 6 | import re 7 | import sys 8 | 9 | from github import Github 10 | 11 | 12 | FORMAT = "[%(asctime)s] - %(message)s" 13 | logging.basicConfig(format=FORMAT) 14 | logger = logging.getLogger("resolve_dependency") 15 | logger.setLevel(logging.DEBUG) 16 | 17 | 18 | def get_pr_merge_commit_sha(repository: str, pr_number: int) -> str: 19 | """Retrieve pull request merge commit sha. 20 | 21 | :param repository: The repository name 22 | :param pr_number: The pull request number 23 | :returns: The pull request merge commit sha if it exists 24 | :raises ValueError: if the pull request is not mergeable 25 | """ 26 | access_token = os.environ.get("GITHUB_TOKEN") 27 | gh_obj = Github(access_token) 28 | repo = gh_obj.get_repo(repository) 29 | 30 | pr_obj = repo.get_pull(pr_number) 31 | if not pr_obj.mergeable: 32 | # raise an error when the pull request is not mergeable 33 | sys.tracebacklimit = -1 34 | raise ValueError(f"Pull request {pr_number} from {repository} is not mergeable") 35 | 36 | return pr_obj.merge_commit_sha 37 | 38 | 39 | def resolve_ref(pr_body: str, repository: str) -> int: 40 | """Get pull request reference number defined with Depends-On. 
41 | 42 | :param pr_body: the pull request body 43 | :param repository: The repository name 44 | :returns: pull request number if it is defined else 0 45 | """ 46 | pr_regx = re.compile( 47 | rf"^Depends-On:[ ]*https://github.com/{repository}/pull/(\d+)\s*$", 48 | re.MULTILINE | re.IGNORECASE, 49 | ) 50 | # Search for expression starting with depends-on not case-sensitive 51 | match = pr_regx.search(pr_body) 52 | return int(match.group(1)) if match else 0 53 | 54 | 55 | def main() -> None: 56 | """Run the script.""" 57 | pr_body = os.environ.get("RESOLVE_REF_PR_BODY") or "" 58 | repository = os.environ.get("RESOLVE_REF_REPOSITORY") or "" 59 | 60 | if not repository: 61 | return 62 | 63 | pr_number = resolve_ref(pr_body, repository) 64 | if not pr_number: 65 | return 66 | logger.info("Override checkout with pr number: %d", pr_number) 67 | 68 | # get pull request merge commit sha 69 | merge_commit_sha = get_pr_merge_commit_sha(repository, pr_number) 70 | logger.info("merge commit sha for pull request %d => '%s'", pr_number, merge_commit_sha) 71 | github_output = os.environ.get("GITHUB_OUTPUT") 72 | if github_output: 73 | with open(str(github_output), "a", encoding="utf-8") as file_handler: 74 | file_handler.write(f"merge_commit_sha={merge_commit_sha}\n") 75 | 76 | 77 | if __name__ == "__main__": 78 | main() 79 | -------------------------------------------------------------------------------- /.github/actions/checkout_dependency/test_resolve_dependency.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Module used to test resolve_dependency.py script.""" 3 | 4 | import os 5 | import string 6 | 7 | from pathlib import PosixPath 8 | from random import choice 9 | from unittest.mock import MagicMock 10 | from unittest.mock import patch 11 | 12 | import pytest 13 | 14 | from resolve_dependency import get_pr_merge_commit_sha 15 | from resolve_dependency import main 16 | from resolve_dependency import resolve_ref 17 | 18 | 19 | @pytest.mark.parametrize( 20 | "pr_body,match", 21 | [ 22 | ("Depends-On: https://github.com/my_org/my_collection/pull/12345", True), 23 | ( 24 | "Depends-On: https://github.com/my_org/my_collection/pull/12345\n" 25 | "Depends-On: https://github.com/my_org/my_collection/pull/67890", 26 | True, 27 | ), 28 | ( 29 | "Depends-On: https://github.com/another_org/my_collection/pull/4000\n" 30 | "Depends-On: https://github.com/my_org/my_collection/pull/12345", 31 | True, 32 | ), 33 | ( 34 | "Depends-On: https://github.com/my_org/my_collection/pull/12345\n" 35 | "Depends-On: https://github.com/my_org/my_collection/pull/67890", 36 | True, 37 | ), 38 | ("Depends-On: https://github.com/another_org/my_collection/pull/12345", False), 39 | ("Depends-On: https://github.com/my_org/my_collection2/pull/12345", False), 40 | ("Depends-On: https://github.com/my_org/my_collection/pull", False), 41 | ], 42 | ) 43 | def test_resolve_ref(pr_body: str, match: bool) -> None: 44 | """Test resolve_ref function. 45 | 46 | :param pr_body: pull request body 47 | :param match: whether a depends-on should be found or not 48 | """ 49 | expected = 12345 if match else 0 50 | assert resolve_ref(pr_body, "my_org/my_collection") == expected 51 | 52 | 53 | class FakePullRequest: 54 | # pylint: disable=too-few-public-methods 55 | """Class to simulate PullRequest Object.""" 56 | 57 | def __init__(self, mergeable: bool) -> None: 58 | """Class constructor. 
59 | 60 | :param mergeable: whether the pull request is mergeable or not 61 | """ 62 | self.mergeable = mergeable 63 | self.merge_commit_sha = self.generate_commit_sha() 64 | 65 | @staticmethod 66 | def generate_commit_sha(length: int = 16) -> str: 67 | """Generate random commit sha. 68 | 69 | :param length: The length of the generated string 70 | :returns: The generated commit sha 71 | """ 72 | data = string.ascii_letters + string.digits 73 | return "".join([choice(data) for _ in range(length)]) 74 | 75 | 76 | @pytest.mark.parametrize("mergeable", [True, False]) 77 | @patch("resolve_dependency.Github") 78 | def test_get_pr_merge_commit_sha(m_github: MagicMock, mergeable: bool) -> None: 79 | """Test get_pr_merge_commit_sha function. 80 | 81 | :param m_github: The github module 82 | :param mergeable: whether the pull request is mergeable or not 83 | """ 84 | github_obj = MagicMock() 85 | m_github.return_value = github_obj 86 | 87 | os.environ["GITHUB_TOKEN"] = "unittest_github_token" 88 | 89 | m_github_repo = MagicMock() 90 | github_obj.get_repo = MagicMock() 91 | github_obj.get_repo.return_value = m_github_repo 92 | 93 | local_pr = FakePullRequest(mergeable=mergeable) 94 | m_github_repo.get_pull = MagicMock() 95 | m_github_repo.get_pull.return_value = local_pr 96 | 97 | repository = "my_testing_repository" 98 | pr_number = 12345 99 | 100 | if mergeable: 101 | assert get_pr_merge_commit_sha(repository, pr_number) == local_pr.merge_commit_sha 102 | else: 103 | with pytest.raises(ValueError): 104 | get_pr_merge_commit_sha(repository, pr_number) 105 | 106 | m_github.assert_called_once_with("unittest_github_token") 107 | github_obj.get_repo.assert_called_once_with(repository) 108 | m_github_repo.get_pull.assert_called_once_with(pr_number) 109 | 110 | 111 | @pytest.mark.parametrize("repository", [True, False]) 112 | @pytest.mark.parametrize("resolve_ref_pr", [0, 1]) 113 | @patch("resolve_dependency.get_pr_merge_commit_sha") 114 | @patch("resolve_dependency.resolve_ref") 115 | def test_main( 116 | m_resolve_ref: MagicMock, 117 | m_get_pr_merge_commit_sha: MagicMock, 118 | repository: bool, 119 | resolve_ref_pr: int, 120 | tmp_path: PosixPath, 121 | ) -> None: 122 | """Test main function. 
123 |
124 | :param m_resolve_ref: The resolve_ref mock function
125 | :param m_get_pr_merge_commit_sha: The get_pr_merge_commit_sha mock function
126 | :param repository: whether the repository is defined in the environment or not
127 | :param resolve_ref_pr: The pull request number
128 | :param tmp_path: The temporary path for file to create for test
129 | """
130 | pr_body = "My pull request body - this is a sample for unit tests"
131 | repository_name = "my_test_repository"
132 | os.environ["RESOLVE_REF_PR_BODY"] = pr_body
133 |
134 | gh_output_file = tmp_path / "github_output.txt"
135 | env_update = {"GITHUB_OUTPUT": str(gh_output_file)}
136 | if repository:
137 | env_update.update({"RESOLVE_REF_REPOSITORY": repository_name})
138 |
139 | m_resolve_ref.return_value = resolve_ref_pr
140 | merge_commit_sha = FakePullRequest.generate_commit_sha()
141 | m_get_pr_merge_commit_sha.return_value = merge_commit_sha
142 |
143 | with patch.dict(os.environ, env_update):
144 | main()
145 |
146 | if not repository:
147 | m_resolve_ref.assert_not_called()
148 | m_get_pr_merge_commit_sha.assert_not_called()
149 | assert not gh_output_file.exists()
150 | elif not resolve_ref_pr:
151 | m_resolve_ref.assert_called_once_with(pr_body, repository_name)
152 | m_get_pr_merge_commit_sha.assert_not_called()
153 | assert not gh_output_file.exists()
154 | else:
155 | m_resolve_ref.assert_called_once_with(pr_body, repository_name)
156 | m_get_pr_merge_commit_sha.assert_called_once_with(repository_name, resolve_ref_pr)
157 | assert gh_output_file.exists()
158 | assert gh_output_file.read_text() == f"merge_commit_sha={merge_commit_sha}\n"
159 | -------------------------------------------------------------------------------- /.github/actions/commit_to_pullrequest/action.yml: --------------------------------------------------------------------------------
1 | name: commit to pull request
2 | description: |
3 | checkout pull request, run provided script and push back changes
4 | The action expects a python script to be executed
5 |
6 | inputs:
7 | token:
8 | description: The Github token to use to perform commit.
9 | required: true
10 | python_libs:
11 | description: |
12 | Required libraries to execute the python script
13 | for example 'pyyaml urllib'
14 | required: false
15 | default: ""
16 | python_executable_url:
17 | description: URL of the script to download to update the repository
18 | required: true
19 | commit_message:
20 | description: commit message
21 | required: false
22 | default: Apply automatic changes
23 | file_pattern:
24 | description: File pattern used for `git add`. For example `src/*.js`
25 | required: false
26 | default: "."
27 | outputs:
28 | changes_detected:
29 | description: Value is "true" if the repository was dirty and file changes have been detected, and "false" if no changes have been detected.
30 | value: ${{ steps.commit.outputs.changes_detected }} 31 | 32 | runs: 33 | using: composite 34 | steps: 35 | - name: checkout fork repository 36 | uses: actions/checkout@v3 37 | with: 38 | repository: ${{ github.event.pull_request.head.repo.full_name }} 39 | ref: ${{ github.head_ref }} 40 | 41 | - name: Set up Python 42 | uses: actions/setup-python@v4 43 | with: 44 | python-version: "3.12" 45 | 46 | - name: Install required python libraries 47 | run: pip install -U ${{ inputs.python_libs }} 48 | shell: bash 49 | if: inputs.python_libs != '' 50 | 51 | - name: Download python script 52 | run: >- 53 | curl -o /tmp/update_repository.py ${{ inputs.python_executable_url }} 54 | shell: bash 55 | 56 | - name: Execute python script 57 | run: >- 58 | python /tmp/update_repository.py 59 | shell: bash 60 | 61 | - name: Commit and push changes 62 | id: commit 63 | run: | 64 | if [[ -n $(git status -s) ]]; then 65 | echo -e "changes detected\n$(git status -s)" 66 | git add -A ${{ inputs.file_pattern }} 67 | git -c user.name="$GIT_USER_NAME" -c user.email="$GIT_USER_EMAIL" commit -m "${{ inputs.commit_message }}" --author="$GIT_AUTHOR" 68 | git push origin ${{ github.head_ref }} 69 | echo "changes_detected=true" >> $GITHUB_OUTPUT 70 | else 71 | echo "changes_detected=false" >> $GITHUB_OUTPUT 72 | fi 73 | shell: bash 74 | env: 75 | GITHUB_TOKEN: ${{ inputs.token }} 76 | GIT_USER_NAME: "github-actions[bot]" 77 | GIT_USER_EMAIL: "41898282+github-actions[bot]@users.noreply.github.com" 78 | GIT_AUTHOR: "${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>" 79 | -------------------------------------------------------------------------------- /.github/actions/create_pullrequest/action.yml: -------------------------------------------------------------------------------- 1 | name: create pull request to Github repository 2 | description: | 3 | Create pull request on Github repository. 4 | 5 | inputs: 6 | token: 7 | description: The Github token to use to create pull request. 8 | required: true 9 | repository: 10 | description: The Github repository 11 | required: false 12 | default: ${{ github.repository }} 13 | head_branch: 14 | description: The pull request head branch. 15 | required: true 16 | base_branch: 17 | description: The pull request base, default to the repository default branch. 18 | required: false 19 | title: 20 | description: The pull request title. 21 | required: true 22 | body: 23 | description: The pull request body. 
24 | required: true
25 |
26 | outputs:
27 | url:
28 | description: The html url of the pull request
29 | value: ${{ steps.create.outputs.url }}
30 | number:
31 | description: The number of the pull request
32 | value: ${{ steps.create.outputs.number }}
33 |
34 | runs:
35 | using: composite
36 | steps:
37 | - name: setup python
38 | uses: actions/setup-python@v4
39 | with:
40 | python-version: "3.12"
41 |
42 | - name: Install python required libraries
43 | run: pip install -U pygithub
44 | shell: bash
45 |
46 | - name: create pull request
47 | id: create
48 | run: >-
49 | python ${{ github.action_path }}/run.py
50 | --repository "${{ inputs.repository }}"
51 | --head "${{ inputs.head_branch }}"
52 | --base "${{ inputs.base_branch }}"
53 | --title "${{ inputs.title }}"
54 | --body "${{ inputs.body }}"
55 | env:
56 | GITHUB_TOKEN: "${{ inputs.token }}"
57 | shell: bash
58 | -------------------------------------------------------------------------------- /.github/actions/create_pullrequest/run.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Script to create a pull request on a Github repository."""
3 |
4 | import logging
5 | import os
6 | import sys
7 |
8 | from argparse import ArgumentParser
9 |
10 | from github import Github
11 | from github import GithubException
12 |
13 |
14 | FORMAT = "[%(asctime)s] - %(message)s"
15 | logging.basicConfig(format=FORMAT)
16 | logger = logging.getLogger(__file__)
17 | logger.setLevel(logging.DEBUG)
18 |
19 |
20 | def main() -> None:
21 | """Create a pull request on a Github repository."""
22 | parser = ArgumentParser(
23 | description="Create a pull request on a Github repository."
24 | )
25 | parser.add_argument("--repository", required=True, help="Repository name.")
26 | parser.add_argument("--head", required=True, help="Pull request head branch.")
27 | parser.add_argument("--base", required=True, help="Pull request base branch.")
28 | parser.add_argument("--title", required=True, help="Pull request title.")
29 | parser.add_argument("--body", required=True, help="Pull request body.")
30 |
31 | args = parser.parse_args()
32 |
33 | access_token = os.environ.get("GITHUB_TOKEN")
34 |
35 | client = Github(access_token)
36 | repo = client.get_repo(args.repository)
37 | try:
38 | pr_obj = repo.create_pull(title=args.title, body=args.body, head=args.head, base=args.base)
39 | except GithubException as err:
40 | logger.error("Failed to create pull request due to: %s", err)
41 | sys.exit(1)
42 |
43 | output = os.environ.get("GITHUB_OUTPUT")
44 | if output:
45 | with open(output, "a", encoding="utf-8") as file_handler:
46 | file_handler.write(f"url={pr_obj.html_url}\n")
47 | file_handler.write(f"number={pr_obj.number}\n")
48 |
49 |
50 | if __name__ == "__main__":
51 | main()
52 | -------------------------------------------------------------------------------- /.github/actions/identify_collection/action.yml: --------------------------------------------------------------------------------
1 | name: Retrieve collection information
2 | description: Extract collection information from the galaxy.yml file
3 |
4 | inputs:
5 | source_path:
6 | description: Path to the collection source root
7 | required: true
8 | outputs:
9 | tar_file:
10 | description: The collection tarball when built
11 | value: ${{ steps.keys.outputs.namespace }}-${{ steps.keys.outputs.name }}-${{ steps.keys.outputs.version }}.tar.gz
12 | collection_path:
13 | description: The final collection path
14 | value: /home/runner/collections/ansible_collections/${{ steps.keys.outputs.namespace }}/${{ steps.keys.outputs.name }}
15 | dependency:
16 | description: The collection dependency
17 | value: ${{ steps.keys.outputs.dependency }}
18 |
19 | runs:
20 | using: composite
21 | steps:
22 | - name: Github action python requirements
23 | run: pip3 install yq
24 | shell: bash
25 |
26 | - name: Extract metadata from galaxy.yml
27 | id: keys
28 | run: |
29 | echo "namespace=$(yq -r '.namespace' 'galaxy.yml')" >> $GITHUB_OUTPUT
30 | echo "name=$(yq -r '.name' 'galaxy.yml')" >> $GITHUB_OUTPUT
31 | echo "version=$(yq -r '.version' 'galaxy.yml')" >> $GITHUB_OUTPUT
32 | echo "dependency=$(yq -r '.dependencies // [] | keys | join(" ")' 'galaxy.yml')" >> $GITHUB_OUTPUT
33 | shell: bash
34 | working-directory: ${{ inputs.source_path }}
35 |
36 | - name: Github action python requirements
37 | run: pip3 uninstall yq -y
38 | shell: bash
39 | -------------------------------------------------------------------------------- /.github/actions/tox/action.yml: --------------------------------------------------------------------------------
1 | name: tox
2 | description: Run the specified tox environment
3 |
4 | inputs:
5 | path:
6 | description: |
7 | The directory containing the 'tox.ini' file in which the tox command is executed.
8 | required: true
9 | tox_extra_args:
10 | description: extra args for tox command
11 | required: false
12 | default: "-vv --skip-missing-interpreters=false"
13 | tox_envlist:
14 | description: tox env list
15 | required: false
16 | default: ""
17 | tox_labellist:
18 | description: tox label list
19 | required: false
20 | default: ""
21 | tox_config_file:
22 | description: tox configuration file
23 | required: false
24 | default: ""
25 | tox_environment:
26 | description: |
27 | list of environment variables to set when running tox.
28 | required: false
29 | default: ""
30 | tox_dependencies:
31 | description: |
32 | Path to dependencies to install into the tox environment prior to running the command.
33 | e.g: '/home/runner/goutelette /home/runner/collection_prep'
34 | required: false
35 | tox_constraints_file:
36 | description: the location of the tox constraints file.
37 | default: ""
38 |
39 | runs:
40 | using: composite
41 | steps:
42 | - name: Ensure 'tox.ini' is present
43 | run: >-
44 | if ! test -f "${{ inputs.path }}/tox.ini"; then
45 | echo "Missing expected tox configuration file '${{ inputs.path }}/tox.ini'"
46 | exit 1
47 | fi
48 | shell: bash
49 |
50 | - name: setup python
51 | uses: actions/setup-python@v4
52 | with:
53 | python-version: "3.10"
54 |
55 | - name: Install tox
56 | run: pip install -U tox
57 | shell: bash
58 |
59 | - name: Emit tox command options
60 | id: py-options
61 | shell: bash
62 | run: |
63 | PY_OPTIONS=""
64 | if [ ! -z "${TOX_CONFIG_FILE}" ]; then
65 | PY_OPTIONS="${PY_OPTIONS} --tox-config-file ${TOX_CONFIG_FILE}"
66 | fi
67 | if [ ! -z "${TOX_ENV_LIST}" ]; then
68 | PY_OPTIONS="${PY_OPTIONS} --tox-envname ${TOX_ENV_LIST}"
69 | fi
70 | if [ ! -z "${TOX_LABEL_LIST}" ]; then
71 | PY_OPTIONS="${PY_OPTIONS} --tox-labelname ${TOX_LABEL_LIST}"
72 | fi
73 | if [ ! -z "${TOX_CONSTRAINTS}" ]; then
74 | PY_OPTIONS="${PY_OPTIONS} --tox-constraints-file ${TOX_CONSTRAINTS}"
75 | fi
76 | if [ !
-z "${TOX_ENVIRONMENT}" ]; then 77 | PY_OPTIONS="${PY_OPTIONS} --tox-env-vars ${TOX_ENVIRONMENT}" 78 | fi 79 | echo "python_args=${PY_OPTIONS}" >> $GITHUB_OUTPUT 80 | env: 81 | TOX_CONFIG_FILE: ${{ inputs.tox_config_file }} 82 | TOX_ENV_LIST: ${{ inputs.tox_envlist }} 83 | TOX_LABEL_LIST: ${{ inputs.tox_labellist }} 84 | TOX_CONSTRAINTS: ${{ inputs.tox_constraints_file }} 85 | TOX_ENVIRONMENT: ${{ inputs.tox_environment }} 86 | 87 | - name: install dependencies packages 88 | run: >- 89 | python3 ${{ github.action_path }}/install_packages.py 90 | --tox-project-dir ${{ inputs.path }} 91 | ${{ steps.py-options.outputs.python_args }} 92 | ${{ inputs.tox_dependencies }} 93 | shell: bash 94 | env: 95 | TOX_EXTRA_ARGS: ${{ inputs.tox_extra_args }} 96 | if: inputs.tox_dependencies != '' 97 | 98 | - name: Emit tox command options 99 | id: tox-options 100 | shell: bash 101 | run: | 102 | TOX_CMD_OPTIONS="" 103 | if [ ! -z "${TOX_CONFIG_FILE}" ]; then 104 | TOX_CMD_OPTIONS="-c ${TOX_CONFIG_FILE}" 105 | fi 106 | if [ ! -z "${TOX_ENV_LIST}" ]; then 107 | TOX_CMD_OPTIONS="${TOX_CMD_OPTIONS} -e ${TOX_ENV_LIST}" 108 | fi 109 | if [ ! -z "${TOX_LABEL_LIST}" ]; then 110 | TOX_CMD_OPTIONS="${TOX_CMD_OPTIONS} -m ${TOX_LABEL_LIST}" 111 | fi 112 | echo "tox_common_args=${TOX_CMD_OPTIONS}" >> $GITHUB_OUTPUT 113 | env: 114 | TOX_CONFIG_FILE: ${{ inputs.tox_config_file }} 115 | TOX_LABEL_LIST: ${{ inputs.tox_labellist }} 116 | TOX_ENV_LIST: ${{ inputs.tox_envlist }} 117 | 118 | - name: Set environment variables 119 | run: echo "${{ inputs.tox_environment }}" >> $GITHUB_ENV 120 | shell: bash 121 | if: inputs.tox_environment != '' 122 | 123 | - name: Run tox 124 | run: >- 125 | tox 126 | ${{ steps.tox-options.outputs.tox_common_args }} 127 | ${{ inputs.tox_extra_args }} 128 | shell: bash 129 | working-directory: ${{ inputs.path }} 130 | -------------------------------------------------------------------------------- /.github/actions/tox/install_packages.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | """Install checkout version of packages into tox environment.""" 3 | 4 | import ast 5 | import logging 6 | import os 7 | import subprocess 8 | import sys 9 | 10 | from argparse import ArgumentParser 11 | from configparser import ConfigParser 12 | from configparser import NoOptionError 13 | from configparser import NoSectionError 14 | from configparser import RawConfigParser 15 | from pathlib import PosixPath 16 | from tempfile import NamedTemporaryFile 17 | from typing import Any 18 | from typing import Optional 19 | 20 | 21 | FORMAT = "[%(asctime)s] - %(message)s" 22 | logging.basicConfig(format=FORMAT) 23 | logger = logging.getLogger("install_sibling") 24 | logger.setLevel(logging.DEBUG) 25 | 26 | 27 | # pylint: disable-next=too-many-arguments 28 | def run_tox_command( 29 | project_dir: PosixPath, 30 | env_name: Optional[str], 31 | label_name: Optional[str], 32 | config_file: Optional[PosixPath], 33 | env_vars: Optional[dict[Any, Any]], 34 | extra_args: list[str], 35 | ) -> str: 36 | """Execute a tox command using subprocess. 37 | 38 | :param project_dir: The location of the project containing tox.ini file. 39 | :param env_name: An optional tox env name. 40 | :param label_name: An optional tox label name. 41 | :param config_file: An optional tox configuration file. 42 | :param env_vars: An optional dictionary of environment to set when running command. 43 | :param extra_args: Tox extra args. 44 | :returns: The output result of the shell command. 
45 | """ 46 | tox_cmd = ["tox"] 47 | if env_name: 48 | tox_cmd.extend(["-e", env_name]) 49 | if label_name: 50 | tox_cmd.extend(["-m", label_name]) 51 | if config_file: 52 | tox_cmd.extend(["-c", str(config_file)]) 53 | if extra_args: 54 | tox_cmd.extend(extra_args) 55 | 56 | logger.info("Running %s cwd=%s, env=%s", tox_cmd, str(project_dir), env_vars) 57 | with subprocess.Popen( 58 | " ".join(tox_cmd), 59 | stdout=subprocess.PIPE, 60 | stderr=subprocess.PIPE, 61 | shell=True, 62 | encoding="utf-8", 63 | cwd=str(project_dir), 64 | env=env_vars, 65 | ) as proc: 66 | out, err = proc.communicate() 67 | if proc.returncode != 0: 68 | logger.error(err) 69 | sys.exit(1) 70 | return out 71 | 72 | 73 | def tox_config_remove_verbose(raw_config: str) -> str: 74 | """Filter out any leading verbose output lines before the config. 75 | 76 | :param raw_config: tox raw config 77 | :returns: A new tox config without verbose 78 | """ 79 | items = raw_config.split("\n") 80 | index = 0 81 | result = "" 82 | for index in range(len(raw_config)): 83 | # Once we see a section heading, we collect all remaining lines 84 | if items[index].startswith("[") and items[index].rstrip().endswith("]"): 85 | result = "\n".join(items[index:]) 86 | break 87 | return result 88 | 89 | 90 | def get_envlist(tox_config: RawConfigParser) -> list[str]: 91 | """Retrieve tox env list from raw config. 92 | 93 | :param tox_config: tox raw config 94 | :returns: A list of tox environment names 95 | """ 96 | envlist = [] 97 | if ( 98 | "tox" in tox_config.sections() 99 | and "env" in tox_config.options("tox") 100 | and "'-e" not in tox_config.get("tox", "args") 101 | ): 102 | envlist_default = ast.literal_eval(tox_config.get("tox", "envlist_default")) 103 | tox_args = ast.literal_eval(tox_config.get("tox", "args")) 104 | if "ALL" in tox_args or not envlist_default: 105 | for section in tox_config.sections(): 106 | if section.startswith("testenv"): 107 | envlist.append(section.split(":")[1]) 108 | else: 109 | for testenv in envlist_default: 110 | envlist.append(testenv) 111 | else: 112 | for section in tox_config.sections(): 113 | if section.startswith("testenv:"): 114 | envlist.append(section.split(":")[1]) 115 | return envlist 116 | 117 | 118 | def read_package_name(path: str, tox_py: str) -> Optional[str]: 119 | """Read package name from from setup.cfg or by running setup.py. 120 | 121 | :param path: the location of the python package 122 | :param tox_py: python executable using to test setup.py 123 | :returns: A python package name 124 | """ 125 | setup_cfg = os.path.join(path, "setup.cfg") 126 | name = None 127 | if os.path.exists(setup_cfg): 128 | config = ConfigParser() 129 | config.read(setup_cfg) 130 | try: 131 | name = config.get("metadata", "name") 132 | except (NoSectionError, NoOptionError): 133 | # Some things have a setup.cfg, but don't keep 134 | # metadata in it; fall back to setup.py below 135 | logger.info("[metadata] name not found in %s, skipping", setup_cfg) 136 | else: 137 | logger.info("%s does not exist", setup_cfg) 138 | setup_py = os.path.join(path, "setup.py") 139 | if not os.path.exists(setup_py): 140 | logger.info("%s does not exist", setup_py) 141 | else: 142 | # It's a python package but doesn't use pbr, so we need to run 143 | # python setup.py --name to get setup.py to tell us what the 144 | # package name is. 
145 | package_name = subprocess.check_output(
146 | [os.path.abspath(tox_py), "setup.py", "--name"],
147 | cwd=path,
148 | # no shell=True: the command is passed as an argv list
149 | stderr=subprocess.STDOUT,
150 | ).decode("utf-8")
151 | if package_name:
152 | name = package_name.strip()
153 | return name
154 |
155 |
156 | def identify_packages(dirs: list[str], tox_py: str) -> dict[str, str]:
157 | """Retrieve package name from provided directories.
158 |
159 | :param dirs: list of python package directories
160 | :param tox_py: python executable used to run setup.py
161 | :returns: A dictionary containing package names and location
162 | """
163 | packages = {}
164 | for path in dirs:
165 | package_name = read_package_name(path, tox_py)
166 | if not package_name:
167 | logger.info("Could not find package name for '%s'", path)
168 | else:
169 | packages[package_name] = path
170 | # Convert a project or version name to its filename-escaped form
171 | # Any '-' characters are currently replaced with '_'.
172 | # Implementation vendored from pkg_resources.to_filename in order to avoid
173 | # adding an extra runtime dependency.
174 | packages[package_name.replace("-", "_")] = path
175 | return packages
176 |
177 |
178 | def find_installed_packages(tox_python: str, packages: dict[str, str]) -> list[str]:
179 | """Find installed packages from python environment.
180 |
181 | :param tox_python: path to python executable
182 | :param packages: dependencies packages to filter
183 | :returns: The list of python packages installed into python environment
184 | """
185 | # We use the output of pip freeze here as that is pip's stable public
186 | # interface.
187 | frozen_pkgs = subprocess.check_output(
188 | [tox_python, "-m", "pip", "-qqq", "freeze"], stderr=subprocess.STDOUT
189 | ).decode("utf-8")
190 | # Matches strings of the form:
191 | # 1. '<package_name>==<version>'
192 | # 2. '# Editable Git install with no remote (<package_name>==<version>)'
193 | # 3. '<package_name> @ <uri>' # PEP440, PEP508, PEP610
194 | # results
195 | installed_packages = []
196 | for item in frozen_pkgs.split("\n"):
197 | if "==" in item:
198 | name = item[item.find("(") + 1 :].split("==")[0]
199 | if name in packages:
200 | installed_packages.append(name)
201 | elif "@" in item:
202 | name = item.split("@")[0].rstrip(" \t")
203 | if name in packages:
204 | installed_packages.append(name)
205 | return installed_packages
206 |
207 |
208 | def create_constraints_file(constraints_file: str, packages: list[str]) -> str:
209 | """Create new constraints file by removing installed dependencies.
210 |
211 | :param constraints_file: tox constraints file
212 | :param packages: dependencies packages
213 | :returns: the path to the new constraints file
214 | """
215 | with NamedTemporaryFile(mode="w", delete=False) as temp_constraints_file:
216 | with open(constraints_file, encoding="utf-8") as file_handler:
217 | constraints_lines = file_handler.read().split("\n")
218 | for line in constraints_lines:
219 | package_name = line.split("===")[0]
220 | if package_name in packages:
221 | continue
222 | temp_constraints_file.write(line)
223 | temp_constraints_file.write("\n")
224 | return temp_constraints_file.name
225 |
226 |
227 | def install_into_env(envdir: str, dirs: list[str], constraints_file: Optional[str]) -> None:
228 | """Install checked-out dependency packages into a tox environment.
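A quick worked example of `create_constraints_file` above: pins for packages that are about to be reinstalled from a local checkout are dropped, everything else is kept. The file name and pins below are made up; the `===` pin style matches what the function splits on.

```python
# Illustrative only: shows which lines survive the filtering above.
with open("constraints.txt", "w", encoding="utf-8") as handle:
    handle.write("ansible-core===2.15.0\nmy-sibling-dep===1.0.0\n")

# "my-sibling-dep" will be reinstalled from a checkout, so its pin is
# removed from the generated constraints file.
new_constraints = create_constraints_file("constraints.txt", ["my-sibling-dep"])
with open(new_constraints, encoding="utf-8") as handle:
    print(handle.read())  # only the ansible-core pin remains
```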
227 | def install_into_env(envdir: str, dirs: list[str], constraints_file: Optional[str]) -> None:
228 |     """Install dependency packages into a tox python environment.
229 |
230 |     :param envdir: the tox environment directory (the virtualenv root)
231 |     :param dirs: the list of project directories to install
232 |     :param constraints_file: tox constraints file
233 |     """
234 |     tox_python = f"{envdir}/bin/python"
235 |
236 |     # identify package dependencies
237 |     packages = identify_packages(dirs, tox_python)
238 |     for name, path in packages.items():
239 |         logger.info("Packages -> name [%s] - path [%s]", name, path)
240 |
241 |     # find which of these packages are already installed in the env
242 |     installed_packages = find_installed_packages(tox_python, packages)
243 |     logger.info("installed packages => %s", installed_packages)
244 |
245 |     tmp_constraints_file = None
246 |     if constraints_file:
247 |         tmp_constraints_file = create_constraints_file(constraints_file, installed_packages)
248 |
249 |     for name in installed_packages:
250 |         # uninstall the pinned release of the package first
251 |         uninstall_cmd = [tox_python, "-m", "pip", "uninstall", "-y", name]
252 |         logger.info("Uninstalling package '%s' using %s", name, uninstall_cmd)
253 |         uninstall_output = subprocess.check_output(uninstall_cmd)
254 |         logger.info(uninstall_output.decode("utf-8"))
255 |
256 |         install_cmd = [tox_python, "-m", "pip", "install"]
257 |         if tmp_constraints_file:
258 |             install_cmd.extend(["-c", tmp_constraints_file])
259 |
260 |         package_dir = packages[name]
261 |         install_cmd.append(package_dir)
262 |         logger.info(
263 |             "Installing package '%s' from '%s' for deps using %s",
264 |             name,
265 |             package_dir,
266 |             install_cmd,
267 |         )
268 |         install_output = subprocess.check_output(install_cmd)
269 |         logger.info(install_output.decode("utf-8"))
270 |
271 |     for name in installed_packages:
272 |         package_dir = packages[name]
273 |         command = [tox_python, "-m", "pip", "install", "--no-deps", package_dir]
274 |         logger.info("Installing '%s' from '%s' using %s", name, package_dir, command)
275 |         install_output = subprocess.check_output(command)
276 |         logger.info(install_output.decode("utf-8"))
277 |
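# In effect, for each dependency that is both checked out locally and already
# installed in the tox env, install_into_env() runs something like (paths are
# illustrative):
#
#     .tox/venv/bin/python -m pip uninstall -y example-pkg
#     .tox/venv/bin/python -m pip install -c /tmp/constraints.txt /src/example-pkg
#     .tox/venv/bin/python -m pip install --no-deps /src/example-pkg
#
# The first install resolves the package's dependencies under the pruned
# constraints; the final --no-deps pass reinstalls the checked-out source
# itself so pip cannot swap it back for a release from the index.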
278 |
279 | def install_packages(
280 |     projects: list[str],
281 |     tox_raw_config: str,
282 |     tox_envname: Optional[str],
283 |     constraints_file: Optional[str],
284 | ) -> None:
285 |     """Install dependency packages into a tox env.
286 |
287 |     :param projects: the list of project directories
288 |     :param tox_raw_config: tox raw config
289 |     :param tox_envname: tox env name
290 |     :param constraints_file: tox constraints file
291 |     """
292 |     tox_config = RawConfigParser()
293 |     tox_config.read_string(tox_config_remove_verbose(tox_raw_config))
294 |
295 |     envlist = get_envlist(tox_config)
296 |     logger.info("env list => %s", envlist)
297 |     if not envlist:
298 |         return
299 |
300 |     for testenv in envlist:
301 |         envname = f"testenv:{testenv}"
302 |         if tox_envname and tox_envname not in (envname, testenv):
303 |             continue
304 |         envdir, envlogdir = None, None
305 |         for key in ("envdir", "env_dir"):
306 |             if tox_config.has_option(envname, key):
307 |                 envdir = tox_config.get(envname, key)
308 |                 break
309 |         for key in ("envlogdir", "env_log_dir"):
310 |             if tox_config.has_option(envname, key):
311 |                 envlogdir = tox_config.get(envname, key)
312 |                 break
313 |         if not envdir or not envlogdir:
314 |             logger.error("Unable to find tox env directories for envname -> '%s'", envname)
315 |             sys.exit(1)
316 |         logger.info("installing packages into env '%s', envdir='%s'", envname, envdir)
317 |         install_into_env(envdir, projects, constraints_file)
318 |
319 |
320 | def main() -> None:
321 |     """Read input parameters and install packages."""
322 |     parser = ArgumentParser(
323 |         description="Install checked-out versions of packages into a tox environment."
324 |     )
325 |     parser.add_argument(
326 |         "--tox-config-file", type=PosixPath, help="the location of the tox configuration file"
327 |     )
328 |     parser.add_argument("--tox-envname", help="the tox env name.")
329 |     parser.add_argument("--tox-labelname", help="the tox label name.")
330 |     parser.add_argument(
331 |         "--tox-project-dir", default=".", help="the location of the project containing the tox.ini file"
332 |     )
333 |     parser.add_argument(
334 |         "--tox-env-vars",
335 |         default="",
336 |         help="environment variables to set when running the tox command, e.g. env1=value1\nenv2=value2",
337 |     )
338 |     parser.add_argument(
339 |         "--tox-constraints-file", type=PosixPath, help="the location of the tox constraints file."
340 | ) 341 | parser.add_argument( 342 | "tox_packages", 343 | default=[], 344 | nargs="+", 345 | help="the location of the package to install", 346 | ) 347 | 348 | args = parser.parse_args() 349 | tox_extra_args = os.environ.get("TOX_EXTRA_ARGS") 350 | 351 | # parse tox environment variables 352 | tox_environment = { 353 | x.split("=", maxsplit=1)[0]: x.split("=", maxsplit=1)[1] 354 | for x in args.tox_env_vars.split("\n") 355 | if x 356 | } or None 357 | 358 | # Run tox without test 359 | extra_args = ["--notest"] 360 | if tox_extra_args: 361 | extra_args.append(tox_extra_args) 362 | run_tox_command( 363 | args.tox_project_dir, 364 | args.tox_envname, 365 | args.tox_labelname, 366 | args.tox_config_file, 367 | tox_environment, 368 | extra_args, 369 | ) 370 | 371 | # show environment config 372 | extra_args = ["--showconfig"] 373 | tox_raw_config = run_tox_command( 374 | args.tox_project_dir, 375 | args.tox_envname, 376 | args.tox_labelname, 377 | args.tox_config_file, 378 | tox_environment, 379 | extra_args, 380 | ) 381 | logger.info("Show config => %s", tox_raw_config) 382 | 383 | # install dependencies packages 384 | projects_dir = [os.path.abspath(path) for path in args.tox_packages] 385 | logger.info("Packages dirs -> %s", projects_dir) 386 | install_packages(projects_dir, tox_raw_config, args.tox_envname, args.tox_constraints_file) 387 | 388 | 389 | if __name__ == "__main__": 390 | main() 391 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | updates: 4 | - package-ecosystem: github-actions 5 | directory: / 6 | schedule: 7 | interval: daily 8 | -------------------------------------------------------------------------------- /.github/workflows/ansible-lint.yml: -------------------------------------------------------------------------------- 1 | name: ansible-lint 2 | on: 3 | workflow_call: 4 | 5 | jobs: 6 | ansible-lint: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 10 | - name: Run ansible-lint 11 | uses: ansible/ansible-lint@main 12 | -------------------------------------------------------------------------------- /.github/workflows/backport-labeller.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Apply labels for backporting 3 | 4 | on: 5 | workflow_call: 6 | inputs: 7 | label_major_release: 8 | default: "do_not_backport" 9 | required: false 10 | type: string 11 | description: | 12 | The label to apply if the PR includes a change that would necessitate a major release. 13 | label_minor_release: 14 | required: true 15 | type: string 16 | description: | 17 | The label to apply if the PR only includes a change that would necessitate a minor 18 | release. This will also be applied by default if the PR doesn't necessitate a major 19 | release. 20 | label_bugfix_release: 21 | required: true 22 | type: string 23 | description: | 24 | The label to apply if the PR only includes a bugfix or security release. 25 | label_skip: 26 | default: "do_not_backport" 27 | required: false 28 | type: string 29 | description: | 30 | If this label has been applied, then the PR will not be re-assessed. 31 | label_mergeit: 32 | default: "mergeit" 33 | required: false 34 | type: string 35 | description: | 36 | Which label will be used to trigger labelling for minor/bugfix backporting. 
37 | We look for major releases when a change is pushed, and minor/bugfixes when the PR 38 | has been approved for merging and the mergeit label has been applied to trigger 39 | the merge. 40 | 41 | jobs: 42 | changelog-types: 43 | # We always skip if do_not_backport has previously been applied. 44 | # Otherwise, if someone applies 'mergeit', opens the PR, or pushes a new commit 45 | # we'll examine the contents of changelog fragments to try to guess the best backport strategy. 46 | if: ${{ 47 | ! contains(github.event.pull_request.labels.*.name, inputs.label_skip) 48 | && ( 49 | (github.event.action == 'labeled' && github.event.label.name == inputs.label_mergeit) 50 | || (github.event.action == 'synchronize') 51 | || (github.event.action == 'opened') 52 | ) 53 | }} 54 | permissions: 55 | pull-requests: read 56 | runs-on: ubuntu-latest 57 | outputs: 58 | no_backport: ${{ steps.evaluate.outputs.major_release }} 59 | bugfix: ${{ steps.evaluate.outputs.bugfix_release }} 60 | minor_only: ${{ steps.evaluate.outputs.minor_release }} 61 | steps: 62 | - name: Evaluate change types 63 | id: evaluate 64 | uses: ansible-network/github_actions/.github/actions/changelog_evaluator@main 65 | 66 | changelog-labeling: 67 | permissions: 68 | pull-requests: write 69 | runs-on: ubuntu-latest 70 | needs: 71 | - changelog-types 72 | steps: 73 | - name: Strip tags for backporting and apply do_not_backport 74 | id: no-backport 75 | # If breaking_changes or major_changes are pushed, then we always apply do_not_backport 76 | # and strip any existing backport-* labels 77 | if: ${{ needs.changelog-types.outputs.no_backport == '0' }} 78 | uses: ansible-network/github_actions/.github/actions/changelog_labeller@main 79 | with: 80 | purge_labels: true 81 | label_to_add: ${{ inputs.label_major_release }} 82 | 83 | - name: Apply tag for backporting to at least the most recent major release 84 | id: minor-only 85 | if: ${{ 86 | (github.event.action == 'labeled' && github.event.label.name == inputs.label_mergeit ) 87 | && ! ( needs.changelog-types.outputs.no_backport == '0' ) 88 | && ( 89 | ( needs.changelog-types.outputs.minor_only == '0' ) 90 | || ! (needs.changelog-types.outputs.bugfix == '0' ) 91 | ) 92 | }} 93 | uses: ansible-network/github_actions/.github/actions/changelog_labeller@main 94 | with: 95 | label_to_add: ${{ inputs.label_minor_release }} 96 | 97 | - name: Apply tag for backporting to at least the two most recent major releases 98 | id: security-or-bugfix 99 | if: ${{ 100 | (github.event.action == 'labeled' && github.event.label.name == inputs.label_mergeit ) 101 | && ! ( needs.changelog-types.outputs.no_backport == '0' ) 102 | && ! ( needs.changelog-types.outputs.minor_only == '0' ) 103 | && ( needs.changelog-types.outputs.bugfix == '0' ) 104 | }} 105 | uses: ansible-network/github_actions/.github/actions/changelog_labeller@main 106 | with: 107 | label_to_add: ${{ inputs.label_minor_release }},${{ inputs.label_bugfix_release }} 108 | -------------------------------------------------------------------------------- /.github/workflows/changelog.yml: -------------------------------------------------------------------------------- 1 | name: Changelog required 2 | on: 3 | workflow_call: 4 | inputs: 5 | custom_paths: 6 | description: | 7 | A comma-separated list of custom paths from which any modified file 8 | will require a changelog. 
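        # e.g. (hypothetical): custom_paths: "plugins/modules/,roles/my_role/"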
9 | required: false 10 | type: string 11 | default: "" 12 | jobs: 13 | changelog: 14 | runs-on: ubuntu-latest 15 | name: Require a changelog 16 | if: "!contains(github.event.pull_request.labels.*.name, 'skip-changelog')" 17 | steps: 18 | - name: Checkout the collection repository 19 | uses: actions/checkout@v3 20 | with: 21 | ref: ${{ github.event.pull_request.head.sha }} 22 | fetch-depth: "0" 23 | 24 | - name: Validate changelog 25 | uses: ansible-network/github_actions/.github/actions/ansible_validate_changelog@main 26 | with: 27 | custom_paths: ${{ inputs.custom_paths }} 28 | -------------------------------------------------------------------------------- /.github/workflows/coverage_network_devices.yml: -------------------------------------------------------------------------------- 1 | name: CodeCoverage 2 | on: 3 | workflow_call: 4 | inputs: 5 | collection_pre_install: 6 | required: true 7 | type: string 8 | jobs: 9 | codecoverage: 10 | env: 11 | PY_COLORS: "1" 12 | source_directory: "./source" 13 | python_version: "3.10" 14 | ansible_version: "latest" 15 | os: "ubuntu-latest" 16 | runs-on: ubuntu-latest 17 | 18 | name: "Code Coverage | Python 3.10" 19 | steps: 20 | - name: Checkout the collection repository 21 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 22 | with: 23 | path: ${{ env.source_directory }} 24 | ref: ${{ github.event.pull_request.head.sha }} 25 | fetch-depth: "0" 26 | 27 | - name: Set up Python ${{ env.python_version }} 28 | uses: actions/setup-python@v4 29 | with: 30 | python-version: ${{ env.python_version }} 31 | 32 | - name: Install ansible-core (${{ env.ansible-version }}) 33 | run: python3 -m pip install ansible-core pytest pytest-cov pytest-ansible-units pytest-forked pytest-xdist 34 | 35 | - name: Read collection metadata from galaxy.yml 36 | id: identify 37 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 38 | with: 39 | source_path: ${{ env.source_directory }} 40 | 41 | - name: Build and install the collection 42 | uses: ansible-network/github_actions/.github/actions/build_install_collection@main 43 | with: 44 | install_python_dependencies: true 45 | source_path: ${{ env.source_directory }} 46 | collection_path: ${{ steps.identify.outputs.collection_path }} 47 | tar_file: ${{ steps.identify.outputs.tar_file }} 48 | 49 | - name: Print the ansible version 50 | run: ansible --version 51 | 52 | - name: Print the python dependencies 53 | run: python3 -m pip list 54 | 55 | - name: Run Coverage tests 56 | run: | 57 | pytest tests/unit -v --cov-report xml --cov=./ 58 | working-directory: ${{ steps.identify.outputs.collection_path }} 59 | 60 | - name: Upload coverage report to Codecov 61 | uses: codecov/codecov-action@v3 62 | with: 63 | directory: ${{ steps.identify.outputs.collection_path }} 64 | fail_ci_if_error: false 65 | -------------------------------------------------------------------------------- /.github/workflows/galaxy_importer.yml: -------------------------------------------------------------------------------- 1 | name: galaxy importer 2 | 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | galaxy: 8 | runs-on: ubuntu-latest 9 | name: importer 10 | env: 11 | source_path: "./source" 12 | importer_path: "./importer" 13 | steps: 14 | - name: checkout collection 15 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 16 | with: 17 | path: ${{ env.source_path }} 18 | ref: ${{ github.event.pull_request.head.sha }} 19 | 20 | - name: Read collection metadata from galaxy.yml 21 | 
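        # identify_collection reads namespace/name/version from galaxy.yml and
        # exposes them as outputs; e.g. (hypothetical values) tar_file might be
        # "cisco-ios-1.0.0.tar.gz". The build and import steps below rely on
        # these outputs.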
id: identify 22 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 23 | with: 24 | source_path: ${{ env.source_path }} 25 | 26 | - name: Build collection 27 | run: ansible-galaxy collection build -vvv 28 | shell: bash 29 | working-directory: ${{ env.source_path }} 30 | 31 | - name: checkout ansible-network/releases 32 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 33 | with: 34 | repository: ansible-network/releases 35 | path: ${{ env.importer_path }} 36 | ref: master 37 | 38 | - name: Create tox environment 39 | uses: ansible-network/github_actions/.github/actions/tox@main 40 | with: 41 | path: ${{ env.importer_path }} 42 | tox_envlist: venv 43 | tox_extra_args: "-vv --notest" 44 | 45 | - name: Confirm collection can be imported into galaxy 46 | run: | 47 | source .tox/venv/bin/activate 48 | ./tools/validate-collection.sh ${COLLECTION_TARBALL} 49 | shell: bash 50 | env: 51 | COLLECTION_TARBALL: "${{ github.workspace }}/${{ env.source_path }}/${{ steps.identify.outputs.tar_file }}" 52 | GALAXY_IMPORTER_CONFIG: "${{ github.workspace }}/${{ env.source_path }}/tests/galaxy-importer.cfg" 53 | working-directory: ${{ env.importer_path }} 54 | -------------------------------------------------------------------------------- /.github/workflows/integration.yml: -------------------------------------------------------------------------------- 1 | name: Integration tests 2 | on: 3 | workflow_call: 4 | inputs: 5 | collection_pre_install: 6 | required: false 7 | type: string 8 | default: "" 9 | lab_title: 10 | required: false 11 | type: string 12 | default: ${{ github.event.repository.name }} 13 | cml_lab: 14 | default: tests/integration/labs/single.yaml 15 | required: false 16 | type: string 17 | integration_test_path: 18 | default: tests/integration/targets 19 | required: false 20 | type: string 21 | network_os: 22 | required: true 23 | type: string 24 | pytest_addopts: 25 | default: "" 26 | required: false 27 | type: string 28 | 29 | secrets: 30 | cml_ssh_password: 31 | required: true 32 | virl_password: 33 | required: true 34 | virl_host: 35 | required: true 36 | 37 | jobs: 38 | integration: 39 | env: 40 | PY_COLORS: "1" 41 | source_directory: "./source" 42 | dependency_directory: "./dependency" 43 | runs-on: ubuntu-latest 44 | strategy: 45 | fail-fast: false 46 | matrix: 47 | ansible-version: 48 | - stable-2.16 49 | # - stable-2.15 50 | # - milestone 51 | # - devel 52 | python-version: 53 | - "3.11" 54 | dependency-source: 55 | - github 56 | 57 | name: "py${{ matrix.python-version }} / ${{ matrix.ansible-version }} / ${{ matrix.dependency-source }}" 58 | steps: 59 | - name: Checkout the collection repository 60 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 61 | with: 62 | path: ${{ env.source_directory }} 63 | ref: ${{ github.event.pull_request.head.sha }} 64 | fetch-depth: "0" 65 | 66 | - name: Set up Python ${{ matrix.python-version }} 67 | uses: actions/setup-python@v4 68 | with: 69 | python-version: ${{ matrix.python-version }} 70 | 71 | - name: Install wheel now for faster builds 72 | run: python3 -m pip install wheel --upgrade 73 | 74 | - name: Install ansible-core (${{ matrix.ansible-version }}) 75 | run: python3 -m pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check 76 | 77 | - name: Pre install collections dependencies first so the collection install does not 78 | run: ansible-galaxy collection install --pre ${{ 
inputs.collection_pre_install }} -p /home/runner/collections 79 | if: inputs.collection_pre_install != '' 80 | 81 | - name: Read collection metadata from galaxy.yml 82 | id: identify 83 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 84 | with: 85 | source_path: ${{ env.source_directory }} 86 | 87 | - name: Build and install the collection 88 | uses: ansible-network/github_actions/.github/actions/build_install_collection@main 89 | with: 90 | install_python_dependencies: true 91 | source_path: ${{ env.source_directory }} 92 | collection_path: ${{ steps.identify.outputs.collection_path }} 93 | tar_file: ${{ steps.identify.outputs.tar_file }} 94 | ansible_version: ${{ matrix.ansible-version }} 95 | 96 | - name: Print the ansible version 97 | run: ansible --version 98 | 99 | - name: Install the integration test dependency 100 | run: python3 -m pip install git+https://github.com/ansible-network/pytest-ansible-network-integration.git 101 | 102 | - name: Clear current lab id if any 103 | run: "cml clear" 104 | continue-on-error: true 105 | env: 106 | CML_VERIFY_CERT: False 107 | VIRL_HOST: ${{ secrets.virl_host }} 108 | VIRL_PASSWORD: ${{ secrets.virl_password }} 109 | VIRL_USERNAME: admin 110 | 111 | - name: Create the lab title 112 | run: | 113 | if [[ "${{ github.event_name }}" == 'pull_request_target' ]]; then 114 | echo "CLABTITLE=${{ inputs.lab_title }}_PR${{ github.event.pull_request.number }}" >> $GITHUB_ENV 115 | elif [[ "${{ github.event_name }}" == 'workflow_dispatch' ]]; then 116 | shashort=$(git rev-parse --short HEAD) 117 | uuidval=$(uuidgen | cut -c 1-8) 118 | echo "CLABTITLE=${{ inputs.lab_title }}_${shashort}_${uuidval}" >> $GITHUB_ENV 119 | fi 120 | 121 | - name: Print the lab title 122 | run: echo ${{ env.CLABTITLE }} 123 | 124 | - name: Add Lab Title to the lab file 125 | run: >- 126 | sed -i "s/title: ${{ inputs.network_os }}/title: ${{ env.CLABTITLE }}/" ${{ inputs.cml_lab }} 127 | working-directory: ${{ steps.identify.outputs.collection_path }} 128 | 129 | - name: Run integration tests 130 | run: >- 131 | python3 -m pytest tests/integration 132 | --integration-tests-path ${{ inputs.integration_test_path }} 133 | --cml-lab ${{ inputs.cml_lab }} 134 | ${{ inputs.pytest_addopts }} 135 | env: 136 | ANSIBLE_FORCE_COLOR: "1" 137 | ANSIBLE_NETWORK_OS: ${{ inputs.network_os }} 138 | CML_SSH_PASSWORD: ${{ secrets.cml_ssh_password }} 139 | CML_SSH_PORT: 1122 140 | CML_SSH_USER: sysadmin 141 | VIRL_HOST: ${{ secrets.virl_host }} 142 | VIRL_PASSWORD: ${{ secrets.virl_password }} 143 | VIRL_USERNAME: admin 144 | working-directory: ${{ steps.identify.outputs.collection_path }} 145 | 146 | - name: Attempt to remove any labs in case the workflow was cancelled 147 | if: cancelled() 148 | run: echo ${{ env.CML_LABS }} | tr -d '\n' | xargs -d ',' -i sh -c 'cml use --id {} && cml rm --force --no-confirm' 149 | continue-on-error: true 150 | env: 151 | CML_VERIFY_CERT: False 152 | VIRL_HOST: ${{ secrets.virl_host }} 153 | VIRL_PASSWORD: ${{ secrets.virl_password }} 154 | VIRL_USERNAME: admin 155 | 156 | - name: Upload logs 157 | if: always() 158 | uses: actions/upload-artifact@v3 159 | with: 160 | name: logs 161 | path: /home/runner/test_logs/ 162 | -------------------------------------------------------------------------------- /.github/workflows/integration_simple.yml: -------------------------------------------------------------------------------- 1 | name: Integration tests, no CML, dependencies from galaxy 2 | on: 3 | workflow_call: 4 | inputs: 5 | 
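    # The inputs below that carry JSON (matrix_exclude) are plain strings;
    # the job decodes them with fromJSON() in strategy.matrix. A caller can
    # override the default, e.g. (hypothetical):
    #   matrix_exclude: '[{"ansible-version": "devel", "python-version": "3.10"}]'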
integration_test_path: 6 | default: tests/integration/targets 7 | required: false 8 | type: string 9 | matrix_exclude: 10 | # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix 11 | # 2.15 supports Python 3.9-3.11 12 | # 2.16 supports Python 3.10-3.11 13 | # 2.17 supports Python 3.10-3.12 14 | # 2.18 supports Python 3.11-3.13 15 | # 2.19 supports Python 3.11-3.13 16 | # support for Python 3.13 added and 3.10 removed in 2.18 for control node 17 | # target node supported Python 3.8-3.13 as of 2.18 and 2.19 18 | # milestone is and devel is switched to 2.20 19 | # https://docs.ansible.com/ansible/devel/roadmap/ROADMAP_2_18.html 20 | default: >- 21 | [ 22 | { 23 | "ansible-version": "devel", 24 | "python-version": "3.10" 25 | }, 26 | { 27 | "ansible-version": "milestone", 28 | "python-version": "3.10" 29 | }, 30 | { 31 | "ansible-version": "stable-2.19", 32 | "python-version": "3.10" 33 | }, 34 | { 35 | "ansible-version": "stable-2.18", 36 | "python-version": "3.10" 37 | }, 38 | { 39 | "ansible-version": "stable-2.17", 40 | "python-version": "3.13" 41 | }, 42 | { 43 | "ansible-version": "stable-2.16", 44 | "python-version": "3.12" 45 | }, 46 | { 47 | "ansible-version": "stable-2.16", 48 | "python-version": "3.13" 49 | } 50 | ] 51 | required: false 52 | type: string 53 | pytest_addopts: 54 | default: "" 55 | required: false 56 | type: string 57 | secrets: 58 | GH_TOKEN: 59 | required: false 60 | 61 | jobs: 62 | integration: 63 | env: 64 | PY_COLORS: "1" 65 | source_directory: "./source" 66 | runs-on: ubuntu-latest 67 | strategy: 68 | fail-fast: false 69 | matrix: 70 | ansible-version: 71 | # ansible-core 2.15 reached EOL on November 2024 72 | # ansible-core 2.16 will reach EOL on May 2025 73 | - stable-2.16 74 | - stable-2.17 75 | - stable-2.18 76 | - stable-2.19 77 | - milestone 78 | - devel 79 | python-version: 80 | # 2.16 supports Python 3.10-3.11 81 | # 2.17 supports Python 3.10-3.12 82 | # 2.18 supports Python 3.11-3.13 83 | - "3.10" 84 | - "3.11" 85 | - "3.12" 86 | - "3.13" 87 | exclude: ${{ fromJSON(inputs.matrix_exclude) }} 88 | 89 | name: "py${{ matrix.python-version }} / ${{ matrix.ansible-version }}" 90 | steps: 91 | - name: Checkout the collection repository 92 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 93 | with: 94 | path: ${{ env.source_directory }} 95 | ref: ${{ github.event.pull_request.head.sha }} 96 | fetch-depth: "0" 97 | 98 | - name: Set up Python ${{ matrix.python-version }} 99 | uses: actions/setup-python@v4 100 | with: 101 | python-version: ${{ matrix.python-version }} 102 | 103 | - name: Install wheel now for faster builds 104 | run: python3 -m pip install wheel --upgrade 105 | 106 | - name: Install ansible-core (${{ matrix.ansible-version }}) 107 | run: python3 -m pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check 108 | 109 | - name: Read collection metadata from galaxy.yml 110 | id: identify 111 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 112 | with: 113 | source_path: ${{ env.source_directory }} 114 | 115 | - name: Build and install the collection 116 | uses: ansible-network/github_actions/.github/actions/build_install_collection@main 117 | with: 118 | install_python_dependencies: true 119 | source_path: ${{ env.source_directory }} 120 | collection_path: ${{ steps.identify.outputs.collection_path }} 121 | tar_file: ${{ steps.identify.outputs.tar_file }} 122 | 
ansible_version: ${{ matrix.ansible-version }} 123 | 124 | - name: Print the ansible version 125 | run: ansible --version 126 | 127 | - name: Print the python dependencies 128 | run: python3 -m pip list 129 | 130 | - name: Run integration tests 131 | run: >- 132 | python -m pytest tests/integration 133 | ${{ inputs.pytest_addopts }} 134 | env: 135 | ANSIBLE_FORCE_COLOR: "1" 136 | GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} 137 | working-directory: ${{ steps.identify.outputs.collection_path }} 138 | 139 | - name: Upload logs 140 | if: always() 141 | uses: actions/upload-artifact@v3 142 | with: 143 | name: logs 144 | path: /home/runner/test_logs/ 145 | -------------------------------------------------------------------------------- /.github/workflows/release-branch.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | version: 7 | description: The release version to create. 8 | required: true 9 | type: string 10 | secrets: 11 | GH_TOKEN: 12 | description: The Github token to use 13 | required: true 14 | 15 | jobs: 16 | release: 17 | runs-on: ubuntu-latest 18 | permissions: 19 | contents: write 20 | pull-requests: write 21 | 22 | steps: 23 | - name: Checkout the repository 24 | uses: actions/checkout@v3 25 | with: 26 | fetch-depth: "0" 27 | 28 | - name: Validate version format 29 | run: |- 30 | python -c "import os, re, sys; 31 | version=os.environ.get('RELEASE_VERSION'); 32 | print('version <%s> is matching expecting format' % version) if re.match(r'^[0-9]+\.[0-9]+\.[0-9]+$', version) else sys.exit(1)" 33 | shell: bash 34 | env: 35 | RELEASE_VERSION: ${{ inputs.version }} 36 | 37 | - name: Create release branch on Github repository 38 | id: create-branch 39 | run: | 40 | R_BRANCH="stable-$(echo ${RELEASE_VERSION} | cut -d '.' 
-f1)" 41 | D_BRANCH=$(git remote show origin | sed -n '/HEAD branch/s/.*: //p') 42 | echo "release_branch=$R_BRANCH" >> $GITHUB_OUTPUT 43 | git checkout $D_BRANCH 44 | git checkout -b $R_BRANCH && git push origin $R_BRANCH || git checkout $R_BRANCH 45 | shell: bash 46 | env: 47 | GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} 48 | RELEASE_VERSION: ${{ inputs.version }} 49 | 50 | - name: setup python 51 | uses: actions/setup-python@v4 52 | with: 53 | python-version: "3.12" 54 | 55 | - name: Install required python modules 56 | run: pip3 install tox yq 57 | shell: bash 58 | 59 | - name: Prepare release 60 | run: tox -e prepare_release -vv 61 | shell: bash 62 | env: 63 | RELEASE_VERSION: ${{ inputs.version }} 64 | 65 | - name: Update galaxy.yml file 66 | run: yq -yi ".version = \"$RELEASE_VERSION\"" galaxy.yml 67 | shell: bash 68 | env: 69 | RELEASE_VERSION: ${{ inputs.version }} 70 | 71 | - name: Push changes to branch on Github repository 72 | id: push-changes 73 | run: | 74 | git checkout -b "prepare_release_${RELEASE_VERSION}" 75 | git add -A 76 | git -c user.name="$GIT_USER_NAME" -c user.email="$GIT_USER_EMAIL" commit -m "Release ${{ inputs.version }}" --author="$GIT_AUTHOR" 77 | git push origin "prepare_release_${RELEASE_VERSION}" 78 | echo "created_branch=prepare_release_${RELEASE_VERSION}" >> $GITHUB_OUTPUT 79 | shell: bash 80 | env: 81 | RELEASE_VERSION: ${{ inputs.version }} 82 | GITHUB_TOKEN: ${{ secrets.GH_TOKEN }} 83 | GIT_USER_NAME: "github-actions[bot]" 84 | GIT_USER_EMAIL: "41898282+github-actions[bot]@users.noreply.github.com" 85 | GIT_AUTHOR: "${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>" 86 | 87 | - name: Create Pull Request 88 | uses: ansible-network/github_actions/.github/actions/create_pullrequest@main 89 | with: 90 | token: ${{ secrets.GH_TOKEN }} 91 | repository: ${{ github.repository }} 92 | base_branch: ${{ steps.create-branch.outputs.release_branch }} 93 | head_branch: ${{ steps.push-changes.outputs.created_branch }} 94 | title: "Prepare release ${{ inputs.version }}" 95 | body: "Automatic changes for Release ${{ inputs.version }}" 96 | -------------------------------------------------------------------------------- /.github/workflows/release-tag.yml: -------------------------------------------------------------------------------- 1 | name: tagging 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | branch_name: 7 | description: The branch name to create tag from. 8 | type: string 9 | default: ${{ github.base_ref }} 10 | release_publish: 11 | default: true 12 | description: Publish a new github release. 13 | type: boolean 14 | release_prefix: 15 | default: "Release" 16 | description: Prefix name of the release to create. 
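      # Overall flow: read the version that the release PR wrote into
      # galaxy.yml (via yq), push it as a git tag, open a PR from the
      # release branch back to the default branch, and optionally publish
      # a GitHub release whose body is parsed from CHANGELOG.rst by
      # scripts/create_github_release.py.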
17 |       type: string
18 |   secrets:
19 |     # due to https://github.com/ad-m/github-push-action/issues/32 we are not using the default GITHUB_TOKEN
20 |     gh_token:
21 |       required: true
22 |
23 | jobs:
24 |   push:
25 |     if: ${{ github.event.pull_request.merged == true && contains(github.event.pull_request.labels.*.name, 'ok-to-tag') }}
26 |     runs-on: ubuntu-latest
27 |     permissions:
28 |       contents: write
29 |       pull-requests: write
30 |
31 |     steps:
32 |       - name: Checkout the repository
33 |         uses: actions/checkout@v3
34 |         with:
35 |           ref: ${{ inputs.branch_name }}
36 |           token: ${{ secrets.gh_token }}
37 |
38 |       - name: setup python
39 |         uses: actions/setup-python@v4
40 |         with:
41 |           python-version: "3.12"
42 |
43 |       - name: install python libraries
44 |         run: pip3 install yq pygithub
45 |         shell: bash
46 |
47 |       - name: extract tag name from 'galaxy.yml'
48 |         id: read-tag
49 |         run: echo "release_tag=$(yq -r '.version' 'galaxy.yml')" >> $GITHUB_OUTPUT
50 |         shell: bash
51 |
52 |       - name: create and push tag to Github repository
53 |         id: push-tag
54 |         run: |
55 |           git tag ${RELEASE_TAG}
56 |           git push origin ${RELEASE_TAG}
57 |           # read the repository default branch
58 |           GIT_DEFAULT_BRANCH=$(git remote show origin | sed -n '/HEAD branch/s/.*: //p')
59 |           echo "default_branch=$GIT_DEFAULT_BRANCH" >> $GITHUB_OUTPUT
60 |         shell: bash
61 |         env:
62 |           RELEASE_TAG: ${{ steps.read-tag.outputs.release_tag }}
63 |           GITHUB_TOKEN: ${{ secrets.gh_token }}
64 |
65 |       - name: Create Pull Request from Release branch to default branch
66 |         uses: ansible-network/github_actions/.github/actions/create_pullrequest@main
67 |         with:
68 |           token: ${{ secrets.gh_token }}
69 |           repository: ${{ github.repository }}
70 |           base_branch: ${{ steps.push-tag.outputs.default_branch }}
71 |           head_branch: ${{ inputs.branch_name }}
72 |           title: "Push changes for release '${{ steps.read-tag.outputs.release_tag }}' on '${{ steps.push-tag.outputs.default_branch }}' branch"
73 |           body: "Automatic changes for Release ${{ steps.read-tag.outputs.release_tag }} on Repository default branch"
74 |
75 |       - name: Parse release content
76 |         run: |
77 |           curl -o create_github_release.py https://raw.githubusercontent.com/ansible-network/github_actions/main/scripts/create_github_release.py
78 |           python3 ./create_github_release.py --repository ${{ github.repository }} --release-tag ${{ steps.read-tag.outputs.release_tag }} --release-name "${{ inputs.release_prefix }} ${{ steps.read-tag.outputs.release_tag }}"
79 |         env:
80 |           GITHUB_TOKEN: ${{ secrets.gh_token }}
81 |         if: ${{ inputs.release_publish }}
-------------------------------------------------------------------------------- /.github/workflows/safe-to-test.yml: --------------------------------------------------------------------------------
1 | name: safe-to-test
2 | on:
3 |   workflow_call:
4 |     secrets:
5 |       GH_TOKEN:
6 |         required: false
7 |
8 | jobs:
9 |   confirm:
10 |     runs-on: ubuntu-latest
11 |     env:
12 |       GITHUB_TOKEN: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }}
13 |     steps:
14 |       - name: Checkout the repository
15 |         uses: actions/checkout@v3
16 |
17 |       - name: Check if the PR author is a collaborator
18 |         id: authorization
19 |         run: |
20 |           user_role=$(gh api --jq .permission -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" $GH_API_URL)
21 |           roles=("write" "maintain" "admin")
22 |           [[ "${roles[*]} " =~ "${user_role} " ]] && collaborator=true || collaborator=false
23 |           echo "collaborator=${collaborator}" >> $GITHUB_OUTPUT
24 |         env:
25 |           GH_API_URL: "/repos/${{ github.repository }}/collaborators/${{
github.event.pull_request.user.login }}/permission" 26 | 27 | # Add 'safe to test' label for collaborators 28 | - name: Add safe label for User with required roles 29 | run: gh pr edit ${{ github.event.number }} --add-label "safe to test" 30 | if: ${{ steps.authorization.outputs.collaborator == 'true' }} 31 | 32 | # Remove 'safe to test' for non collaborators 33 | - name: Get pull request labels 34 | id: read-label 35 | run: | 36 | SAFE_LABEL=$(gh api --jq '.[] | select(.name == "safe to test") | .name' -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" $GH_API_URL) 37 | echo "safe_label=$SAFE_LABEL" >> $GITHUB_OUTPUT 38 | env: 39 | GH_API_URL: /repos/${{ github.repository }}/issues/${{ github.event.number }}/labels 40 | if: ${{ steps.authorization.outputs.collaborator == 'false' }} 41 | 42 | - name: Remove the 'safe to test', not a collaborator, PR was updated or not just added 43 | run: gh pr edit ${{ github.event.number }} --remove-label "safe to test" 44 | if: >- 45 | steps.authorization.outputs.collaborator == 'false' && 46 | steps.read-label.outputs.safe_label != '' && 47 | github.event.label.name != 'safe to test' && 48 | (github.event.action == 'synchronize' || github.event.action == 'reopened') 49 | 50 | - name: Fail if not now labeled 51 | run: >- 52 | gh api -H "Accept: application/vnd.github.v3+json" $API_URL 53 | --jq .labels | grep 'safe to test' 54 | env: 55 | API_URL: /repos/${{ github.repository }}/issues/${{ github.event.number }} 56 | -------------------------------------------------------------------------------- /.github/workflows/sanity.yml: -------------------------------------------------------------------------------- 1 | name: Sanity tests 2 | on: 3 | workflow_call: 4 | inputs: 5 | collection_pre_install: 6 | required: false 7 | type: string 8 | default: "" 9 | matrix_exclude: 10 | # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix 11 | # 2.15 supports Python 3.9-3.11 12 | # 2.16 supports Python 3.10-3.11 13 | # 2.17 supports Python 3.10-3.12 14 | # 2.18 supports Python 3.11-3.13 15 | # 2.19 supports Python 3.11-3.13 16 | # support for Python 3.13 added and 3.10 removed in 2.18 for control node 17 | # target node supported Python 3.8-3.13 as of 2.18 and 2.19 18 | # milestone is and devel is switched to 2.20 19 | # https://docs.ansible.com/ansible/devel/roadmap/ROADMAP_2_19.html 20 | default: >- 21 | [ 22 | { 23 | "ansible-version": "devel", 24 | "python-version": "3.10" 25 | }, 26 | { 27 | "ansible-version": "milestone", 28 | "python-version": "3.10" 29 | }, 30 | { 31 | "ansible-version": "stable-2.19", 32 | "python-version": "3.10" 33 | }, 34 | { 35 | "ansible-version": "stable-2.18", 36 | "python-version": "3.10" 37 | }, 38 | { 39 | "ansible-version": "stable-2.17", 40 | "python-version": "3.13" 41 | }, 42 | { 43 | "ansible-version": "stable-2.16", 44 | "python-version": "3.12" 45 | }, 46 | { 47 | "ansible-version": "stable-2.16", 48 | "python-version": "3.13" 49 | }, 50 | { 51 | "ansible-version": "stable-2.15", 52 | "python-version": "3.12" 53 | }, 54 | { 55 | "ansible-version": "stable-2.15", 56 | "python-version": "3.13" 57 | } 58 | ] 59 | required: false 60 | type: string 61 | matrix_include: 62 | default: >- 63 | [] 64 | required: false 65 | type: string 66 | unstable: 67 | default: >- 68 | [ 69 | "devel", 70 | ] 71 | required: false 72 | type: string 73 | 74 | jobs: 75 | sanity: 76 | env: 77 | PY_COLORS: "1" 78 | source_directory: "./source" 79 | strategy: 80 
| fail-fast: false 81 | matrix: 82 | os: 83 | - ubuntu-latest 84 | ansible-version: 85 | # ansible-core 2.15 reached EOL on November 2024 86 | - stable-2.16 87 | - stable-2.17 88 | - stable-2.18 89 | - stable-2.19 90 | - milestone 91 | - devel 92 | python-version: 93 | # 2.16 supports Python 3.10-3.11 94 | # 2.17 supports Python 3.10-3.12 95 | # 2.18 supports Python 3.11-3.13 96 | # 2.19 supports Python 3.11-3.13 97 | - "3.10" 98 | - "3.11" 99 | - "3.12" 100 | - "3.13" 101 | exclude: ${{ fromJSON(inputs.matrix_exclude) }} 102 | include: ${{ fromJSON(inputs.matrix_include) }} 103 | runs-on: ${{ matrix.os }} 104 | continue-on-error: ${{ contains(fromJSON(inputs.unstable), matrix.ansible-version) }} 105 | 106 | name: "py${{ matrix.python-version }} / ${{ matrix.os }} / ${{ matrix.ansible-version }}" 107 | steps: 108 | - name: Checkout the collection repository 109 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 110 | with: 111 | path: ${{ env.source_directory }} 112 | ref: ${{ github.event.pull_request.head.sha }} 113 | fetch-depth: "0" 114 | 115 | - name: Set up Python ${{ matrix.python-version }} 116 | uses: actions/setup-python@v4 117 | with: 118 | python-version: ${{ matrix.python-version }} 119 | 120 | - name: Install ansible-core (${{ matrix.ansible-version }}) 121 | run: python3 -m pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check 122 | 123 | - name: Pre install collections dependencies first so the collection install does not 124 | run: ansible-galaxy collection install --pre ${{ inputs.collection_pre_install }} -p /home/runner/collections/ 125 | if: ${{ inputs.collection_pre_install != '' && matrix.ansible-version != 'stable-2.9' }} 126 | 127 | - name: Read collection metadata from galaxy.yml 128 | id: identify 129 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 130 | with: 131 | source_path: ${{ env.source_directory }} 132 | 133 | - name: Build and install the collection 134 | uses: ansible-network/github_actions/.github/actions/build_install_collection@main 135 | with: 136 | install_python_dependencies: false 137 | source_path: ${{ env.source_directory }} 138 | collection_path: ${{ steps.identify.outputs.collection_path }} 139 | tar_file: ${{ steps.identify.outputs.tar_file }} 140 | ansible_version: ${{ matrix.ansible-version }} 141 | 142 | - name: Print the ansible version 143 | run: ansible --version 144 | 145 | - name: Print the python dependencies 146 | run: python3 -m pip list 147 | 148 | - name: Run sanity tests 149 | run: ansible-test sanity --requirements --color --python ${{ matrix.python-version }} 150 | working-directory: ${{ steps.identify.outputs.collection_path }} 151 | -------------------------------------------------------------------------------- /.github/workflows/tox-linters.yml: -------------------------------------------------------------------------------- 1 | name: tox-linters 2 | on: 3 | workflow_call: 4 | jobs: 5 | changelog: 6 | runs-on: ubuntu-latest 7 | name: Runs code linting tests 8 | steps: 9 | - name: Code checkout 10 | uses: actions/checkout@v3 11 | 12 | - name: Code linting 13 | uses: ansible-network/github_actions/.github/actions/tox@main 14 | with: 15 | path: "." 
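          # The tox composite action also accepts tox_labellist and
          # tox_extra_args inputs (see workflows/tox.yml and
          # galaxy_importer.yml for examples); linting only needs the
          # "linters" envlist selected below.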
16 | tox_envlist: "linters" 17 | -------------------------------------------------------------------------------- /.github/workflows/tox.yml: -------------------------------------------------------------------------------- 1 | name: tox 2 | on: 3 | workflow_call: 4 | inputs: 5 | envname: 6 | description: Tox environment name to use to limit the run 7 | required: false 8 | type: string 9 | default: "" 10 | labelname: 11 | description: Tox label to use to limit the run 12 | required: false 13 | type: string 14 | default: "" 15 | checkout_ref: 16 | description: Repository reference to pass to checkout action 17 | required: false 18 | type: string 19 | default: "" 20 | checkout_fetch_depth: 21 | description: Fetch depth to pass to checkout action 22 | required: false 23 | type: string 24 | default: "0" 25 | jobs: 26 | tox: 27 | runs-on: ubuntu-latest 28 | name: Run Tox based code tests 29 | steps: 30 | - name: Code checkout 31 | if: ${{ ! inputs.checkout_ref }} 32 | uses: actions/checkout@v3 33 | 34 | - name: Code checkout with parameters 35 | if: ${{ inputs.checkout_ref }} 36 | uses: actions/checkout@v3 37 | with: 38 | ref: ${{ inputs.checkout_ref }} 39 | fetch-depth: ${{ inputs.checkout_fetch_depth }} 40 | 41 | - name: Run tests 42 | uses: ansible-network/github_actions/.github/actions/tox@main 43 | with: 44 | path: "." 45 | tox_envlist: ${{ inputs.envname }} 46 | tox_labellist: ${{ inputs.labelname }} 47 | -------------------------------------------------------------------------------- /.github/workflows/unit_galaxy.yml: -------------------------------------------------------------------------------- 1 | name: Unit tests, dependencies from galaxy 2 | on: 3 | workflow_call: 4 | inputs: 5 | collection_pre_install: 6 | required: false 7 | type: string 8 | default: "" 9 | matrix_exclude: 10 | # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix 11 | # 2.15 supports Python 3.9-3.11 12 | # 2.16 supports Python 3.10-3.11 13 | # 2.17 supports Python 3.10-3.12 14 | # 2.18 supports Python 3.11-3.13 15 | # 2.19 supports Python 3.11-3.13 16 | # support for Python 3.13 added and 3.10 removed in 2.18 for control node 17 | # target node supported Python 3.8-3.13 as of 2.18 and 2.19 18 | # milestone is and devel is switched to 2.20 19 | # https://docs.ansible.com/ansible/devel/roadmap/ROADMAP_2_19.html 20 | default: >- 21 | [ 22 | { 23 | "ansible-version": "devel", 24 | "python-version": "3.10" 25 | }, 26 | { 27 | "ansible-version": "milestone", 28 | "python-version": "3.10" 29 | }, 30 | { 31 | "ansible-version": "stable-2.19", 32 | "python-version": "3.10" 33 | }, 34 | { 35 | "ansible-version": "stable-2.18", 36 | "python-version": "3.10" 37 | }, 38 | { 39 | "ansible-version": "stable-2.17", 40 | "python-version": "3.13" 41 | }, 42 | { 43 | "ansible-version": "stable-2.16", 44 | "python-version": "3.12" 45 | }, 46 | { 47 | "ansible-version": "stable-2.16", 48 | "python-version": "3.13" 49 | }, 50 | { 51 | "ansible-version": "stable-2.15", 52 | "python-version": "3.12" 53 | }, 54 | { 55 | "ansible-version": "stable-2.15", 56 | "python-version": "3.13" 57 | } 58 | ] 59 | required: false 60 | type: string 61 | matrix_include: 62 | default: >- 63 | [] 64 | required: false 65 | type: string 66 | 67 | jobs: 68 | unit_galaxy: 69 | env: 70 | PY_COLORS: "1" 71 | source_directory: "./source" 72 | strategy: 73 | fail-fast: false 74 | matrix: 75 | os: 76 | - ubuntu-latest 77 | ansible-version: 78 | # ansible-core 2.15 reached EOL on November 2024 79 | 
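          # Unsupported ansible-core/Python combinations are pruned from this
          # matrix via the matrix_exclude input (decoded with fromJSON below);
          # only the devel entry is continue-on-error.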
- stable-2.16 80 | - stable-2.17 81 | - stable-2.18 82 | - stable-2.19 83 | - milestone 84 | - devel 85 | python-version: 86 | # 2.16 supports Python 3.10-3.11 87 | # 2.17 supports Python 3.10-3.12 88 | # 2.18 supports Python 3.11-3.13 89 | - "3.10" 90 | - "3.11" 91 | - "3.12" 92 | - "3.13" 93 | exclude: ${{ fromJSON(inputs.matrix_exclude) }} 94 | include: ${{ fromJSON(inputs.matrix_include) }} 95 | runs-on: ${{ matrix.os }} 96 | continue-on-error: ${{ matrix.ansible-version == 'devel' }} 97 | 98 | name: "py${{ matrix.python-version }} / ${{ matrix.os }} / ${{ matrix.ansible-version }}" 99 | steps: 100 | - name: Checkout the collection repository 101 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 102 | with: 103 | path: ${{ env.source_directory }} 104 | ref: ${{ github.event.pull_request.head.sha }} 105 | fetch-depth: "0" 106 | 107 | - name: Set up Python ${{ matrix.python-version }} 108 | uses: actions/setup-python@v4 109 | with: 110 | python-version: ${{ matrix.python-version }} 111 | 112 | # ansible-pylibssh does not have cp312 wheels 113 | # when building from sdist libssh-dev needs to be installed 114 | # extra install step starts 115 | - name: Install build toolchain and openssl headers on Linux 116 | shell: bash 117 | run: sudo apt update && sudo apt install build-essential libssl-dev 118 | if: ${{ matrix.python-version == 3.12 }} 119 | 120 | - name: Install catchsegv and libssh headers on Linux for cythonize+coverage 121 | shell: bash 122 | run: sudo apt update && sudo apt install libssh-dev 123 | if: ${{ matrix.python-version == 3.12 }} 124 | # extra install step ends 125 | 126 | - name: Install ansible-core (${{ matrix.ansible-version }}) 127 | run: python3 -m pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check 128 | 129 | - name: Pre install collections dependencies first so the collection install does not 130 | run: ansible-galaxy collection install --pre ${{ inputs.collection_pre_install }} -p /home/runner/collections 131 | if: ${{ inputs.collection_pre_install != '' && matrix.ansible-version != 'stable-2.9' }} 132 | 133 | - name: Read collection metadata from galaxy.yml 134 | id: identify 135 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 136 | with: 137 | source_path: ${{ env.source_directory }} 138 | 139 | - name: Build and install the collection 140 | uses: ansible-network/github_actions/.github/actions/build_install_collection@main 141 | with: 142 | install_python_dependencies: true 143 | source_path: ${{ env.source_directory }} 144 | collection_path: ${{ steps.identify.outputs.collection_path }} 145 | tar_file: ${{ steps.identify.outputs.tar_file }} 146 | ansible_version: ${{ matrix.ansible-version }} 147 | 148 | - name: Print the ansible version 149 | run: ansible --version 150 | 151 | - name: Print the python dependencies 152 | run: python3 -m pip list 153 | 154 | - name: Run unit tests 155 | run: python -m pytest tests/unit --showlocals 156 | working-directory: ${{ steps.identify.outputs.collection_path }} 157 | -------------------------------------------------------------------------------- /.github/workflows/unit_source.yml: -------------------------------------------------------------------------------- 1 | name: Unit tests, dependencies from source 2 | on: 3 | workflow_call: 4 | inputs: 5 | collection_pre_install: 6 | required: false 7 | type: string 8 | default: "" 9 | matrix_exclude: 10 | # 
https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix 11 | # 2.15 supports Python 3.9-3.11 12 | # 2.16 supports Python 3.10-3.11 13 | # 2.17 supports Python 3.10-3.12 14 | # 2.18 supports Python 3.11-3.13 15 | # 2.19 supports Python 3.11-3.13 16 | # support for Python 3.13 added and 3.10 removed in 2.18 for control node 17 | # target node supported Python 3.8-3.13 as of 2.18 and 2.19 18 | # milestone is and devel is switched to 2.29 19 | # https://docs.ansible.com/ansible/devel/roadmap/ROADMAP_2_18.html 20 | default: >- 21 | [ 22 | { 23 | "ansible-version": "devel", 24 | "python-version": "3.10" 25 | }, 26 | { 27 | "ansible-version": "milestone", 28 | "python-version": "3.10" 29 | }, 30 | { 31 | "ansible-version": "stable-2.19", 32 | "python-version": "3.10" 33 | }, 34 | { 35 | "ansible-version": "stable-2.18", 36 | "python-version": "3.10" 37 | }, 38 | { 39 | "ansible-version": "stable-2.17", 40 | "python-version": "3.13" 41 | }, 42 | { 43 | "ansible-version": "stable-2.16", 44 | "python-version": "3.12" 45 | }, 46 | { 47 | "ansible-version": "stable-2.16", 48 | "python-version": "3.13" 49 | }, 50 | { 51 | "ansible-version": "stable-2.15", 52 | "python-version": "3.12" 53 | }, 54 | { 55 | "ansible-version": "stable-2.15", 56 | "python-version": "3.13" 57 | } 58 | ] 59 | required: false 60 | type: string 61 | 62 | jobs: 63 | unit_source: 64 | env: 65 | PY_COLORS: "1" 66 | source_directory: "./source" 67 | runs-on: ubuntu-latest 68 | strategy: 69 | fail-fast: false 70 | matrix: 71 | ansible-version: 72 | # ansible-core 2.15 reached EOL on November 2024 73 | - stable-2.16 74 | - stable-2.17 75 | - stable-2.18 76 | - stable-2.19 77 | - milestone 78 | - devel 79 | python-version: 80 | # 2.16 supports Python 3.10-3.11 81 | # 2.17 supports Python 3.10-3.12 82 | # 2.18 supports Python 3.11-3.13 83 | - "3.10" 84 | - "3.11" 85 | - "3.12" 86 | - "3.13" 87 | exclude: ${{ fromJSON(inputs.matrix_exclude) }} 88 | continue-on-error: ${{ matrix.ansible-version == 'devel' }} 89 | 90 | name: "py${{ matrix.python-version }} / ${{ matrix.ansible-version }}" 91 | steps: 92 | - name: Checkout the collection repository 93 | uses: ansible-network/github_actions/.github/actions/checkout_dependency@main 94 | with: 95 | path: ${{ env.source_directory }} 96 | ref: ${{ github.event.pull_request.head.sha }} 97 | fetch-depth: "0" 98 | 99 | - name: Set up Python ${{ matrix.python-version }} 100 | uses: actions/setup-python@v4 101 | with: 102 | python-version: ${{ matrix.python-version }} 103 | 104 | # ansible-pylibssh does not have cp312 wheels 105 | # when building from sdist libssh-dev needs to be installed 106 | # extra install step starts 107 | - name: Install build toolchain and openssl headers on Linux 108 | shell: bash 109 | run: sudo apt update && sudo apt install build-essential libssl-dev 110 | if: ${{ matrix.python-version >= 3.12 }} 111 | 112 | - name: Install catchsegv and libssh headers on Linux for cythonize+coverage 113 | shell: bash 114 | run: sudo apt update && sudo apt install libssh-dev 115 | if: ${{ matrix.python-version >= 3.12 }} 116 | # extra install step ends 117 | 118 | - name: Install ansible-core (${{ matrix.ansible-version }}) 119 | run: python3 -m pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-version }}.tar.gz --disable-pip-version-check 120 | 121 | - name: Pre install collections dependencies first so the collection install does not 122 | run: ansible-galaxy collection install --pre ${{ 
inputs.collection_pre_install }} -p /home/runner/collections 123 | if: inputs.collection_pre_install != '' 124 | 125 | - name: Read collection metadata from galaxy.yml 126 | id: identify 127 | uses: ansible-network/github_actions/.github/actions/identify_collection@main 128 | with: 129 | source_path: ${{ env.source_directory }} 130 | 131 | - name: Build and install the collection 132 | uses: ansible-network/github_actions/.github/actions/build_install_collection@main 133 | with: 134 | install_python_dependencies: true 135 | source_path: ${{ env.source_directory }} 136 | collection_path: ${{ steps.identify.outputs.collection_path }} 137 | tar_file: ${{ steps.identify.outputs.tar_file }} 138 | ansible_version: ${{ matrix.ansible-version }} 139 | 140 | - name: Print the ansible version 141 | run: ansible --version 142 | 143 | - name: Print the python dependencies 144 | run: python3 -m pip list 145 | 146 | - name: Run unit tests 147 | run: python -m pytest tests/unit --showlocals --ansible-host-pattern localhost 148 | working-directory: ${{ steps.identify.outputs.collection_path }} 149 | -------------------------------------------------------------------------------- /.github/workflows/update_aws_variables.yml: -------------------------------------------------------------------------------- 1 | name: aws-variables 2 | on: 3 | workflow_call: 4 | secrets: 5 | GH_TOKEN: 6 | description: The Github token to use. 7 | required: false 8 | 9 | jobs: 10 | user-agent: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: write 14 | 15 | steps: 16 | - name: Update aws user-agent variable 17 | uses: ansible-network/github_actions/.github/actions/commit_to_pullrequest@main 18 | with: 19 | commit_message: "update aws user-agent variable with version from galaxy.yml" 20 | python_libs: "pyyaml" 21 | python_executable_url: "https://raw.githubusercontent.com/ansible-network/github_actions/main/scripts/update_aws_user_agent.py" 22 | file_pattern: "plugins/*.py" 23 | token: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }} 24 | 25 | boto-constraints: 26 | runs-on: ubuntu-latest 27 | permissions: 28 | contents: write 29 | 30 | steps: 31 | - name: Update collection tests constraints 32 | uses: ansible-network/github_actions/.github/actions/commit_to_pullrequest@main 33 | with: 34 | commit_message: "update botocore and boto3 tests constraints" 35 | python_executable_url: "https://raw.githubusercontent.com/ansible-network/github_actions/main/scripts/update_aws_boto_constraints.py" 36 | token: ${{ secrets.GH_TOKEN || secrets.GITHUB_TOKEN }} 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/
161 | 
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | repos:
3 |   - repo: https://github.com/PyCQA/isort
4 |     rev: 5.13.2
5 |     hooks:
6 |       - id: isort
7 |         args: ["--filter-files"]
8 | 
9 |   - repo: https://github.com/pre-commit/mirrors-prettier
10 |     rev: "v4.0.0-alpha.8"
11 |     hooks:
12 |       - id: prettier
13 |         additional_dependencies:
14 |           - prettier
15 |           - prettier-plugin-toml
16 | 
17 |   - repo: https://github.com/psf/black
18 |     rev: 24.4.2
19 |     hooks:
20 |       - id: black
21 | 
22 |   - repo: https://github.com/Lucas-C/pre-commit-hooks
23 |     rev: v1.5.5
24 |     hooks:
25 |       - id: remove-tabs
26 | 
27 |   - repo: https://github.com/pre-commit/pre-commit-hooks
28 |     rev: v4.6.0
29 |     hooks:
30 |       - id: trailing-whitespace
31 |       - id: check-merge-conflict
32 |       - id: end-of-file-fixer
33 |       - id: no-commit-to-branch
34 |       - id: fix-byte-order-marker
35 |       - id: debug-statements
36 | 
37 |   - repo: https://github.com/codespell-project/codespell
38 |     rev: v2.3.0
39 |     hooks:
40 |       - id: codespell
41 | 
42 |   - repo: https://github.com/pycqa/flake8
43 |     rev: 7.1.0
44 |     hooks:
45 |       - id: flake8
46 |         additional_dependencies:
47 |           - darglint
48 |           - flake8-2020 >= 1.6.0
49 |           - flake8-docstrings # uses pydocstyle
50 |           - flake8-isort >= 4.1.1
51 | 
52 |   - repo: https://github.com/asottile/pyupgrade
53 |     # keep it after flake8
54 |     rev: v3.16.0
55 |     hooks:
56 |       - id: pyupgrade
57 |         args: ["--py39-plus"]
58 | 
59 |   - repo: https://github.com/pre-commit/mirrors-mypy
60 |     rev: v1.10.0
61 |     hooks:
62 |       - id: mypy
63 |         additional_dependencies:
64 |           - types-PyYAML
65 |           - pygithub
66 |           - pytest
67 |           - types-requests
68 | 
69 |   - repo: https://github.com/pycqa/pylint
70 |     rev: v3.2.3
71 |     hooks:
72 |       - id: pylint
73 |         additional_dependencies:
74 |           - PyYAML
75 |           - pygithub
76 |           - pytest
77 |           - semver
78 | 
79 |   - repo: local
80 |     hooks:
81 |       - id: pytest-check
82 |         name: pytest-check
83 |         entry: pytest .github scripts -vvvv
84 |         types: [python]
85 |         language: python
86 |         pass_filenames: false
87 |         always_run: true
88 |         additional_dependencies:
89 |           - pytest
90 |           - pygithub
91 |           - pyyaml
92 | 
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 |   "python.formatting.provider": "black",
3 |   "editor.formatOnSave": true,
4 |   "python.linting.pylintEnabled": true,
5 |   "python.linting.flake8Enabled": true,
6 |   "python.linting.mypyEnabled": true,
7 |   "isort.check": true,
8 |   "[python]": {
9 |     "editor.codeActionsOnSave": {
10 |       "source.organizeImports": "explicit"
11 |     }
12 |   },
13 |   "prettier.enable": true
14 | }
15 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # github_actions
2 | 
3 | A repository of composite GitHub Actions and reusable workflows.
4 | 
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | files =
3 |     docs/,
4 |     share/,
5 |     src/,
6 |     tests/
7 | install_types = true
8 | namespace_packages = true
9 | no_implicit_optional = true
10 | non_interactive = true
11 | pretty = true
12 | show_column_numbers = true
13 | show_error_codes = true
14 | show_error_context = true
15 | strict = true
16 | 
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool]
2 | 
3 | [tool.black]
4 | line-length = 100
5 | 
6 | [tool.isort]
7 | force_single_line = true # Force from .. import to be 1 per line, minimizing changes at time of implementation
8 | lines_after_imports = 2 # Ensures consistency for cases when there's variable vs function/class definitions after imports
9 | lines_between_types = 1 # Separate import/from with 1 line, minimizing changes at time of implementation
10 | no_lines_before = "LOCALFOLDER" # Keeps local imports bundled with first-party
11 | profile = "black" # Avoid conflict with black
12 | 
13 | [tool.pylint]
14 | 
15 | [tool.pylint.format]
16 | max-line-length = 100
17 | 
18 | [tool.pylint.master]
19 | no-docstring-rgx = "__.*__"
20 | 
21 | [tool.pylint.messages_control]
22 | disable = ["fixme"]
23 | enable = [
24 |   "useless-suppression", # Identify unneeded pylint disable statements
25 | 
26 | ]
27 | 
--------------------------------------------------------------------------------
/scripts/create_github_release.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Script to create a GitHub release using content from the CHANGELOG.rst file."""
3 | 
4 | import logging
5 | import os
6 | import re
7 | 
8 | from argparse import ArgumentParser
9 | from pathlib import PosixPath
10 | 
11 | from github import Github
12 | 
13 | 
14 | FORMAT = "[%(asctime)s] - %(message)s"
15 | logging.basicConfig(format=FORMAT)
16 | logger = logging.getLogger(__file__)
17 | logger.setLevel(logging.DEBUG)
18 | 
19 | 
20 | def create_git_release(
21 |     repository: str, release_name: str, release_tag: str, release_content: str
22 | ) -> None:
23 |     """Create a GitHub release on the repository.
24 | 
25 |     :param repository: GitHub repository name.
26 |     :param release_name: The name of the release to create.
27 |     :param release_tag: Release tag.
28 |     :param release_content: Release description.
29 |     """
30 |     access_token = os.environ.get("GITHUB_TOKEN")
31 | 
32 |     gh_client = Github(access_token)
33 |     gh_repository = gh_client.get_repo(repository)
34 |     gh_repository.create_git_release(release_tag, release_name, release_content)
35 | 
36 | 
37 | def parse_release_content(release_version: str) -> str:
38 |     """Parse release content from CHANGELOG.rst.
39 | 
40 |     :param release_version: Release version to extract content for.
41 |     :returns: The release content found in CHANGELOG.rst.
42 |     """
43 |     if not PosixPath("CHANGELOG.rst").exists():
44 |         logger.error("CHANGELOG.rst does not exist.")
45 |         return "..."
46 | 
47 |     release_content = "..."
48 |     with PosixPath("CHANGELOG.rst").open(encoding="utf-8") as file_read:
49 |         data = file_read.read().splitlines()
50 |         idx = 0
51 |         start, end = -1, 0
52 |         # An RST version heading is a line like "v1.2.3" underlined with "=" characters
53 |         # whose length matches the heading, so match any run of "=" rather than a fixed width.
54 |         while idx < len(data) - 1:
55 |             if data[idx] == f"v{release_version}" and re.match(r"^=+$", data[idx + 1]):
56 |                 start = idx + 2
57 |                 idx += 2
58 |             elif (
59 |                 start > 0
60 |                 and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+$", data[idx])
61 |                 and re.match(r"^=+$", data[idx + 1])
62 |             ):
63 |                 end = idx
64 |                 break
65 |             idx += 1
66 |         if start != -1:
67 |             release_content = "\n".join(data[start:]) if not end else "\n".join(data[start:end])
68 |     return release_content
69 | 
70 | 
71 | def main() -> None:
72 |     """Read release content from CHANGELOG.rst and create the GitHub release."""
73 |     parser = ArgumentParser(
74 |         description="Read release content from CHANGELOG.rst for a specific version."
75 |     )
76 |     parser.add_argument("--repository", required=True, help="Repository name.")
77 |     parser.add_argument("--release-tag", required=True, help="Release tag.")
78 |     parser.add_argument("--release-name", required=True, help="Name of the release to create.")
79 | 
80 |     args = parser.parse_args()
81 | 
82 |     release_content = parse_release_content(args.release_tag)
83 |     create_git_release(args.repository, args.release_name, args.release_tag, release_content)
84 | 
85 | 
86 | if __name__ == "__main__":
87 |     main()
88 | 
--------------------------------------------------------------------------------
/scripts/update_aws_boto_constraints.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Script to update boto3/botocore test constraints and hard-coded minimum version variables."""
3 | 
4 | import logging
5 | import os
6 | import re
7 | 
8 | from functools import partial
9 | from pathlib import PosixPath
10 | 
11 | 
12 | FORMAT = "[%(asctime)s] - %(message)s"
13 | logging.basicConfig(format=FORMAT)
14 | logger = logging.getLogger("update_aws_boto_constraints")
15 | logger.setLevel(logging.DEBUG)
16 | 
17 | MIN_BOTOCORE_RE = re.compile(r"MINIMUM_BOTOCORE_VERSION( *)=( *)[\"'][0-9\.]+[\"']")
18 | MIN_BOTO3_RE = re.compile(r"MINIMUM_BOTO3_VERSION( *)=( *)[\"'][0-9\.]+[\"']")
19 | 
20 | 
21 | def replace_vars(values: dict[str, str], line: str) -> str:
22 |     """Replace a variable's value in a string.
23 | 
24 |     :param values: A mapping of variable names to their new values.
25 |     :param line: The string to replace values in.
26 |     :returns: The updated string.
27 |     """
28 |     res = None
29 |     for var, value in values.items():
30 |         match = re.match(rf"^{var}([ =\"']*)[0-9\.]+(.*)", line)
31 |         if match:
32 |             res = var + match.group(1) + value + match.group(2)
33 |             break
34 |     return line if res is None else res
35 | 
36 | 
37 | def update_single_file(path: str, values: dict[str, str]) -> None:
38 |     """Update a file with the boto3 and botocore constraints.
39 | 
40 |     :param path: The path to the file to update.
41 |     :param values: Dictionary of boto3 and botocore constraints.
42 |     """
43 |     with open(path, encoding="utf-8") as file_read:
44 |         content = file_read.read().split("\n")
45 |     new_content = list(map(partial(replace_vars, values), content))
46 |     if new_content != content:
47 |         with open(path, "w", encoding="utf-8") as file_write:
48 |             file_write.write("\n".join(new_content))
49 |         logger.info("%s => updated", path)
50 | 
51 | 
52 | def update_tests_constraints(boto3_version: str, botocore_version: str) -> None:
53 |     """Update boto3 and botocore constraints in the test requirement files and plugin modules.
54 | 
55 |     :param boto3_version: The boto3 version to define.
56 |     :param botocore_version: The botocore version to define.
57 |     """
58 |     boto_values = {"boto3": boto3_version, "botocore": botocore_version}
59 |     for file in ("tests/unit/constraints.txt", "tests/integration/constraints.txt"):
60 |         if PosixPath(file).exists():
61 |             update_single_file(file, boto_values)
62 | 
63 |     min_boto_values = {
64 |         "MINIMUM_BOTO3_VERSION": boto3_version,
65 |         "MINIMUM_BOTOCORE_VERSION": botocore_version,
66 |     }
67 |     for root, _, files in os.walk("plugins"):
68 |         for name in files:
69 |             if not name.endswith(".py"):
70 |                 continue
71 |             update_single_file(os.path.join(root, name), min_boto_values)
72 | 
73 | 
74 | def read_boto_version() -> tuple[str, str]:
75 |     """Read boto version constraints from the requirements file.
76 | 
77 |     :returns: Tuple of boto3 and botocore version constraints.
78 |     """
79 |     botocore_regex = re.compile(r"^botocore[>=<]+([0-9\.]+)", re.MULTILINE | re.IGNORECASE)
80 |     boto3_regex = re.compile(r"^boto3[>=<]+([0-9\.]+)", re.MULTILINE | re.IGNORECASE)
81 | 
82 |     with PosixPath("requirements.txt").open(encoding="utf-8") as file_desc:
83 |         content = file_desc.read()
84 |     m_boto3 = boto3_regex.search(content)
85 |     m_botocore = botocore_regex.search(content)
86 |     return m_boto3.group(1) if m_boto3 else "", m_botocore.group(1) if m_botocore else ""
87 | 
88 | 
89 | def main() -> None:
90 |     """Read boto constraints and update variables accordingly."""
91 |     boto3_version, botocore_version = read_boto_version()
92 |     logger.info("boto3='%s' - botocore='%s'", boto3_version, botocore_version)
93 |     update_tests_constraints(boto3_version, botocore_version)
94 | 
95 | 
96 | if __name__ == "__main__":
97 |     main()
98 | 
--------------------------------------------------------------------------------
/scripts/update_aws_user_agent.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Script to update the hard-coded AWS user-agent variable with the version from galaxy.yml."""
3 | 
4 | import logging
5 | import re
6 | 
7 | from pathlib import PosixPath
8 | 
9 | import yaml
10 | 
11 | 
12 | FORMAT = "[%(asctime)s] - %(message)s"
13 | logging.basicConfig(format=FORMAT)
14 | logger = logging.getLogger("update_aws_user_agent")
15 | logger.setLevel(logging.DEBUG)
16 | 
17 | 
18 | def update_user_agent(src: PosixPath, var_name: str, galaxy_version: str) -> bool:
19 |     """Update the AWS user-agent variable in the given file.
20 | 
21 |     :param src: The path to the file to search the variable in.
22 |     :param var_name: The name of the variable to update.
23 |     :param galaxy_version: The collection version stored in galaxy.yml.
24 |     :returns: Whether the variable has been updated.
25 |     """
26 |     variable_regex = rf"^{var_name} = [\"'](.*)[\"']"
27 |     new_content = []
28 |     updated = False
29 |     logger.info("********** Parsing file => %s *************", src)
30 |     with src.open(encoding="utf-8") as file_handler:
31 |         for line in file_handler.read().split("\n"):
32 |             match = re.match(variable_regex, line)
33 |             if match and match.group(1) != galaxy_version:
34 |                 logger.info("Match variable [%s] with value [%s]", var_name, match.group(1))
35 |                 updated = True
36 |                 new_content.append(f'{var_name} = "{galaxy_version}"')
37 |             else:
38 |                 new_content.append(line)
39 | 
40 |     if updated:
41 |         src.write_text("\n".join(new_content))
42 |     return updated
43 | 
44 | 
45 | def update_collection_user_agent(var_name: str, galaxy_version: str) -> bool:
46 |     """Update the AWS user-agent variable across all collection plugin files.
47 | 
48 |     :param var_name: The name of the variable to update.
49 |     :param galaxy_version: The collection version stored in galaxy.yml.
50 |     :returns: Whether the variable has been updated in at least one file.
51 |     """
52 | 
53 |     def _get_files_from_directory(path: PosixPath) -> list[PosixPath]:
54 |         # Recursively collect every file below the given path.
55 |         if not path.is_dir():
56 |             return [path]
57 |         result = []
58 |         for child in path.iterdir():
59 |             result.extend(_get_files_from_directory(child))
60 |         return result
61 | 
62 |     # Build a list (not a generator) so that any() cannot short-circuit:
63 |     # every plugin file must be processed, not only the first one updated.
64 |     results = [
65 |         update_user_agent(src, var_name, galaxy_version)
66 |         for src in _get_files_from_directory(PosixPath("plugins"))
67 |         if str(src).endswith(".py")
68 |     ]
69 |     return any(results)
70 | 
71 | 
72 | def main() -> None:
73 |     """Read collection info and update the AWS user-agent variable if needed."""
74 |     # Read collection information from galaxy.yml
75 |     with PosixPath("galaxy.yml").open(encoding="utf-8") as file_desc:
76 |         collection_info = yaml.safe_load(file_desc)
77 |     logger.info("collection information from galaxy.yml: %s", collection_info)
78 |     variable_name = (
79 |         collection_info["namespace"].upper()
80 |         + "_"
81 |         + collection_info["name"].upper()
82 |         + "_COLLECTION_VERSION"
83 |     )
84 |     logger.info("Expecting collection user-agent variable => '%s'", variable_name)
85 | 
86 |     galaxy_version = collection_info["version"]
87 |     update_collection_user_agent(variable_name, galaxy_version)
88 | 
89 | 
90 | if __name__ == "__main__":
91 |     main()
92 | 
--------------------------------------------------------------------------------
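
For illustration, a minimal smoke test for scripts/update_aws_user_agent.py. This sketch is not part of the repository: the SCRIPT path, the amazon/aws namespace and name, and the temporary directory layout are all assumptions, and it expects PyYAML to be available to the interpreter (the aws-variables workflow above installs it through python_libs: "pyyaml").

#!/usr/bin/env python3
"""Hypothetical smoke test for scripts/update_aws_user_agent.py (not shipped here)."""

import pathlib
import subprocess
import tempfile

# Assumption: run from a checkout of this repository so the script path resolves.
SCRIPT = pathlib.Path("scripts/update_aws_user_agent.py").resolve()

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    # The script reads galaxy.yml from the current working directory ...
    (root / "galaxy.yml").write_text(
        "namespace: amazon\nname: aws\nversion: 9.9.9\n", encoding="utf-8"
    )
    # ... and walks the plugins/ tree for *.py files containing the variable.
    module_utils = root / "plugins" / "module_utils"
    module_utils.mkdir(parents=True)
    target = module_utils / "common.py"
    target.write_text('AMAZON_AWS_COLLECTION_VERSION = "1.0.0"\n', encoding="utf-8")

    subprocess.run(["python3", str(SCRIPT)], cwd=root, check=True)

    # The hard-coded version should now match the one declared in galaxy.yml.
    assert 'AMAZON_AWS_COLLECTION_VERSION = "9.9.9"' in target.read_text(encoding="utf-8")
    print("user-agent variable updated as expected")

The same pattern would apply to scripts/update_aws_boto_constraints.py, which instead reads requirements.txt from the working directory and rewrites the test constraint files and the MINIMUM_BOTO3_VERSION/MINIMUM_BOTOCORE_VERSION variables.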