├── test ├── utils │ ├── dummy_project │ │ ├── NotCppFile.js │ │ ├── build │ │ │ └── FileInBuildDir.hpp │ │ ├── exclude_dir_1 │ │ │ └── ExcludedFile1.hpp │ │ ├── exclude_dir_2 │ │ │ └── ExcludedFile2.hpp │ │ ├── DummyFile.hpp │ │ ├── DummyFile.cpp │ │ └── dummy.py │ └── helper_functions.py ├── test_utils.py ├── test_static_analysis_python.py └── test_static_analysis_cpp.py ├── .gitignore ├── Dockerfile ├── .pylintrc ├── .github └── workflows │ ├── shellcheck.yml │ ├── unit_tests.yml │ ├── linter.yml │ └── test_action.yml ├── LICENSE ├── src ├── patch_compile_commands.py ├── get_files_to_check.py ├── static_analysis_python.py ├── static_analysis_cpp.py └── sa_utils.py ├── docker └── static_analysis.dockerfile ├── entrypoint_python.sh ├── action.yml ├── entrypoint.sh ├── entrypoint_cpp.sh ├── llvm.sh └── README.md /test/utils/dummy_project/NotCppFile.js: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | *__pycache__/ 3 | -------------------------------------------------------------------------------- /test/utils/dummy_project/build/FileInBuildDir.hpp: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/utils/dummy_project/exclude_dir_1/ExcludedFile1.hpp: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/utils/dummy_project/exclude_dir_2/ExcludedFile2.hpp: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/utils/dummy_project/DummyFile.hpp: 
-------------------------------------------------------------------------------- 1 | inline void func() { 2 | int anotherUnused; 3 | } -------------------------------------------------------------------------------- /test/utils/dummy_project/DummyFile.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | int SomeFunction(int){ 4 | return 1; 5 | } 6 | 7 | int main(int, char**){ 8 | int some_variable = 4; 9 | } 10 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jdomagala/static_analysis:latest 2 | 3 | WORKDIR /src 4 | 5 | COPY src/*.py ./ 6 | 7 | COPY *.sh ./ 8 | RUN chmod +x *.sh 9 | 10 | 11 | ENTRYPOINT ["/src/entrypoint.sh"] 12 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | ignore=__init__.py 3 | 4 | [MESSAGES CONTROL] 5 | disable=import-error, 6 | missing-module-docstring, 7 | missing-function-docstring, 8 | global-statement, 9 | wrong-import-position, 10 | duplicate-code 11 | 12 | [FORMAT] 13 | max-line-length=120 14 | 15 | [DESIGN] 16 | max-locals=25 17 | min-similarity-lines=10 18 | -------------------------------------------------------------------------------- /.github/workflows/shellcheck.yml: -------------------------------------------------------------------------------- 1 | name: "Shellcheck" 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | shellcheck: 11 | name: Shellcheck 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v3 15 | - name: Run ShellCheck 16 | uses: ludeeus/action-shellcheck@master 17 | with: 18 | ignore_names: llvm.sh # External file 19 | -------------------------------------------------------------------------------- /.github/workflows/unit_tests.yml: 
-------------------------------------------------------------------------------- 1 | name: Unit Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | check: 11 | name: Run Unit Tests 12 | runs-on: ubuntu-24.04 13 | steps: 14 | - uses: actions/checkout@v3 15 | - name: Set up Python 16 | uses: actions/setup-python@v4 17 | with: 18 | python-version: 3.13.0 19 | - name: Install dependencies 20 | run: | 21 | python -m pip install --upgrade pip 22 | pip install pytest PyGithub 23 | - name: Test with pytest 24 | run: | 25 | pytest 26 | -------------------------------------------------------------------------------- /test/utils/dummy_project/dummy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import string 4 | 5 | shift = 3 6 | choice = input("would you like to encode or decode?") 7 | word = input("Please enter text") 8 | letters = string.ascii_letters + string.punctuation + string.digits 9 | encoded = "" 10 | if choice == "encode": 11 | for letter in word: 12 | if letter == " ": 13 | encoded = encoded + " " 14 | else: 15 | x = letters.index(letter) + shift 16 | encoded = encoded + letters[x] 17 | if choice == "decode": 18 | for letter in word: 19 | if letter == " ": 20 | encoded = encoded + " " 21 | else: 22 | x = letters.index(letter) - shift 23 | encoded = encoded + letters[x] 24 | 25 | print(encoded) 26 | -------------------------------------------------------------------------------- /test/test_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import sys 4 | 5 | try: 6 | PROJECT_PATH = f"{os.sep}".join(os.path.abspath(__file__).split(os.sep)[:-2]) 7 | sys.path.append(PROJECT_PATH) 8 | except Exception as exception: 9 | print(f"Can not add project path to system path! 
Exiting!\nERROR: {exception}") 10 | raise SystemExit(1) from exception 11 | 12 | from src import sa_utils 13 | 14 | 15 | class TestUtils(unittest.TestCase): 16 | """Unit tests for utils_sa module""" 17 | 18 | maxDiff = None 19 | 20 | def test_get_lines_changed_from_patch(self): 21 | patch = "@@ -43,6 +48,8 @@\n@@ -0,0 +1 @@" 22 | 23 | lines = sa_utils.get_lines_changed_from_patch(patch) 24 | self.assertEqual(lines, [(48, 56), (1, 1)]) 25 | 26 | 27 | if __name__ == "__main__": 28 | unittest.main() 29 | -------------------------------------------------------------------------------- /test/utils/helper_functions.py: -------------------------------------------------------------------------------- 1 | def generate_comment(comment_title, content, issues_found, tool_name): 2 | if issues_found == 0: 3 | return ( 4 | '##

:white_check_mark:' 5 | f"{comment_title} - no issues found! :white_check_mark:

" 6 | ) 7 | 8 | expected_comment_body = ( 9 | f'##

:zap: {comment_title} :zap:

\n\n' 10 | ) 11 | if tool_name == "clang-tidy": 12 | expected_comment_body += "\n\n *** \n" 13 | 14 | expected_comment_body += ( 15 | f"
:red_circle: {tool_name} found " 16 | f"{issues_found} {'issues' if issues_found > 1 else 'issue'}!" 17 | " Click here to see details.
" 18 | f"{content}
" 19 | ) 20 | 21 | if tool_name == "cppcheck": 22 | expected_comment_body += "\n\n *** \n" 23 | else: 24 | expected_comment_body += "
\n" 25 | 26 | return expected_comment_body 27 | -------------------------------------------------------------------------------- /.github/workflows/linter.yml: -------------------------------------------------------------------------------- 1 | name: Linter 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | check: 11 | name: Run Linter 12 | runs-on: ubuntu-24.04 13 | steps: 14 | - uses: actions/checkout@v3 15 | 16 | - name: CodeQuality 17 | uses: JacobDomagala/StaticAnalysis@master 18 | with: 19 | language: "Python" 20 | pylint_args: "--rcfile=.pylintrc --recursive=true" 21 | python_dirs: "src test" 22 | exclude_dir: "test/utils/" 23 | 24 | - name: PyLint 25 | uses: ricardochaves/python-lint@v1.4.0 26 | with: 27 | python-root-list: "src test" 28 | use-pylint: false 29 | use-pycodestyle: true 30 | use-flake8: true 31 | use-black: true 32 | use-mypy: true 33 | use-isort: false 34 | extra-mypy-options: "--ignore-missing-imports --show-error-codes" 35 | extra-flake8-options: "--max-line-length=120 --ignore=E203,E402,W503" 36 | extra-pycodestyle-options: "--max-line-length=120 --ignore=E203,E402,W503" 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2021 GitHub, Inc. 
and contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/patch_compile_commands.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import json 3 | import os 4 | import sys 5 | 6 | from typing import Any, Dict, List, Sequence 7 | 8 | NEW_PREFIX = "/github/workspace" 9 | 10 | 11 | def _collect_paths(entries: Sequence[Dict[str, Any]]) -> List[str]: 12 | return [ 13 | os.path.realpath(v) 14 | for e in entries 15 | for v in (e.get("directory"), e.get("file")) 16 | if isinstance(v, str) 17 | ] 18 | 19 | 20 | def patch_compile_commands(path: str) -> None: 21 | with open(path, "r", encoding="utf-8") as f: 22 | data: List[Dict[str, Any]] = json.load(f) 23 | 24 | old_prefix = os.path.commonpath(_collect_paths(data)) 25 | print(f"[INFO] Patching compile_commands.json: '{old_prefix}' → '{NEW_PREFIX}'") 26 | for entry in data: 27 | for key in ("file", "directory", "command"): 28 | val = entry.get(key) 29 | if isinstance(val, str) and old_prefix in val: 30 | entry[key] = val.replace(old_prefix, NEW_PREFIX) 31 | 32 | with open(path, "w", encoding="utf-8") as f: 33 | json.dump(data, f, indent=2) 34 | 35 | 36 | if __name__ == "__main__": 37 | if len(sys.argv) != 2: 38 | sys.exit("usage: patch_compile_commands.py ") 39 | patch_compile_commands(sys.argv[1]) 40 | -------------------------------------------------------------------------------- /docker/static_analysis.dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:24.04 AS base 2 | 3 | # Define versions as environment variables 4 | ENV CLANG_VERSION=20 \ 5 | CPPCHECK_VERSION=2.16.0 \ 6 | CXX=clang++ \ 7 | CC=clang \ 8 | DEBIAN_FRONTEND=noninteractive 9 | 10 | # Copy the llvm.sh installation script 11 | COPY llvm.sh /llvm.sh 12 | 13 | # Install dependencies 14 | RUN apt-get update && apt-get install -y \ 15 | build-essential python3 python3-pip git wget libssl-dev ninja-build \ 
16 | lsb-release software-properties-common gnupg \ 17 | # Execute the LLVM install script with the version number 18 | && chmod +x /llvm.sh && /llvm.sh $CLANG_VERSION \ 19 | && apt-get clean \ 20 | && rm -rf /var/lib/apt/lists/* \ 21 | # Install Python packages 22 | && pip3 install --break-system-packages PyGithub pylint \ 23 | # Create symlinks for clang and clang++ 24 | && ln -s "$(which clang++-$CLANG_VERSION)" /usr/bin/clang++ \ 25 | && ln -s "$(which clang-$CLANG_VERSION)" /usr/bin/clang \ 26 | && ln -s /usr/bin/python3 /usr/bin/python 27 | 28 | WORKDIR /opt 29 | 30 | # Build CMake from source 31 | RUN git clone https://github.com/Kitware/CMake.git \ 32 | && cd CMake \ 33 | && ./bootstrap && make -j$(nproc) && make install 34 | 35 | # Install cppcheck 36 | RUN git clone https://github.com/danmar/cppcheck.git \ 37 | && cd cppcheck \ 38 | && git checkout tags/$CPPCHECK_VERSION \ 39 | && mkdir build && cd build \ 40 | && cmake -G Ninja .. && ninja all && ninja install 41 | -------------------------------------------------------------------------------- /entrypoint_python.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # shellcheck disable=SC2155 3 | 4 | set -e 5 | 6 | # Following variables are declared/defined in parent script 7 | preselected_files=${preselected_files:-""} 8 | print_to_console=${print_to_console:-false} 9 | use_extra_directory=${use_extra_directory:-false} 10 | common_ancestor=${common_ancestor:-""} 11 | 12 | if [ -z "$INPUT_PYTHON_DIRS" ]; then 13 | debug_print "Error: python_dirs action input is empty! You have to provide directories that contain python files to be checked." 
14 | exit 1 15 | fi 16 | 17 | if [ -z "$INPUT_EXCLUDE_DIR" ]; then 18 | debug_print "Running: files_to_check=\$(python3 /src/get_files_to_check.py -dir=\"$GITHUB_WORKSPACE\" -preselected=\"$preselected_files\" -lang=\"python\")" 19 | files_to_check=$(python3 /src/get_files_to_check.py -dir="$GITHUB_WORKSPACE" -preselected="$preselected_files" -lang="python") 20 | else 21 | debug_print "Running: files_to_check=\$(python3 /src/get_files_to_check.py -exclude=\"$GITHUB_WORKSPACE/$INPUT_EXCLUDE_DIR\" -dir=\"$GITHUB_WORKSPACE\" -preselected=\"$preselected_files\" -lang=\"python\")" 22 | files_to_check=$(python3 /src/get_files_to_check.py -exclude="$GITHUB_WORKSPACE/$INPUT_EXCLUDE_DIR" -dir="$GITHUB_WORKSPACE" -preselected="$preselected_files" -lang="python") 23 | fi 24 | 25 | debug_print "Files to check = $files_to_check" 26 | 27 | if [ -z "$files_to_check" ]; then 28 | echo "No files to check" 29 | else 30 | # Trim newlines 31 | INPUT_PYLINT_ARGS="${INPUT_PYLINT_ARGS%"${INPUT_PYLINT_ARGS##*[![:space:]]}"}" 32 | eval "pylint $files_to_check --output-format=json:pylint_out.json $INPUT_PYLINT_ARGS || true" 33 | 34 | cd / 35 | python3 -m src.static_analysis_python -pl "$GITHUB_WORKSPACE/pylint_out.json" -o "$print_to_console" -fk "$use_extra_directory" --common "$common_ancestor" --head "origin/$GITHUB_HEAD_REF" 36 | fi 37 | -------------------------------------------------------------------------------- /.github/workflows/test_action.yml: -------------------------------------------------------------------------------- 1 | name: Test Action 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | check: 11 | name: Test Action 12 | runs-on: ubuntu-24.04 13 | steps: 14 | - uses: actions/checkout@v3 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: 3.13.0 20 | 21 | - name: Test the action 22 | shell: bash 23 | run: | 24 | git config --global user.email ${{ secrets.USER_EMAIL }} 25 | git config 
--global user.name ${{ secrets.USER_NAME }} 26 | 27 | if [ ${GITHUB_EVENT_NAME} = 'push' ] 28 | then 29 | BRANCH_NAME=${GITHUB_REF_NAME} 30 | else 31 | BRANCH_NAME=${GITHUB_HEAD_REF} 32 | fi 33 | 34 | git clone "https://${{secrets.TOKEN}}@github.com/JacobDomagala/TestRepo.git" 35 | cd TestRepo 36 | python ./switch_sa_branch.py -br=$BRANCH_NAME 37 | git diff --quiet && git diff --staged --quiet || git commit -am"Update branch name: ($BRANCH_NAME)" 38 | git push 39 | 40 | # test PR (CMake) 41 | git checkout test-static-analysis 42 | git commit -as --amend --no-edit 43 | git push -f 44 | 45 | # test PR (non CMake) 46 | git checkout test-sa-without-cmake 47 | git commit -as --amend --no-edit 48 | git push -f 49 | 50 | # test pull_request_target 51 | git clone "https://${{secrets.TOKEN}}@github.com/JacobDTest/TestRepo.git" 52 | cd TestRepo 53 | git checkout test-branch-fork 54 | git commit -as --amend --no-edit 55 | git push -f 56 | 57 | - name: Output results (PR) 58 | if: github.event_name == 'pull_request' 59 | uses: mshick/add-pr-comment@v2 60 | with: 61 | message: | 62 | ## Test Action results 63 | *** 64 | ### [Result for push](https://github.com/JacobDomagala/TestRepo/actions/workflows/test.yml?query=branch%3Amain) 65 | 66 | *** 67 | ### [Result for pull_request (CMake)](https://github.com/JacobDomagala/TestRepo/pull/3#issuecomment-1404081176) 68 | 69 | *** 70 | ### [Result for pull_request (non CMake)](https://github.com/JacobDomagala/TestRepo/pull/3#issuecomment-1404102205) 71 | 72 | *** 73 | ### [Result for pull_request_target (CMake)](https://github.com/JacobDomagala/TestRepo/pull/7#issuecomment-1404081052) 74 | 75 | *** 76 | ### [Result for pull_request_target (non CMake)](https://github.com/JacobDomagala/TestRepo/pull/7#issuecomment-1404101648) 77 | -------------------------------------------------------------------------------- /src/get_files_to_check.py: -------------------------------------------------------------------------------- 1 | import argparse 2 
| from pathlib import Path 3 | 4 | 5 | def get_files_to_check(directory_in, excludes_in, preselected_files, lang): 6 | """ 7 | Given a directory path and a string of prefixes to exclude, 8 | return a space-separated string of all files in the directory (and its subdirectories) 9 | that have a supported extension and do not start with any of the excluded prefixes. 10 | 11 | Args: 12 | directory_in (str): The path to the directory to search for files. 13 | excludes_in (str): A space-separated string of prefixes to exclude from the search. 14 | preselected_files (str): If present, then it's the list of files to be checked (minus excluded ones) 15 | lang (str): Programming language 16 | 17 | Returns: 18 | str: A space-separated string of file paths that meet the search criteria. 19 | """ 20 | 21 | exclude_prefixes = [f"{directory_in}/build"] 22 | 23 | if excludes_in is not None: 24 | excludes_list = excludes_in.split(" ") 25 | for exclude in excludes_list: 26 | exclude_prefixes.append(str(exclude)) 27 | 28 | if lang == "c++": 29 | supported_extensions = (".h", ".hpp", ".hcc", ".c", ".cc", ".cpp", ".cxx") 30 | elif lang == "python": 31 | supported_extensions = ".py" 32 | else: 33 | raise RuntimeError(f"Unknown language {lang}") 34 | 35 | all_files = [] 36 | 37 | if len(preselected_files) == 0: 38 | for path in Path(directory_in).rglob("*.*"): 39 | path_ = str(path.resolve()) 40 | if path_.endswith(supported_extensions) and not path_.startswith( 41 | tuple(exclude_prefixes) 42 | ): 43 | all_files.append(path_) 44 | else: 45 | for file in preselected_files: 46 | if not file.startswith(directory_in): 47 | file = f"{directory_in}/{file}" 48 | if not file.startswith(tuple(exclude_prefixes)): 49 | all_files.append(file) 50 | 51 | return " ".join(all_files) 52 | 53 | 54 | if __name__ == "__main__": 55 | parser = argparse.ArgumentParser() 56 | parser.add_argument("-exclude", help="Exclude prefix", required=False) 57 | parser.add_argument( 58 | "-preselected", help="Preselected 
files", default="", required=False 59 | ) 60 | parser.add_argument("-dir", help="Source directory", required=True) 61 | parser.add_argument("-lang", help="Programming language", required=True) 62 | 63 | directory = parser.parse_args().dir 64 | preselected = parser.parse_args().preselected 65 | excludes = parser.parse_args().exclude 66 | language = parser.parse_args().lang 67 | 68 | print(get_files_to_check(directory, excludes, preselected.split(), language)) 69 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: "Static analysis for C++/Python project" 2 | description: "Static analysis with cppcheck & clang-tidy for C++, pylint for Python. Posts results to PRs or console." 3 | 4 | inputs: 5 | github_token: 6 | description: 'Github token used for Github API requests' 7 | default: ${{ github.token }} 8 | pr_num: 9 | description: 'Pull request number for which the comment will be created' 10 | default: ${{ github.event.pull_request.number }} 11 | repo: 12 | description: 'Repository name' 13 | default: ${{ github.repository }} 14 | pr_repo: 15 | description: 'Head repository (This is useful when using Action with [pull_request_target])' 16 | default: ${{ github.event.pull_request.head.repo.full_name }} 17 | pr_head: 18 | description: 'Head (branch) for PR (same as `pr_repo` input, useful with [pull_request_target]' 19 | default: ${{ github.event.pull_request.head.ref }} 20 | comment_title: 21 | description: 'Title for comment with the raport. 
This should be an unique name' 22 | default: Static analysis result 23 | exclude_dir: 24 | description: 'Directories (space separated) which should be excluded from the raport' 25 | apt_pckgs: 26 | description: 'Additional (space separated) packages that need to be installed in order for project to compile' 27 | compile_commands: 28 | description: 'User generated compile_commands.json' 29 | compile_commands_replace_prefix: 30 | description: 'Whether we should replace the prefix of files inside user generated compile_commands.json file' 31 | default: false 32 | init_script: 33 | description: | 34 | 'Optional shell script that will be run before configuring project (i.e. running CMake command).' 35 | 'This should be used, when the project requires some environmental set-up beforehand' 36 | 'Script will be run with 2 arguments: `root_dir`(root directory of user's code) and `build_dir`(build directory created for running SA)' 37 | 'Note. `apt_pckgs` will run before this script, just in case you need some packages installed' 38 | 'Also this script will be run in the root of the project (`root_dir`)' 39 | cppcheck_args: 40 | description: 'cppcheck (space separated) arguments that will be used' 41 | default: --enable=all --suppress=missingIncludeSystem --inline-suppr --inconclusive 42 | clang_tidy_args: 43 | description: 'clang-tidy arguments that will be used (example: -checks="*,fuchsia-*,google-*,zircon-*"' 44 | report_pr_changes_only: 45 | description: 'Only post the issues found within the changes introduced in this Pull Request' 46 | default: false 47 | use_cmake: 48 | description: 'Determines wether CMake should be used to generate compile_commands.json file' 49 | default: true 50 | cmake_args: 51 | description: 'Additional CMake arguments' 52 | force_console_print: 53 | description: 'Output the action result to console, instead of creating the comment' 54 | default: false 55 | verbose: 56 | description: 'Verbose output. 
Used for debugging' 57 | default: false 58 | language: 59 | description: 'Programming language to check for. Supported languages are: C++/Python' 60 | default: C++ 61 | pylint_args: 62 | description: 'PyLint options' 63 | python_dirs: 64 | description: 'Directories containing python files to be checked' 65 | 66 | runs: 67 | using: "docker" 68 | image: "Dockerfile" 69 | 70 | branding: 71 | icon: "book-open" 72 | color: "white" 73 | -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # shellcheck disable=SC1091 3 | 4 | set -e 5 | 6 | export TERM=xterm-color 7 | 8 | debug_print() { 9 | if [ "$INPUT_VERBOSE" = "true" ]; then 10 | echo -e "\u001b[32m $1 \u001b[0m" 11 | fi 12 | } 13 | 14 | if [ "$RUNNER_DEBUG" = "1" ]; then 15 | export INPUT_VERBOSE="true" 16 | debug_print "Runner is running in debug mode - enabling verbose output" 17 | fi 18 | 19 | print_to_console=${INPUT_FORCE_CONSOLE_PRINT} 20 | check_cpp=$( [ "${INPUT_LANGUAGE,,}" = "c++" ] && echo "true" || echo "false" ) 21 | check_python=$( [ "${INPUT_LANGUAGE,,}" = "python" ] && echo "true" || echo "false" ) 22 | 23 | # Some debug info 24 | debug_print "Print to console = $print_to_console" 25 | 26 | if [ "$print_to_console" = true ]; then 27 | echo "The 'force_console_print' option is enabled. Printing output to console." 28 | elif [ -z "$INPUT_PR_NUM" ]; then 29 | echo "Pull request number input (pr_num) is not present. Printing output to console." 
30 | print_to_console=true 31 | else 32 | debug_print "Pull request number: ${INPUT_PR_NUM}" 33 | fi 34 | 35 | if [ -n "$INPUT_APT_PCKGS" ]; then 36 | apt-get update && eval apt-get install -y "$INPUT_APT_PCKGS" 37 | fi 38 | 39 | debug_print "Repo = ${INPUT_PR_REPO} PR_HEAD = ${INPUT_PR_HEAD} event name = ${GITHUB_EVENT_NAME}" 40 | 41 | use_extra_directory=false 42 | original_root_dir="$GITHUB_WORKSPACE" 43 | 44 | # This is useful when running this Action from fork (together with [pull_request_target]) 45 | if [ "$GITHUB_EVENT_NAME" = "pull_request_target" ] && [ -n "$INPUT_PR_REPO" ]; then 46 | debug_print "Running in [pull_request_target] event! Cloning the Head repo ..." 47 | [ ! -d 'pr_tree' ] && git clone "https://www.github.com/$INPUT_PR_REPO" pr_tree 48 | cd pr_tree || exit 49 | git checkout "$INPUT_PR_HEAD" 50 | use_extra_directory=true 51 | 52 | # Override commit SHA, in order to get the correct code snippet 53 | NEW_GITHUB_SHA=$(git rev-parse HEAD) 54 | 55 | export GITHUB_SHA=$NEW_GITHUB_SHA 56 | pwd=$(pwd) 57 | export GITHUB_WORKSPACE=$pwd 58 | fi 59 | 60 | preselected_files="" 61 | common_ancestor="" 62 | if [ "$INPUT_REPORT_PR_CHANGES_ONLY" = true ]; then 63 | echo "The 'report_pr_changes_only' option is enabled. Running SA only on modified files." 64 | git config --global --add safe.directory /github/workspace 65 | git fetch origin 66 | common_ancestor=$(git merge-base origin/"$GITHUB_BASE_REF" "origin/$GITHUB_HEAD_REF") 67 | debug_print "Common ancestor: $common_ancestor" 68 | if [ "$check_cpp" = "true" ]; then 69 | preselected_files="$(git diff --name-only "$common_ancestor" "origin/$GITHUB_HEAD_REF" | grep -E '\.(c|cpp|h|hpp)$')" || true 70 | output_string="No (C/C++) files changed in the PR! Only files ending with .c, .cpp, .h, or .hpp are considered." 
71 | fi 72 | 73 | if [ "$check_python" = "true" ]; then 74 | preselected_files="$(git diff --name-only "$common_ancestor" "origin/$GITHUB_HEAD_REF" | grep -E '\.(py)$')" || true 75 | output_string="No Python files changed in the PR! Only files ending with .py are considered." 76 | fi 77 | 78 | if [ -z "$preselected_files" ]; then 79 | debug_print "$output_string" 80 | else 81 | debug_print "Preselected files: \n$preselected_files" 82 | fi 83 | fi 84 | 85 | debug_print "GITHUB_WORKSPACE = ${GITHUB_WORKSPACE} INPUT_EXCLUDE_DIR = ${INPUT_EXCLUDE_DIR} use_extra_directory = ${use_extra_directory}" 86 | 87 | mkdir -p build 88 | chown -R "$(stat -c %u "$GITHUB_WORKSPACE")":"$(stat -c %g "$GITHUB_WORKSPACE")" build 89 | 90 | if [ -n "$INPUT_INIT_SCRIPT" ]; then 91 | # Use $original_root_dir here, just in case we're running in pull_request_target 92 | chmod +x "$original_root_dir/$INPUT_INIT_SCRIPT" 93 | # shellcheck source=/dev/null 94 | source "$original_root_dir/$INPUT_INIT_SCRIPT" "$GITHUB_WORKSPACE" "$GITHUB_WORKSPACE/build" 95 | fi 96 | 97 | if [ "${INPUT_LANGUAGE,,}" = "c++" ]; then 98 | debug_print "Running checks on c++ code" 99 | source "/src/entrypoint_cpp.sh" 100 | else # assume python 101 | debug_print "Running checks on Python code" 102 | source "/src/entrypoint_python.sh" 103 | fi 104 | -------------------------------------------------------------------------------- /entrypoint_cpp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # shellcheck disable=SC2155 3 | 4 | set -e 5 | 6 | # Following variables are declared/defined in parent script 7 | preselected_files=${preselected_files:-""} 8 | print_to_console=${print_to_console:-false} 9 | use_extra_directory=${use_extra_directory:-false} 10 | common_ancestor=${common_ancestor:-""} 11 | 12 | CLANG_TIDY_ARGS="${INPUT_CLANG_TIDY_ARGS//$'\n'/}" 13 | CPPCHECK_ARGS="${INPUT_CPPCHECK_ARGS//$'\n'/}" 14 | 15 | if [ -n "$INPUT_COMPILE_COMMANDS" ]; then 16 | 
debug_print "Using compile_commands.json file ($INPUT_COMPILE_COMMANDS) - use_cmake input is not being used!" 17 | export INPUT_USE_CMAKE=false 18 | if [ "$INPUT_COMPILE_COMMANDS_REPLACE_PREFIX" = true ]; then 19 | debug_print "Replacing prefix inside user generated compile_commands.json file!" 20 | python3 /src/patch_compile_commands.py "/github/workspace/$INPUT_COMPILE_COMMANDS" 21 | fi 22 | fi 23 | 24 | cd build 25 | 26 | if [ "$INPUT_REPORT_PR_CHANGES_ONLY" = true ]; then 27 | if [ -z "$preselected_files" ]; then 28 | # Create empty files 29 | touch cppcheck.txt 30 | touch clang_tidy.txt 31 | 32 | cd / 33 | python3 -m src.static_analysis_cpp -cc "${GITHUB_WORKSPACE}/build/cppcheck.txt" -ct "${GITHUB_WORKSPACE}/build/clang_tidy.txt" -o "$print_to_console" -fk "$use_extra_directory" --common "$common_ancestor" --head "origin/$GITHUB_HEAD_REF" 34 | exit 0 35 | fi 36 | fi 37 | 38 | if [ "$INPUT_USE_CMAKE" = true ]; then 39 | # Trim trailing newlines 40 | INPUT_CMAKE_ARGS="${INPUT_CMAKE_ARGS%"${INPUT_CMAKE_ARGS##*[![:space:]]}"}" 41 | debug_print "Running cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON $INPUT_CMAKE_ARGS -S $GITHUB_WORKSPACE -B $(pwd)" 42 | eval "cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON $INPUT_CMAKE_ARGS -S $GITHUB_WORKSPACE -B $(pwd)" 43 | fi 44 | 45 | if [ -z "$INPUT_EXCLUDE_DIR" ]; then 46 | files_to_check=$(python3 /src/get_files_to_check.py -dir="$GITHUB_WORKSPACE" -preselected="$preselected_files" -lang="c++") 47 | debug_print "Running: files_to_check=python3 /src/get_files_to_check.py -dir=\"$GITHUB_WORKSPACE\" -preselected=\"$preselected_files\" -lang=\"c++\")" 48 | else 49 | files_to_check=$(python3 /src/get_files_to_check.py -exclude="$GITHUB_WORKSPACE/$INPUT_EXCLUDE_DIR" -dir="$GITHUB_WORKSPACE" -preselected="$preselected_files" -lang="c++") 50 | debug_print "Running: files_to_check=python3 /src/get_files_to_check.py -exclude=\"$GITHUB_WORKSPACE/$INPUT_EXCLUDE_DIR\" -dir=\"$GITHUB_WORKSPACE\" -preselected=\"$preselected_files\" -lang=\"c++\")" 51 
| fi 52 | 53 | debug_print "Files to check = $files_to_check" 54 | debug_print "CPPCHECK_ARGS = $CPPCHECK_ARGS" 55 | debug_print "CLANG_TIDY_ARGS = $CLANG_TIDY_ARGS" 56 | 57 | num_proc=$(nproc) 58 | 59 | if [ -z "$files_to_check" ]; then 60 | echo "No files to check" 61 | else 62 | if [ "$INPUT_USE_CMAKE" = true ] || [ -n "$INPUT_COMPILE_COMMANDS" ]; then 63 | # Determine path to compile_commands.json 64 | if [ -n "$INPUT_COMPILE_COMMANDS" ]; then 65 | compile_commands_path="/github/workspace/$INPUT_COMPILE_COMMANDS" 66 | compile_commands_dir=$(dirname "$compile_commands_path") 67 | 68 | else 69 | compile_commands_path="compile_commands.json" 70 | compile_commands_dir=$(pwd) 71 | fi 72 | 73 | for file in $files_to_check; do 74 | exclude_arg="" 75 | if [ -n "$INPUT_EXCLUDE_DIR" ]; then 76 | exclude_arg="-i$GITHUB_WORKSPACE/$INPUT_EXCLUDE_DIR" 77 | fi 78 | 79 | # Replace '/' with '_' 80 | file_name=$(echo "$file" | tr '/' '_') 81 | 82 | debug_print "Running cppcheck --project=$compile_commands_path $CPPCHECK_ARGS --file-filter=$file --output-file=cppcheck_$file_name.txt $exclude_arg" 83 | eval cppcheck --project="$compile_commands_path" "$CPPCHECK_ARGS" --file-filter="$file" --output-file="cppcheck_$file_name.txt" "$exclude_arg" || true 84 | done 85 | 86 | cat cppcheck_*.txt > cppcheck.txt 87 | 88 | # Excludes for clang-tidy are handled in python script 89 | debug_print "Running run-clang-tidy-20 $CLANG_TIDY_ARGS -p $compile_commands_dir $files_to_check >>clang_tidy.txt 2>&1" 90 | eval run-clang-tidy-20 "$CLANG_TIDY_ARGS" -p "$compile_commands_dir" "$files_to_check" > clang_tidy.txt 2>&1 || true 91 | 92 | else 93 | # Without compile_commands.json 94 | debug_print "Running cppcheck -j $num_proc $files_to_check $CPPCHECK_ARGS --output-file=cppcheck.txt ..." 
95 | eval cppcheck -j "$num_proc" "$files_to_check" "$CPPCHECK_ARGS" --output-file=cppcheck.txt || true 96 | 97 | debug_print "Running run-clang-tidy-20 $CLANG_TIDY_ARGS $files_to_check >>clang_tidy.txt 2>&1" 98 | eval run-clang-tidy-20 "$CLANG_TIDY_ARGS" "$files_to_check" > clang_tidy.txt 2>&1 || true 99 | fi 100 | 101 | cd / 102 | 103 | python3 -m src.static_analysis_cpp -cc "${GITHUB_WORKSPACE}/build/cppcheck.txt" -ct "${GITHUB_WORKSPACE}/build/clang_tidy.txt" -o "$print_to_console" -fk "$use_extra_directory" --common "$common_ancestor" --head "origin/$GITHUB_HEAD_REF" 104 | fi 105 | -------------------------------------------------------------------------------- /llvm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ################################################################################ 3 | # Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 | # See https://llvm.org/LICENSE.txt for license information. 5 | # SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 | ################################################################################ 7 | # 8 | # This script will install the llvm toolchain on the different 9 | # Debian and Ubuntu versions 10 | 11 | set -eux 12 | 13 | usage() { 14 | set +x 15 | echo "Usage: $0 [llvm_major_version] [all] [OPTIONS]" 1>&2 16 | echo -e "all\t\t\tInstall all packages." 1>&2 17 | echo -e "-n=code_name\t\tSpecifies the distro codename, for example bionic" 1>&2 18 | echo -e "-h\t\t\tPrints this help." 1>&2 19 | echo -e "-m=repo_base_url\tSpecifies the base URL from which to download." 1>&2 20 | exit 1; 21 | } 22 | 23 | CURRENT_LLVM_STABLE=18 24 | BASE_URL="http://apt.llvm.org" 25 | 26 | # Check for required tools 27 | needed_binaries=(lsb_release wget add-apt-repository gpg) 28 | missing_binaries=() 29 | for binary in "${needed_binaries[@]}"; do 30 | if ! 
which $binary &>/dev/null ; then 31 | missing_binaries+=($binary) 32 | fi 33 | done 34 | if [[ ${#missing_binaries[@]} -gt 0 ]] ; then 35 | echo "You are missing some tools this script requires: ${missing_binaries[@]}" 36 | echo "(hint: apt install lsb-release wget software-properties-common gnupg)" 37 | exit 4 38 | fi 39 | 40 | # Set default values for commandline arguments 41 | # We default to the current stable branch of LLVM 42 | LLVM_VERSION=$CURRENT_LLVM_STABLE 43 | ALL=0 44 | DISTRO=$(lsb_release -is) 45 | VERSION=$(lsb_release -sr) 46 | UBUNTU_CODENAME="" 47 | CODENAME_FROM_ARGUMENTS="" 48 | # Obtain VERSION_CODENAME and UBUNTU_CODENAME (for Ubuntu and its derivatives) 49 | source /etc/os-release 50 | DISTRO=${DISTRO,,} 51 | case ${DISTRO} in 52 | debian) 53 | # Debian Trixie has a workaround because of 54 | # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1038383 55 | if [[ "${VERSION}" == "unstable" ]] || [[ "${VERSION}" == "testing" ]] || [[ "${VERSION_CODENAME}" == "trixie" ]]; then 56 | CODENAME=unstable 57 | LINKNAME= 58 | else 59 | # "stable" Debian release 60 | CODENAME=${VERSION_CODENAME} 61 | LINKNAME=-${CODENAME} 62 | fi 63 | ;; 64 | *) 65 | # ubuntu and its derivatives 66 | if [[ -n "${UBUNTU_CODENAME}" ]]; then 67 | CODENAME=${UBUNTU_CODENAME} 68 | if [[ -n "${CODENAME}" ]]; then 69 | LINKNAME=-${CODENAME} 70 | fi 71 | fi 72 | ;; 73 | esac 74 | 75 | # read optional command line arguments 76 | if [ "$#" -ge 1 ] && [ "${1::1}" != "-" ]; then 77 | if [ "$1" != "all" ]; then 78 | LLVM_VERSION=$1 79 | else 80 | # special case for ./llvm.sh all 81 | ALL=1 82 | fi 83 | OPTIND=2 84 | if [ "$#" -ge 2 ]; then 85 | if [ "$2" == "all" ]; then 86 | # Install all packages 87 | ALL=1 88 | OPTIND=3 89 | fi 90 | fi 91 | fi 92 | 93 | while getopts ":hm:n:" arg; do 94 | case $arg in 95 | h) 96 | usage 97 | ;; 98 | m) 99 | BASE_URL=${OPTARG} 100 | ;; 101 | n) 102 | CODENAME=${OPTARG} 103 | if [[ "${CODENAME}" == "unstable" ]]; then 104 | # link name does not 
apply to unstable repository 105 | LINKNAME= 106 | else 107 | LINKNAME=-${CODENAME} 108 | fi 109 | CODENAME_FROM_ARGUMENTS="true" 110 | ;; 111 | esac 112 | done 113 | 114 | if [[ $EUID -ne 0 ]]; then 115 | echo "This script must be run as root!" 116 | exit 1 117 | fi 118 | 119 | declare -A LLVM_VERSION_PATTERNS 120 | LLVM_VERSION_PATTERNS[9]="-9" 121 | LLVM_VERSION_PATTERNS[10]="-10" 122 | LLVM_VERSION_PATTERNS[11]="-11" 123 | LLVM_VERSION_PATTERNS[12]="-12" 124 | LLVM_VERSION_PATTERNS[13]="-13" 125 | LLVM_VERSION_PATTERNS[14]="-14" 126 | LLVM_VERSION_PATTERNS[15]="-15" 127 | LLVM_VERSION_PATTERNS[16]="-16" 128 | LLVM_VERSION_PATTERNS[17]="-17" 129 | LLVM_VERSION_PATTERNS[18]="-18" 130 | LLVM_VERSION_PATTERNS[19]="-19" 131 | LLVM_VERSION_PATTERNS[20]="-20" 132 | LLVM_VERSION_PATTERNS[21]="" 133 | 134 | if [ ! ${LLVM_VERSION_PATTERNS[$LLVM_VERSION]+_} ]; then 135 | echo "This script does not support LLVM version $LLVM_VERSION" 136 | exit 3 137 | fi 138 | 139 | LLVM_VERSION_STRING=${LLVM_VERSION_PATTERNS[$LLVM_VERSION]} 140 | 141 | # join the repository name 142 | if [[ -n "${CODENAME}" ]]; then 143 | REPO_NAME="deb ${BASE_URL}/${CODENAME}/ llvm-toolchain${LINKNAME}${LLVM_VERSION_STRING} main" 144 | 145 | # check if the repository exists for the distro and version 146 | if ! wget -q --method=HEAD ${BASE_URL}/${CODENAME} &> /dev/null; then 147 | if [[ -n "${CODENAME_FROM_ARGUMENTS}" ]]; then 148 | echo "Specified codename '${CODENAME}' is not supported by this script." 149 | else 150 | echo "Distribution '${DISTRO}' in version '${VERSION}' is not supported by this script." 151 | fi 152 | exit 2 153 | fi 154 | fi 155 | 156 | 157 | # install everything 158 | 159 | if [[ ! 
-f /etc/apt/trusted.gpg.d/apt.llvm.org.asc ]]; then 160 | # download GPG key once 161 | wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc 162 | fi 163 | 164 | if [[ -z "`apt-key list 2> /dev/null | grep -i llvm`" ]]; then 165 | # Delete the key in the old format 166 | apt-key del AF4F7421 || true 167 | fi 168 | if [[ "${VERSION_CODENAME}" == "bookworm" ]]; then 169 | # add it twice to workaround: 170 | # https://github.com/llvm/llvm-project/issues/62475 171 | add-apt-repository -y "${REPO_NAME}" 172 | fi 173 | 174 | add-apt-repository -y "${REPO_NAME}" 175 | apt-get update 176 | PKG="clang-$LLVM_VERSION lldb-$LLVM_VERSION lld-$LLVM_VERSION clangd-$LLVM_VERSION clang-tidy-$LLVM_VERSION clang-format-$LLVM_VERSION clang-tools-$LLVM_VERSION" 177 | if [[ $ALL -eq 1 ]]; then 178 | # same as in test-install.sh 179 | # No worries if we have dups 180 | PKG="$PKG clang-tidy-$LLVM_VERSION clang-format-$LLVM_VERSION clang-tools-$LLVM_VERSION llvm-$LLVM_VERSION-dev lld-$LLVM_VERSION lldb-$LLVM_VERSION llvm-$LLVM_VERSION-tools libomp-$LLVM_VERSION-dev libc++-$LLVM_VERSION-dev libc++abi-$LLVM_VERSION-dev libclang-common-$LLVM_VERSION-dev libclang-$LLVM_VERSION-dev libclang-cpp$LLVM_VERSION-dev libunwind-$LLVM_VERSION-dev" 181 | if test $LLVM_VERSION -gt 14; then 182 | PKG="$PKG libclang-rt-$LLVM_VERSION-dev libpolly-$LLVM_VERSION-dev" 183 | fi 184 | fi 185 | apt-get install -y $PKG 186 | -------------------------------------------------------------------------------- /src/static_analysis_python.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import json 4 | 5 | from src import sa_utils as utils 6 | 7 | 8 | def parse_pylint_json( 9 | pylint_json_in, output_to_console, common_ancestor, feature_branch 10 | ): 11 | with open(pylint_json_in, "r", encoding="utf-8") as file: 12 | data = file.read() 13 | 14 | files_changed_in_pr = {} 15 | if not output_to_console and 
def parse_input_vars():
    """Parse the command-line arguments for the PyLint analysis entry point.

    Side effects:
        When run against a fork, overrides ``utils.REPO_NAME`` with the head
        repository taken from the ``INPUT_PR_REPO`` environment variable.

    Returns:
        tuple: (pylint_file_name, output_to_console, common_ancestor,
        feature_branch) where ``output_to_console`` is already a bool.
    """
    parser = utils.create_common_input_vars_parser()
    parser.add_argument(
        "-pl", "--pylint", help="Output file name for pylint", required=True
    )

    # Parse the command line once; the previous code re-ran parse_args()
    # (and therefore re-parsed sys.argv) for every single option it read.
    args = parser.parse_args()

    if args.fork_repository == "true":
        # Make sure to use Head repository
        utils.REPO_NAME = os.getenv("INPUT_PR_REPO")

    pylint_file_name = args.pylint
    output_to_console = args.output_to_console == "true"

    common_ancestor = args.common
    feature_branch = args.head

    return (pylint_file_name, output_to_console, common_ancestor, feature_branch)


def append_issue(is_note, per_issue_string, new_line, list_of_issues):
    """Accumulate tool findings into per-issue strings.

    A note line extends the issue currently being assembled; a non-note line
    closes the current issue (storing it in ``list_of_issues`` if not already
    present) and starts a fresh one.

    Returns:
        str: The updated in-progress issue string.
    """
    if not is_note:
        if len(per_issue_string) > 0 and (per_issue_string not in list_of_issues):
            list_of_issues.append(per_issue_string)
        per_issue_string = new_line
    else:
        per_issue_string += new_line

    return per_issue_string
def create_comment_for_output(tool_output, files_changed_in_pr, output_to_console):
    """
    Build the GitHub pull-request comment for the parsed PyLint results.

    Parameters:
        tool_output (list): Parsed PyLint JSON entries (one dict per finding).
        files_changed_in_pr (dict): Files changed in the pull request mapped to
            the line ranges that were modified.
        output_to_console (bool): When True, produce plain console lines
            instead of GitHub-flavoured markdown.

    Returns:
        tuple: (generated comment string, number of issues found).
    """
    issues = []
    pending = ""

    utils.debug_print(f"PyLint output:\n{tool_output}\n")

    for entry in tool_output:
        path = entry["path"]
        line_start = entry["line"]
        issue_description = (
            f"{entry['message-id']}: {entry['message']} ({entry['symbol']})"
        )

        line_end = utils.get_file_line_end(path, line_start)

        # Console-only mode: emit one plain line per finding and move on.
        if output_to_console:
            pending = append_issue(
                False,
                pending,
                f"{path}:{line_start} {issue_description}",
                issues,
            )
            continue

        if utils.is_part_of_pr_changes(path, line_start, files_changed_in_pr):
            pending, description = utils.generate_description(
                False,
                False,
                line_start,
                issue_description,
                pending,
            )

            rendered = utils.generate_output(
                False,
                (utils.WORK_DIR, path),
                line_start,
                line_end,
                description,
            )

            if not utils.check_for_char_limit(rendered):
                # Comment size budget exhausted: stop and return what we have.
                utils.CURRENT_COMMENT_LENGTH = utils.COMMENT_MAX_SIZE
                return "\n".join(issues), len(issues)

            pending = append_issue(False, pending, rendered, issues)
            utils.CURRENT_COMMENT_LENGTH += len(rendered)

    # Flush the last accumulated issue, if it was not stored yet.
    if pending and pending not in issues:
        issues.append(pending)

    output_string = "\n".join(issues)

    utils.debug_print(f"\nFinal output_string = \n{output_string}\n")

    return output_string, len(issues)
len(list_of_issues) 135 | 136 | 137 | def prepare_comment_body(pylint_comment_in, pylint_issues_found_in): 138 | """ 139 | Generates a comment body based on the results of the PyLint analysis. 140 | 141 | Args: 142 | pylint_comment (str): The comment body generated for the PyLint analysis. 143 | pylint_issues_found (int): The number of issues found by PyLint analysis. 144 | 145 | Returns: 146 | str: The final comment body that will be posted as a comment on the pull request. 147 | """ 148 | 149 | if pylint_issues_found_in == 0: 150 | full_comment_body = ( 151 | '##

:white_check_mark:' 152 | f"{utils.COMMENT_TITLE} - no issues found! :white_check_mark:

" 153 | ) 154 | else: 155 | full_comment_body = ( 156 | f'##

:zap: {utils.COMMENT_TITLE} :zap:

\n\n' 157 | ) 158 | 159 | full_comment_body += ( 160 | f"
:red_circle: PyLint found " 161 | f"{pylint_issues_found_in} {'issues' if pylint_issues_found_in > 1 else 'issue'}!" 162 | " Click here to see details.
" 163 | f"{pylint_comment_in}
" 164 | ) 165 | 166 | full_comment_body += "\n\n *** \n" 167 | 168 | if utils.CURRENT_COMMENT_LENGTH == utils.COMMENT_MAX_SIZE: 169 | full_comment_body += f"\n```diff\n{utils.MAX_CHAR_COUNT_REACHED}\n```" 170 | 171 | utils.debug_print( 172 | f"Repo={utils.REPO_NAME} pr_num={utils.PR_NUM} comment_title={utils.COMMENT_TITLE}" 173 | ) 174 | 175 | return full_comment_body 176 | 177 | 178 | if __name__ == "__main__": 179 | ( 180 | pylint_file_name_in, 181 | output_to_console_in, 182 | common_ancestor_in, 183 | feature_branch_in, 184 | ) = parse_input_vars() 185 | 186 | pylint_comment, pylint_issues_found = parse_pylint_json( 187 | pylint_file_name_in, output_to_console_in, common_ancestor_in, feature_branch_in 188 | ) 189 | if not output_to_console_in: 190 | comment_body_in = prepare_comment_body(pylint_comment, pylint_issues_found) 191 | utils.create_or_edit_comment(comment_body_in) 192 | 193 | sys.exit(pylint_issues_found) 194 | -------------------------------------------------------------------------------- /test/test_static_analysis_python.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | import os 4 | import sys 5 | 6 | try: 7 | PROJECT_PATH = f"{os.sep}".join(os.path.abspath(__file__).split(os.sep)[:-2]) 8 | sys.path.append(PROJECT_PATH) 9 | except Exception as exception: 10 | print(f"Can not add project path to system path! 
Exiting!\nERROR: {exception}") 11 | raise SystemExit(1) from exception 12 | 13 | os.environ["GITHUB_WORKSPACE"] = f"{PROJECT_PATH}/test/utils/dummy_project" 14 | os.environ["INPUT_VERBOSE"] = "True" 15 | os.environ["INPUT_REPORT_PR_CHANGES_ONLY"] = "False" 16 | os.environ["INPUT_REPO"] = "RepoName" 17 | os.environ["GITHUB_SHA"] = "1234" 18 | os.environ["INPUT_COMMENT_TITLE"] = "title" 19 | 20 | from src import static_analysis_python 21 | 22 | 23 | class TestStaticAnalysisPython(unittest.TestCase): 24 | """Unit tests for static_analysis_python""" 25 | 26 | maxDiff = None 27 | 28 | def test_create_comment_for_output(self): 29 | """ 30 | Test the `create_comment_for_output()` function. 31 | 32 | This test case checks whether the `create_comment_for_output()` function correctly 33 | generates a GitHub comment that displays static analysis issues for a given set of 34 | files. 35 | 36 | The test case creates a mock set of files and static analysis issues, and expects the 37 | generated GitHub comment to match a pre-defined expected string. 
38 | """ 39 | 40 | pylint_content = r""" [ 41 | { 42 | "type": "convention", 43 | "module": "dummy", 44 | "obj": "", 45 | "line": 5, 46 | "column": 0, 47 | "endLine": 5, 48 | "endColumn": 5, 49 | "path": "dummy.py", 50 | "symbol": "invalid-name", 51 | "message": "Constant name \"shift\" doesn't conform to UPPER_CASE naming style", 52 | "message-id": "C0103" 53 | }, 54 | { 55 | "type": "convention", 56 | "module": "dummy", 57 | "obj": "", 58 | "line": 8, 59 | "column": 0, 60 | "endLine": 8, 61 | "endColumn": 7, 62 | "path": "dummy.py", 63 | "symbol": "invalid-name", 64 | "message": "Constant name \"letters\" doesn't conform to UPPER_CASE naming style", 65 | "message-id": "C0103" 66 | }, 67 | { 68 | "type": "convention", 69 | "module": "dummy", 70 | "obj": "", 71 | "line": 9, 72 | "column": 0, 73 | "endLine": 9, 74 | "endColumn": 7, 75 | "path": "dummy.py", 76 | "symbol": "invalid-name", 77 | "message": "Constant name \"encoded\" doesn't conform to UPPER_CASE naming style", 78 | "message-id": "C0103" 79 | }, 80 | { 81 | "type": "convention", 82 | "module": "dummy", 83 | "obj": "", 84 | "line": 13, 85 | "column": 12, 86 | "endLine": 13, 87 | "endColumn": 19, 88 | "path": "dummy.py", 89 | "symbol": "invalid-name", 90 | "message": "Constant name \"encoded\" doesn't conform to UPPER_CASE naming style", 91 | "message-id": "C0103" 92 | }, 93 | { 94 | "type": "convention", 95 | "module": "dummy", 96 | "obj": "", 97 | "line": 15, 98 | "column": 12, 99 | "endLine": 15, 100 | "endColumn": 13, 101 | "path": "dummy.py", 102 | "symbol": "invalid-name", 103 | "message": "Constant name \"x\" doesn't conform to UPPER_CASE naming style", 104 | "message-id": "C0103" 105 | }, 106 | { 107 | "type": "convention", 108 | "module": "dummy", 109 | "obj": "", 110 | "line": 16, 111 | "column": 12, 112 | "endLine": 16, 113 | "endColumn": 19, 114 | "path": "dummy.py", 115 | "symbol": "invalid-name", 116 | "message": "Constant name \"encoded\" doesn't conform to UPPER_CASE naming style", 117 | 
"message-id": "C0103" 118 | }, 119 | { 120 | "type": "convention", 121 | "module": "dummy", 122 | "obj": "", 123 | "line": 20, 124 | "column": 12, 125 | "endLine": 20, 126 | "endColumn": 19, 127 | "path": "dummy.py", 128 | "symbol": "invalid-name", 129 | "message": "Constant name \"encoded\" doesn't conform to UPPER_CASE naming style", 130 | "message-id": "C0103" 131 | }, 132 | { 133 | "type": "convention", 134 | "module": "dummy", 135 | "obj": "", 136 | "line": 22, 137 | "column": 12, 138 | "endLine": 22, 139 | "endColumn": 13, 140 | "path": "dummy.py", 141 | "symbol": "invalid-name", 142 | "message": "Constant name \"x\" doesn't conform to UPPER_CASE naming style", 143 | "message-id": "C0103" 144 | }, 145 | { 146 | "type": "convention", 147 | "module": "dummy", 148 | "obj": "", 149 | "line": 23, 150 | "column": 12, 151 | "endLine": 23, 152 | "endColumn": 19, 153 | "path": "dummy.py", 154 | "symbol": "invalid-name", 155 | "message": "Constant name \"encoded\" doesn't conform to UPPER_CASE naming style", 156 | "message-id": "C0103" 157 | } 158 | ]""" 159 | 160 | files_changed_in_pr = {"/github/workspace/dummy.py": ("added", (1, 25))} 161 | result = static_analysis_python.create_comment_for_output( 162 | json.loads(pylint_content), files_changed_in_pr, False 163 | ) 164 | 165 | sha = os.getenv("GITHUB_SHA") 166 | repo_name = os.getenv("INPUT_REPO") 167 | expected = ( 168 | f"\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L5-L10 \n" 169 | "```diff" 170 | '\n!Line: 5 - C0103: Constant name "shift" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 171 | "``` \n
" 172 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L8-L13 \n" 173 | "```diff\n" 174 | '!Line: 8 - C0103: Constant name "letters" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 175 | "``` \n
" 176 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L9-L14 \n" 177 | "```diff\n" 178 | '!Line: 9 - C0103: Constant name "encoded" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 179 | "```" 180 | " \n
" 181 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L13-L18 \n" 182 | "```diff\n" 183 | '!Line: 13 - C0103: Constant name "encoded" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 184 | "``` \n
" 185 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L15-L20 \n" 186 | "```diff\n" 187 | '!Line: 15 - C0103: Constant name "x" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 188 | "``` \n
" 189 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L16-L21 \n" 190 | "```diff\n" 191 | '!Line: 16 - C0103: Constant name "encoded" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 192 | "``` \n
" 193 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L20-L25 \n" 194 | "```diff\n" 195 | '!Line: 20 - C0103: Constant name "encoded" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 196 | "``` \n
" 197 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L22-L25 \n" 198 | "```diff\n" 199 | '!Line: 22 - C0103: Constant name "x" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 200 | "``` \n
" 201 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/dummy.py#L23-L25 \n" 202 | "```diff\n" 203 | '!Line: 23 - C0103: Constant name "encoded" doesn\'t conform to UPPER_CASE naming style (invalid-name)\n' 204 | "``` \n
def to_list_and_sort(string_in):
    """Split a space-separated string into a sorted list of its tokens."""
    return sorted(string_in.split(" "))
47 | """ 48 | 49 | cppcheck_content = [ 50 | "/github/workspace/DummyFile.cpp:8:23: style: Error message\n", 51 | " Part of code\n", 52 | " ^\n", 53 | "/github/workspace/DummyFile.cpp:6:12: note: Note message\n", 54 | " Part of code\n", 55 | " ^\n", 56 | "/github/workspace/DummyFile.cpp:7:4: note: Another note message\n", 57 | " Part of code\n", 58 | " ^\n", 59 | "/github/workspace/DummyFile.cpp:3:0: style: Error message\n", 60 | " Part of code\n", 61 | " ^\n", 62 | ] 63 | 64 | files_changed_in_pr = { 65 | "/github/workspace/DummyFile.hpp": ("added", (1, 10)), 66 | "/github/workspace/DummyFile.cpp": ("added", (1, 10)), 67 | } 68 | result = static_analysis_cpp.create_comment_for_output( 69 | cppcheck_content, "/github/workspace", files_changed_in_pr, False 70 | ) 71 | 72 | sha = os.getenv("GITHUB_SHA") 73 | repo_name = os.getenv("INPUT_REPO") 74 | expected = ( 75 | f"\n\nhttps://github.com/{repo_name}/blob/{sha}/DummyFile.cpp#L8-L9 \n" 76 | f"```diff\n!Line: 8 - style: Error message" 77 | f"\n\n!Line: 6 - note: Note message" 78 | f"\n!Line: 7 - note: Another note message\n``` " 79 | f"\n\n\n\nhttps://github.com/{repo_name}/blob/{sha}/DummyFile.cpp#L3-L8 \n" 80 | f"```diff\n!Line: 3 - style: Error message\n\n``` \n
\n" 81 | ) 82 | 83 | print(result) 84 | 85 | self.assertEqual(result, (expected, 2)) 86 | 87 | def test_prepare_comment_body(self): 88 | """ 89 | Test the `prepare_comment_body()` function. 90 | 91 | This test case checks whether the `prepare_comment_body()` function correctly generates 92 | the body text of a GitHub comment for a given set of static analysis issues. 93 | 94 | The test case creates mock input parameters representing different types of static 95 | analysis issues, and expects the generated comment body to match a pre-defined expected 96 | string. 97 | """ 98 | 99 | comment_title = os.getenv("INPUT_COMMENT_TITLE") 100 | comment_body = static_analysis_cpp.prepare_comment_body("", "", 0, 0) 101 | 102 | # Empty results 103 | expected_comment_body = utils.generate_comment(comment_title, "", 0, "cppcheck") 104 | 105 | self.assertEqual(expected_comment_body, comment_body) 106 | 107 | # Multiple cppcheck issues 108 | cppcheck_issues_found = 4 109 | cppcheck_comment = "dummy issues" 110 | expected_comment_body = utils.generate_comment( 111 | comment_title, cppcheck_comment, cppcheck_issues_found, "cppcheck" 112 | ) 113 | 114 | comment_body = static_analysis_cpp.prepare_comment_body( 115 | cppcheck_comment, "", cppcheck_issues_found, 0 116 | ) 117 | 118 | self.assertEqual(expected_comment_body, comment_body) 119 | 120 | # Single cppcheck issue 121 | cppcheck_issues_found = 1 122 | cppcheck_comment = "dummy issue" 123 | expected_comment_body = utils.generate_comment( 124 | comment_title, cppcheck_comment, cppcheck_issues_found, "cppcheck" 125 | ) 126 | 127 | comment_body = static_analysis_cpp.prepare_comment_body( 128 | cppcheck_comment, "", cppcheck_issues_found, 0 129 | ) 130 | 131 | self.assertEqual(expected_comment_body, comment_body) 132 | 133 | # Multiple clang-tidy issues 134 | clang_tidy_issues_found = 4 135 | clang_tidy_comment = "dummy issues" 136 | expected_comment_body = utils.generate_comment( 137 | comment_title, clang_tidy_comment, 
clang_tidy_issues_found, "clang-tidy" 138 | ) 139 | 140 | comment_body = static_analysis_cpp.prepare_comment_body( 141 | "", clang_tidy_comment, 0, clang_tidy_issues_found 142 | ) 143 | 144 | self.assertEqual(expected_comment_body, comment_body) 145 | 146 | # Single clang-tidy issue 147 | clang_tidy_issues_found = 1 148 | clang_tidy_comment = "dummy issue" 149 | expected_comment_body = utils.generate_comment( 150 | comment_title, clang_tidy_comment, clang_tidy_issues_found, "clang-tidy" 151 | ) 152 | 153 | comment_body = static_analysis_cpp.prepare_comment_body( 154 | "", clang_tidy_comment, 0, clang_tidy_issues_found 155 | ) 156 | 157 | self.assertEqual(expected_comment_body, comment_body) 158 | 159 | def test_get_files_to_check(self): 160 | """ 161 | Test the `get_files_to_check()` function. 162 | 163 | This test case checks whether the `get_files_to_check()` function correctly generates a 164 | list of file paths to check for static analysis issues in a given directory, excluding 165 | any directories that should be skipped. 166 | 167 | The test case creates a mock directory structure and a set of directories to skip, 168 | and expects the generated list of file paths to match a pre-defined expected list of 169 | file paths. 
170 | """ 171 | 172 | pwd = os.path.dirname(os.path.realpath(__file__)) 173 | 174 | # Excludes == None 175 | expected = [ 176 | f"{pwd}/utils/dummy_project/DummyFile.cpp", 177 | f"{pwd}/utils/dummy_project/DummyFile.hpp", 178 | f"{pwd}/utils/dummy_project/exclude_dir_1/ExcludedFile1.hpp", 179 | f"{pwd}/utils/dummy_project/exclude_dir_2/ExcludedFile2.hpp", 180 | ] 181 | result = get_files_to_check.get_files_to_check( 182 | f"{pwd}/utils/dummy_project", None, "", "c++" 183 | ) 184 | 185 | self.assertEqual(to_list_and_sort(result), expected) 186 | 187 | # Single exclude_dir 188 | expected = [ 189 | f"{pwd}/utils/dummy_project/DummyFile.cpp", 190 | f"{pwd}/utils/dummy_project/DummyFile.hpp", 191 | f"{pwd}/utils/dummy_project/exclude_dir_2/ExcludedFile2.hpp", 192 | ] 193 | result = get_files_to_check.get_files_to_check( 194 | f"{pwd}/utils/dummy_project", 195 | f"{pwd}/utils/dummy_project/exclude_dir_1", 196 | "", 197 | "c++", 198 | ) 199 | 200 | self.assertEqual(to_list_and_sort(result), expected) 201 | 202 | # Multiple exclude_dir 203 | expected = [ 204 | f"{pwd}/utils/dummy_project/DummyFile.cpp", 205 | f"{pwd}/utils/dummy_project/DummyFile.hpp", 206 | ] 207 | result = get_files_to_check.get_files_to_check( 208 | f"{pwd}/utils/dummy_project", 209 | f"{pwd}/utils/dummy_project/exclude_dir_1 {pwd}/utils/dummy_project/exclude_dir_2", 210 | "", 211 | "c++", 212 | ) 213 | 214 | # Preselected files present 215 | expected = [f"{pwd}/utils/dummy_project/DummyFile.cpp"] 216 | result = get_files_to_check.get_files_to_check( 217 | f"{pwd}/utils/dummy_project", 218 | f"{pwd}/utils/dummy_project/exclude_dir_1 {pwd}/utils/dummy_project/exclude_dir_2", 219 | f"{pwd}/utils/dummy_project/DummyFile.cpp {pwd}/utils/dummy_project/exclude_dir_1/ExcludedFile1.hpp", 220 | "c++", 221 | ) 222 | 223 | 224 | if __name__ == "__main__": 225 | unittest.main() 226 | -------------------------------------------------------------------------------- /src/static_analysis_cpp.py: 
def append_issue(is_note, per_issue_string, new_line, list_of_issues):
    """Accumulate tool findings into per-issue strings.

    A "note" line extends the issue currently being assembled; a non-note
    line closes the current issue (storing it in ``list_of_issues`` when it
    is not already present) and starts a fresh one.

    Returns:
        str: The updated in-progress issue string.
    """
    if is_note:
        # Notes belong to the issue currently being assembled.
        return per_issue_string + new_line

    # A new issue starts here: store the finished one first (no duplicates).
    if per_issue_string and per_issue_string not in list_of_issues:
        list_of_issues.append(per_issue_string)
    return new_line
def create_comment_for_output(
    tool_output, prefix, files_changed_in_pr, output_to_console
):
    """
    Build the GitHub pull-request comment from a tool's raw output lines.

    Parameters:
        tool_output (list): Raw output lines from cppcheck / clang-tidy.
        prefix (str): Path prefix that identifies lines reporting issues.
        files_changed_in_pr (dict): Files changed in the pull request mapped
            to the line ranges that were modified.
        output_to_console (bool): When True, keep the raw lines for console
            output instead of generating markdown.

    Returns:
        tuple: (generated comment string, number of issues found).
    """
    issues = []
    pending = ""
    previous_was_note = False

    for raw_line in tool_output:
        # Only lines that start with the workspace prefix describe findings;
        # anything under an excluded directory is filtered out as well.
        if not raw_line.startswith(prefix) or utils.is_excluded_dir(raw_line):
            continue

        (
            file_path,
            is_note,
            line_start,
            line_end,
            issue_description,
        ) = utils.extract_info(raw_line, prefix)

        # Console-only mode: keep the raw line and move on.
        if output_to_console:
            pending = append_issue(is_note, pending, raw_line, issues)
            continue

        if utils.is_part_of_pr_changes(file_path, line_start, files_changed_in_pr):
            pending, description = utils.generate_description(
                is_note,
                previous_was_note,
                line_start,
                issue_description,
                pending,
            )
            previous_was_note = is_note

            rendered = utils.generate_output(
                is_note,
                (prefix, file_path),
                line_start,
                line_end,
                description,
            )

            if not utils.check_for_char_limit(rendered):
                # Comment size budget exhausted: stop and return what we have.
                utils.CURRENT_COMMENT_LENGTH = utils.COMMENT_MAX_SIZE
                return "\n".join(issues), len(issues)

            pending = append_issue(is_note, pending, rendered, issues)
            utils.CURRENT_COMMENT_LENGTH += len(rendered)

    # Flush the last accumulated issue, if it was not stored yet.
    if pending and pending not in issues:
        issues.append(pending)

    output_string = "\n".join(issues)

    utils.debug_print(f"\nFinal output_string = \n{output_string}\n")

    return output_string, len(issues)
def read_files_and_parse_results():
    """Reads the output files generated by cppcheck and clang-tidy and creates comments
    for the pull request, based on the issues found. The comments can be output to console
    and/or added to the pull request. Returns a tuple with the comments generated for
    cppcheck and clang-tidy, and boolean values indicating whether issues were found by
    each tool, whether output was generated to the console, and whether the actual code
    is in the 'pr_tree' directory.

    Returns:
        A tuple with the following values:
        - cppcheck_comment (str): The comment generated for cppcheck, if any issues were found.
        - clang_tidy_comment (str): The comment generated for clang-tidy, if any issues were found.
        - cppcheck_issues_found (bool): Whether issues were found by cppcheck.
        - clang_tidy_issues_found (bool): Whether issues were found by clang-tidy.
        - output_to_console (bool): Whether output was generated to the console.
    """

    # Get cppcheck and clang-tidy files
    parser = utils.create_common_input_vars_parser()
    parser.add_argument(
        "-cc", "--cppcheck", help="Output file name for cppcheck", required=True
    )
    parser.add_argument(
        "-ct", "--clangtidy", help="Output file name for clang-tidy", required=True
    )

    # Parse the command line once; the previous code re-ran parse_args()
    # (and therefore re-parsed sys.argv) six separate times.
    args = parser.parse_args()

    if args.fork_repository == "true":
        # Make sure to use Head repository
        utils.REPO_NAME = os.getenv("INPUT_PR_REPO")

    cppcheck_file_name = args.cppcheck
    clangtidy_file_name = args.clangtidy
    output_to_console = args.output_to_console == "true"

    with open(cppcheck_file_name, "r", encoding="utf-8") as file:
        cppcheck_content = file.readlines()

    with open(clangtidy_file_name, "r", encoding="utf-8") as file:
        clang_tidy_content = file.readlines()

    common_ancestor = args.common
    feature_branch = args.head

    # Every issue line produced by the tools starts with the workspace path.
    line_prefix = f"{utils.WORK_DIR}"

    utils.debug_print(
        f"cppcheck result: \n {cppcheck_content} \n"
        f"clang-tidy result: \n {clang_tidy_content} \n"
        f"line_prefix: {line_prefix} \n"
    )

    files_changed_in_pr = {}
    if not output_to_console and (utils.ONLY_PR_CHANGES == "true"):
        files_changed_in_pr = utils.get_changed_files(common_ancestor, feature_branch)

    cppcheck_comment, cppcheck_issues_found = create_comment_for_output(
        cppcheck_content, line_prefix, files_changed_in_pr, output_to_console
    )
    clang_tidy_comment, clang_tidy_issues_found = create_comment_for_output(
        clang_tidy_content, line_prefix, files_changed_in_pr, output_to_console
    )

    if output_to_console and (cppcheck_issues_found or clang_tidy_issues_found):
        print("##[error] Issues found!\n")
        error_color = "\u001b[31m"

        if cppcheck_issues_found:
            print(f"{error_color}cppcheck results: {cppcheck_comment}")

        if clang_tidy_issues_found:
            print(f"{error_color}clang-tidy results: {clang_tidy_comment}")

    return (
        cppcheck_comment,
        clang_tidy_comment,
        cppcheck_issues_found,
        clang_tidy_issues_found,
        output_to_console,
    )

:white_check_mark:' 198 | f"{utils.COMMENT_TITLE} - no issues found! :white_check_mark:

" 199 | ) 200 | else: 201 | full_comment_body = ( 202 | f'##

:zap: {utils.COMMENT_TITLE} :zap:

\n\n' 203 | ) 204 | 205 | if len(cppcheck_comment) > 0: 206 | full_comment_body += ( 207 | f"
:red_circle: cppcheck found " 208 | f"{cppcheck_issues_found} {'issues' if cppcheck_issues_found > 1 else 'issue'}!" 209 | " Click here to see details.
" 210 | f"{cppcheck_comment}
" 211 | ) 212 | 213 | full_comment_body += "\n\n *** \n" 214 | 215 | if len(clang_tidy_comment) > 0: 216 | full_comment_body += ( 217 | f"
:red_circle: clang-tidy found " 218 | f"{clang_tidy_issues_found} {'issues' if clang_tidy_issues_found > 1 else 'issue'}!" 219 | " Click here to see details.
" 220 | f"{clang_tidy_comment}

\n" 221 | ) 222 | 223 | if utils.CURRENT_COMMENT_LENGTH == utils.COMMENT_MAX_SIZE: 224 | full_comment_body += f"\n```diff\n{utils.MAX_CHAR_COUNT_REACHED}\n```" 225 | 226 | utils.debug_print( 227 | f"Repo={utils.REPO_NAME} pr_num={utils.PR_NUM} comment_title={utils.COMMENT_TITLE}" 228 | ) 229 | 230 | return full_comment_body 231 | 232 | 233 | if __name__ == "__main__": 234 | ( 235 | cppcheck_comment_in, 236 | clang_tidy_comment_in, 237 | cppcheck_issues_found_in, 238 | clang_tidy_issues_found_in, 239 | output_to_console_in, 240 | ) = read_files_and_parse_results() 241 | 242 | if not output_to_console_in: 243 | comment_body_in = prepare_comment_body( 244 | cppcheck_comment_in, 245 | clang_tidy_comment_in, 246 | cppcheck_issues_found_in, 247 | clang_tidy_issues_found_in, 248 | ) 249 | utils.create_or_edit_comment(comment_body_in) 250 | 251 | sys.exit(cppcheck_issues_found_in + clang_tidy_issues_found_in) 252 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Linter](https://github.com/JacobDomagala/StaticAnalysis/actions/workflows/linter.yml/badge.svg?branch=master)](https://github.com/JacobDomagala/StaticAnalysis/actions/workflows/linter.yml?query=branch%3Amaster) 2 | [![Test Action](https://github.com/JacobDomagala/StaticAnalysis/actions/workflows/test_action.yml/badge.svg?branch=master)](https://github.com/JacobDomagala/StaticAnalysis/actions/workflows/test_action.yml?query=branch%3Amaster) 3 | [![Unit Tests](https://github.com/JacobDomagala/StaticAnalysis/actions/workflows/unit_tests.yml/badge.svg?branch=master)](https://github.com/JacobDomagala/StaticAnalysis/actions/workflows/unit_tests.yml?query=branch%3Amaster) 4 | 5 | # Static Analysis 6 | 7 | This GitHub Action is designed for **C++ and Python projects** and performs static analysis using: 8 | * [cppcheck](http://cppcheck.sourceforge.net/) and 
[clang-tidy](https://clang.llvm.org/extra/clang-tidy/) for C++ 9 | * [pylint](https://pylint.readthedocs.io/en/latest/index.html) for Python 10 | 11 | It can be triggered by push and pull requests. 12 | 13 | For further information and guidance on setup and various inputs, please see the sections dedicated to each language ([**C++**](https://github.com/JacobDomagala/StaticAnalysis?tab=readme-ov-file#c) and [**Python**](https://github.com/JacobDomagala/StaticAnalysis?tab=readme-ov-file#python)). 14 | 15 | --- 16 | 17 | ## Pull Request Comment 18 | 19 | The created comment will include code snippets and issue descriptions. When this action runs for the first time on a pull request, it creates a comment with the initial analysis results. Subsequent runs will update this same comment with the latest status. 20 | 21 | Note that the number of detected issues might cause the comment's body to exceed GitHub's character limit (currently 65,536 characters) per PR comment. If this occurs, the comment will contain issues up to the limit and indicate that the character limit was reached. 22 | 23 | --- 24 | 25 | ## Output Example (C++) 26 | ![output](https://github.com/JacobDomagala/StaticAnalysis/wiki/output_example.png) 27 | 28 | --- 29 | 30 | ## Non-Pull Request Events 31 | 32 | For non-pull request events, the output will be printed directly to the GitHub Actions console. This behavior can also be forced using the `force_console_print` input. 33 | 34 | --- 35 | 36 | ## Output Example (C++) 37 | ![output](https://github.com/JacobDomagala/StaticAnalysis/wiki/console_output_example.png) 38 | 39 | --- 40 | 41 | # C++ 42 | 43 | While it's recommended that your project is CMake-based, it's not strictly required (see the [**Inputs**](https://github.com/JacobDomagala/StaticAnalysis#inputs) section below). We also recommend using a `.clang-tidy` file in your repository's root directory. 
If your project requires additional packages, you can install them using the `apt_pckgs` and/or `init_script` input variables (see the [**Workflow example**](https://github.com/JacobDomagala/StaticAnalysis#workflow-example) or [**Inputs**](https://github.com/JacobDomagala/StaticAnalysis#inputs) sections below). If your repository allows contributions from forks, you must use this Action with the `pull_request_target` trigger event, as the GitHub API won't allow PR comments otherwise. 44 | 45 | By default, **cppcheck** runs with the following flags: 46 | ```--enable=all --suppress=missingIncludeSystem --inline-suppr --inconclusive``` 47 | You can use the `cppcheck_args` input to set your own flags. 48 | 49 | **Clang-Tidy** looks for a `.clang-tidy` file in your repository, but you can also specify checks using the `clang_tidy_args` input. 50 | 51 | --- 52 | 53 | ## Using a Custom `compile_commands.json` File 54 | 55 | You can use a pre-generated `compile_commands.json` file with the `compile_commands` input. This is incredibly useful when you need **more control over your compilation database**, whether you're working with a complex build system, have a specific build configuration, or simply want to reuse a file generated elsewhere. 56 | 57 | When using a custom `compile_commands.json` with this GitHub Action, you'll encounter a common technical challenge: a **mismatch between the directory where the file was originally generated and the path used by this GitHub Action** (specifically, inside its Docker container). This means the source file paths listed in your `compile_commands.json` might not be valid from the container's perspective. 58 | 59 | To resolve this, you have two main options: 60 | 61 | * **Manually replace the prefixes** in your `compile_commands.json` file (for example, change `/original/path/to/repo` to `/github/workspace`). This method gives you complete control over the path adjustments. 
62 | * **Let the action try to replace the prefixes for you.** For simpler directory structures, you can enable this convenient feature using the `compile_commands_replace_prefix` input. 63 | 64 | --- 65 | 66 | Beyond path adjustments, another important consideration when using a custom `compile_commands.json` file is **dependency resolution** for your static analysis tools. `clang-tidy` performs deep semantic analysis, which means it requires all necessary include files and headers to be found and accessible during its run. If these dependencies are missing or incorrectly referenced, `clang-tidy` may stop analyzing the affected file, leading to incomplete results. In contrast, `cppcheck` is generally more resilient to missing include paths, as it primarily focuses on lexical and syntactic analysis rather than full semantic parsing. 67 | 68 | --- 69 | 70 | ## Workflow Example 71 | 72 | ```yml 73 | name: Static Analysis 74 | 75 | on: 76 | # Runs on 'push' events to specified branches. Output will be printed to the console. 77 | push: 78 | branches: 79 | - develop 80 | - master 81 | - main 82 | 83 | # Uses 'pull_request_target' to allow analysis of forked repositories. 84 | # Output will be shown in PR comments (unless 'force_console_print' is used). 85 | pull_request_target: 86 | branches: 87 | - "*" 88 | 89 | jobs: 90 | static_analysis: 91 | runs-on: ubuntu-latest 92 | 93 | steps: 94 | - uses: actions/checkout@v2 95 | 96 | - name: setup init_script 97 | shell: bash 98 | run: | 99 | echo "#!/bin/bash 100 | 101 | # Input args provided by StaticAnalysis action 102 | root_dir=\${1} 103 | build_dir=\${2} 104 | echo \"Hello from the init script! 
First arg=\${root_dir} second arg=\${build_dir}\" 105 | 106 | add-apt-repository ppa:oibaf/graphics-drivers 107 | apt update && apt upgrade -y 108 | apt install -y libvulkan1 mesa-vulkan-drivers vulkan-utils" > init_script.sh 109 | 110 | - name: Run Static Analysis 111 | uses: JacobDomagala/StaticAnalysis@master 112 | with: 113 | language: c++ 114 | 115 | # Exclude any issues found in ${Project_root_dir}/lib 116 | exclude_dir: lib 117 | 118 | use_cmake: true 119 | 120 | # Additional apt packages required before running CMake 121 | apt_pckgs: software-properties-common libglu1-mesa-dev freeglut3-dev mesa-common-dev 122 | 123 | # Optional shell script that runs AFTER 'apt_pckgs' and before CMake 124 | init_script: init_script.sh 125 | 126 | # Optional Clang-Tidy arguments 127 | clang_tidy_args: -checks='*,fuchsia-*,google-*,zircon-*,abseil-*,modernize-use-trailing-return-type' 128 | 129 | # Optional Cppcheck arguments 130 | cppcheck_args: --enable=all --suppress=missingIncludeSystem 131 | ``` 132 | 133 | ## Inputs 134 | 135 | | Name | Description | Default value | 136 | |-------------------------|------------------------------------|---------------| 137 | | `github_token` | Github token used for Github API requests |`${{github.token}}`| 138 | | `pr_num` | Pull request number for which the comment will be created |`${{github.event.pull_request.number}}`| 139 | | `comment_title` | Title for comment with the raport. This should be an unique name | `Static analysis result` | 140 | | `exclude_dir` | Directory which should be excluded from the raport | `` | 141 | | `apt_pckgs` | Additional (space separated) packages that need to be installed in order for project to compile | `` | 142 | | `init_script` | Optional shell script that will be run before configuring project (i.e. running CMake command). This should be used, when the project requires some environmental set-up beforehand. 
Script will be run with 2 arguments: `root_dir`(root directory of user's code) and `build_dir`(build directory created for running SA). Note: `apt_pckgs` will run before this script, just in case you need some packages installed. Also this script will be run in the root of the project (`root_dir`) | `` | 143 | | `cppcheck_args` | Cppcheck (space separated) arguments that will be used |`--enable=all --suppress=missingIncludeSystem --inline-suppr --inconclusive`| 144 | | `clang_tidy_args` | clang-tidy arguments that will be used (example: `-checks='*,fuchsia-*,google-*,zircon-*'`) |``| 145 | | `report_pr_changes_only`| Only post the issues found within the changes introduced in this Pull Request. This means that only the issues found within the changed lines will be posted. Any other issues caused by these changes in the repository won't be reported, so in general you should run static analysis on the entire code base |`false`| 146 | | `use_cmake` | Determines whether CMake should be used to generate the compile_commands.json file | `true` | 147 | | `cmake_args` | Additional CMake arguments |``| 148 | | `force_console_print` | Output the action result to console, instead of creating the comment |`false`| 149 | | `compile_commands` | User generated compile_commands.json |``| 150 | | `compile_commands_replace_prefix` | Whether we should replace the prefix of files inside user generated compile_commands.json file |`false`| 151 | 152 | **NOTE: `apt_pckgs` will run before `init_script`, just in case you need some packages installed before running the script** 153 | 154 |

155 | 156 | # Python 157 | 158 | 159 | ## Workflow example 160 | 161 | ```yml 162 | name: Static analysis 163 | 164 | on: 165 | push: 166 | branches: 167 | - master 168 | pull_request: 169 | 170 | jobs: 171 | check: 172 | name: Run Linter 173 | runs-on: ubuntu-20.04 174 | steps: 175 | - uses: actions/checkout@v3 176 | 177 | - name: CodeQuality 178 | uses: JacobDomagala/StaticAnalysis@master 179 | with: 180 | language: "Python" 181 | pylint_args: "--rcfile=.pylintrc --recursive=true" 182 | python_dirs: "src test" 183 | ``` 184 | 185 | ## Inputs 186 | 187 | | Name | Description | Default value | 188 | |-------------------------|------------------------------------|---------------| 189 | | `github_token` | Github token used for Github API requests |`${{github.token}}`| 190 | | `pr_num` | Pull request number for which the comment will be created |`${{github.event.pull_request.number}}`| 191 | | `comment_title` | Title for comment with the raport. This should be an unique name | `Static analysis result` | 192 | | `exclude_dir` | Directory which should be excluded from the raport | `` | 193 | | `apt_pckgs` | Additional (space separated) packages that need to be installed in order for project to compile | `` | 194 | | `init_script` | Optional shell script that will be run before configuring project (i.e. running CMake command). This should be used, when the project requires some environmental set-up beforehand. Script will be run with 2 arguments: `root_dir`(root directory of user's code) and `build_dir`(build directory created for running SA). Note. `apt_pckgs` will run before this script, just in case you need some packages installed. 
Also this script will be run in the root of the project (`root_dir`) | `` | 195 | | `pylint_args` | Pylint (space separated) arguments that will be used |``| 196 | | `python_dirs` | Directories that contain python files to be checked | `` | 197 | | `report_pr_changes_only`| Only post the issues found within the changes introduced in this Pull Request. This means that only the issues found within the changed lines will po posted. Any other issues caused by these changes in the repository, won't be reported, so in general you should run static analysis on entire code base |`false`| 198 | | `force_console_print` | Output the action result to console, instead of creating the comment |`false`| 199 | 200 | **NOTE: `apt_pckgs` will run before `init_script`, just in case you need some packages installed before running the script** 201 | -------------------------------------------------------------------------------- /src/sa_utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | import subprocess 4 | import argparse 5 | 6 | from github import Github 7 | 8 | # Input variables from Github action 9 | GITHUB_TOKEN = os.getenv("INPUT_GITHUB_TOKEN") 10 | PR_NUM = os.getenv("INPUT_PR_NUM", "-1") 11 | WORK_DIR = f'{os.getenv("GITHUB_WORKSPACE")}' 12 | REPO_NAME = os.getenv("INPUT_REPO") 13 | TARGET_REPO_NAME = os.getenv("INPUT_REPO", "") 14 | SHA = os.getenv("GITHUB_SHA") 15 | COMMENT_TITLE = os.getenv("INPUT_COMMENT_TITLE", "Static Analysis") 16 | ONLY_PR_CHANGES = os.getenv("INPUT_REPORT_PR_CHANGES_ONLY", "False").lower() 17 | VERBOSE = os.getenv("INPUT_VERBOSE", "False").lower() == "true" 18 | LANG = os.getenv("INPUT_LANGUAGE", "c++").lower() 19 | FILES_WITH_ISSUES = {} 20 | 21 | # Max characters per comment - 65536 22 | # Make some room for HTML tags and error message 23 | MAX_CHAR_COUNT_REACHED = ( 24 | "!Maximum character count per GitHub comment has been reached!" 
25 | " Not all warnings/errors has been parsed!" 26 | ) 27 | COMMENT_MAX_SIZE = 65000 28 | CURRENT_COMMENT_LENGTH = 0 29 | 30 | 31 | def debug_print(message): 32 | if VERBOSE: 33 | lines = message.split("\n") 34 | for line in lines: 35 | print(f"\033[96m {line}") 36 | 37 | 38 | def parse_diff_output(changed_files): 39 | """ 40 | Parses the diff output to extract filenames and corresponding line numbers of changes. 41 | 42 | The function identifies changed lines in files and excludes certain directories 43 | based on the file extension. It then extracts the line numbers of the changes 44 | (additions) and associates them with their respective files. 45 | 46 | Parameters: 47 | - changed_files (str): The diff output string. 48 | 49 | Returns: 50 | - dict: A dictionary where keys are filenames and values are lists of line numbers 51 | that have changes. 52 | 53 | Usage Example: 54 | ```python 55 | diff_output = "" 56 | changed_file_data = parse_diff_output(diff_output) 57 | for file, lines in changed_file_data.items(): 58 | print(f"File: {file}, Changed Lines: {lines}") 59 | ``` 60 | 61 | Note: 62 | - The function only considers additions in the diff, lines starting with "+". 63 | - Filenames in the return dictionary include their paths relative to the repo root. 64 | """ 65 | 66 | # Regex to capture filename and the line numbers of the changes 67 | file_pattern = re.compile(r"^\+\+\+ b/(.*?)$", re.MULTILINE) 68 | line_pattern = re.compile(r"^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? 
@@", re.MULTILINE) 69 | 70 | if LANG == "c++": 71 | supported_extensions = (".h", ".hpp", ".hcc", ".c", ".cc", ".cpp", ".cxx") 72 | elif LANG == "python": 73 | supported_extensions = ".py" 74 | else: 75 | raise RuntimeError(f"Unknown language {LANG}") 76 | 77 | files = {} 78 | for match in file_pattern.finditer(changed_files): 79 | file_name = match.group(1) 80 | 81 | # Filtering for language specific files and excluding certain directories 82 | if file_name.endswith(supported_extensions): 83 | # Find the lines that changed for this file 84 | lines_start_at = match.end() 85 | next_file_match = file_pattern.search(changed_files, pos=match.span(0)[1]) 86 | 87 | # Slice out the part of the diff that pertains to this file 88 | file_diff = changed_files[ 89 | lines_start_at : next_file_match.span(0)[0] if next_file_match else None 90 | ] 91 | 92 | # Extract line numbers of the changes 93 | changed_lines = [] 94 | for line_match in line_pattern.finditer(file_diff): 95 | start_line = int(line_match.group(1)) 96 | 97 | # The start and end positions for this chunk of diff 98 | chunk_start = line_match.end() 99 | next_chunk = line_pattern.search(file_diff, pos=line_match.span(0)[1]) 100 | chunk_diff = file_diff[ 101 | chunk_start : next_chunk.span(0)[0] if next_chunk else None 102 | ] 103 | 104 | lines = chunk_diff.splitlines() 105 | line_counter = 0 106 | for line in lines: 107 | if line.startswith("+"): 108 | changed_lines.append(start_line + line_counter) 109 | line_counter += 1 110 | 111 | if changed_lines: 112 | files[file_name] = changed_lines 113 | 114 | return files 115 | 116 | 117 | def get_changed_files(common_ancestor, feature_branch): 118 | """Get a dictionary of files and their changed lines between the common ancestor and feature_branch.""" 119 | cmd = ["git", "diff", "-U0", "--ignore-all-space", common_ancestor, feature_branch] 120 | result = subprocess.check_output(cmd).decode("utf-8") 121 | 122 | return parse_diff_output(result) 123 | 124 | 125 | def 
is_part_of_pr_changes(file_path, issue_file_line, files_changed_in_pr): 126 | """ 127 | Check if a given file and line number corresponds to a change in the files included in a pull request. 128 | 129 | Args: 130 | file_path (str): The path to the file in question. 131 | issue_file_line (int): The line number within the file to check. 132 | files_changed_in_pr (dict): A dictionary of files changed in a pull request, where the keys are file paths 133 | and the values are tuples of the form (status, lines_changed_for_file), where 134 | status is a string indicating the change status ("added", "modified", or "removed"), 135 | and lines_changed_for_file is a list of tuples, where each tuple represents a range 136 | of lines changed in the file (e.g. [(10, 15), (20, 25)] indicates that lines 10-15 137 | and 20-25 were changed in the file). 138 | 139 | Returns: 140 | bool: True if the file and line number correspond to a change in the pull request, False otherwise. 141 | """ 142 | 143 | if ONLY_PR_CHANGES == "false": 144 | return True 145 | 146 | debug_print( 147 | f"Looking for issue found in file={file_path} at line={issue_file_line}..." 148 | ) 149 | for file, lines_changed_for_file in files_changed_in_pr.items(): 150 | debug_print( 151 | f'Changed file by this PR "{file}" with changed lines "{lines_changed_for_file}"' 152 | ) 153 | if file == file_path: 154 | for line in lines_changed_for_file: 155 | if line == issue_file_line: 156 | debug_print(f"Issue line {issue_file_line} is a part of PR!") 157 | return True 158 | 159 | return False 160 | 161 | 162 | def get_lines_changed_from_patch(patch): 163 | """ 164 | Parses a unified diff patch and returns the range of lines that were changed. 165 | 166 | Parameters: 167 | patch (str): The unified diff patch to parse. 168 | 169 | Returns: 170 | list: A list of tuples containing the beginning and ending line numbers for each 171 | section of the file that was changed by the patch. 
172 | """ 173 | 174 | lines_changed = [] 175 | lines = patch.split("\n") 176 | 177 | for line in lines: 178 | # Example line @@ -43,6 +48,8 @@ 179 | # ------------ ^ 180 | if line.startswith("@@"): 181 | # Example line @@ -43,6 +48,8 @@ 182 | # ----------------------^ 183 | idx_beg = line.index("+") 184 | 185 | # Example line @@ -43,6 +48,8 @@ 186 | # ^--^ 187 | try: 188 | idx_end = line[idx_beg:].index(",") 189 | line_begin = int(line[idx_beg + 1 : idx_beg + idx_end]) 190 | 191 | idx_beg = idx_beg + idx_end 192 | idx_end = line[idx_beg + 1 :].index("@@") 193 | 194 | num_lines = int(line[idx_beg + 1 : idx_beg + idx_end]) 195 | except ValueError: 196 | # Special case for single line files 197 | # such as @@ -0,0 +1 @@ 198 | idx_end = line[idx_beg:].index(" ") 199 | line_begin = int(line[idx_beg + 1 : idx_beg + idx_end]) 200 | num_lines = 0 201 | 202 | lines_changed.append((line_begin, line_begin + num_lines)) 203 | 204 | return lines_changed 205 | 206 | 207 | def check_for_char_limit(incoming_line): 208 | return (CURRENT_COMMENT_LENGTH + len(incoming_line)) <= COMMENT_MAX_SIZE 209 | 210 | 211 | def is_excluded_dir(line): 212 | """ 213 | Determines if a given line is from a directory that should be excluded from processing. 214 | 215 | Args: 216 | line (str): The line to check. 217 | 218 | Returns: 219 | bool: True if the line is from a directory that should be excluded, False otherwise. 220 | """ 221 | 222 | # In future this could be multiple different directories 223 | exclude_dir = os.getenv("INPUT_EXCLUDE_DIR") 224 | if not exclude_dir: 225 | return False 226 | 227 | excluded_dir = f"{WORK_DIR}/{exclude_dir}" 228 | debug_print( 229 | f"{line} and {excluded_dir} with result {line.startswith(excluded_dir)}" 230 | ) 231 | 232 | return line.startswith(excluded_dir) 233 | 234 | 235 | def get_file_line_end(file_in, file_line_start_in): 236 | """ 237 | Returns the ending line number for a given file, starting from a specified line number. 
238 | 239 | Args: 240 | file_in (str): The name of the file to read. 241 | file_line_start_in (int): The starting line number. 242 | 243 | Returns: 244 | int: The ending line number, which is either `file_line_start + 5` 245 | or the total number of lines in the file, whichever is smaller. 246 | """ 247 | 248 | with open(f"{WORK_DIR}/{file_in}", encoding="utf-8") as file: 249 | num_lines = sum(1 for line in file)  # stream the file: count lines without reading it all into memory 250 | 251 | return min(file_line_start_in + 5, num_lines)  # clamp the 5-line snippet window to the end of the file 252 | 253 | 254 | def generate_description( 255 | is_note, was_note, file_line_start, issue_description, output_string 256 | ): 257 | """Generate description for an issue 258 | 259 | is_note -- is the current issue a Note: or not 260 | was_note -- was the previous issue a Note: or not 261 | file_line_start -- line to which the issue corresponds 262 | issue_description -- the description from cppcheck/clang-tidy 263 | output_string -- entire description (can be altered if the current/previous issue is/was Note:) 264 | """ 265 | global CURRENT_COMMENT_LENGTH  # decremented below whenever the previously emitted diff block is trimmed 266 | 267 | if not is_note: 268 | description = ( 269 | f"\n```diff\n!Line: {file_line_start} - {issue_description}\n``` \n" 270 | ) 271 | else: 272 | if not was_note: 273 | # Previous line consists of ```diff ```, so remove the closing ``` 274 | # and append the Note: description to it 275 | 276 | # 12 here means "``` \n
\n"` 277 | num_chars_to_remove = 12 278 | else: 279 | # Previous line is Note: so it ends with "``` \n" 280 | num_chars_to_remove = 6 281 | 282 | output_string = output_string[:-num_chars_to_remove] 283 | CURRENT_COMMENT_LENGTH -= num_chars_to_remove 284 | description = f"\n!Line: {file_line_start} - {issue_description}``` \n" 285 | 286 | return output_string, description 287 | 288 | 289 | def create_or_edit_comment(comment_body): 290 | """ 291 | Creates or edits a comment on a pull request with the given comment body. 292 | 293 | Args: 294 | - comment_body: A string containing the full comment body to be created or edited. 295 | 296 | Returns: 297 | - None. 298 | """ 299 | 300 | github = Github(GITHUB_TOKEN) 301 | repo = github.get_repo(TARGET_REPO_NAME) 302 | pull_request = repo.get_pull(int(PR_NUM)) 303 | 304 | comments = pull_request.get_issue_comments() 305 | found_id = -1 306 | comment_to_edit = None 307 | for comment in comments: 308 | if (comment.user.login == "github-actions[bot]") and ( 309 | COMMENT_TITLE in comment.body 310 | ): 311 | found_id = comment.id 312 | comment_to_edit = comment 313 | break 314 | 315 | if found_id != -1 and comment_to_edit: 316 | comment_to_edit.edit(body=comment_body) 317 | else: 318 | pull_request.create_issue_comment(body=comment_body) 319 | 320 | 321 | def generate_output( 322 | is_note, prefix_and_file_path, file_line_start, file_line_end, description 323 | ): 324 | """ 325 | Generate a formatted output string based on the details of a code issue. 326 | 327 | This function takes information about a code issue and constructs a string that 328 | includes details such as the location of the issue in the codebase, the affected code 329 | lines, and a description of the issue. If the issue is a note, only the description 330 | is returned. If the issue occurs in a different repository than the target, it 331 | also fetches the lines where the issue was detected. 
332 | 333 | Parameters: 334 | - is_note (bool): Whether the issue is just a note or a code issue. 335 | - prefix_and_file_path (tuple): Pair of (path prefix, path to the file where the issue was detected). 336 | - file_line_start (int): The line number in the file where the issue starts. 337 | - file_line_end (int): The line number in the file where the issue ends. 338 | - description (str): Description of the issue. 339 | 340 | Returns: 341 | - str: Formatted string with details of the issue. 342 | 343 | Note: 344 | - This function relies on several global variables like TARGET_REPO_NAME, REPO_NAME, 345 | FILES_WITH_ISSUES, and SHA which should be set before calling this function. 346 | """ 347 | 348 | # We assume that the file is not empty! 349 | # In case the tool will refer to line 0 (meaning the entire file) 350 | file_line_start = max(1, file_line_start) 351 | file_line_end = max(1, file_line_end) 352 | 353 | if not is_note: 354 | prefix, file_path = prefix_and_file_path 355 | if TARGET_REPO_NAME != REPO_NAME: 356 | if file_path not in FILES_WITH_ISSUES: 357 | try: 358 | with open(f"{prefix}/{file_path}", encoding="utf-8") as file: 359 | lines = file.readlines() 360 | FILES_WITH_ISSUES[file_path] = lines 361 | except FileNotFoundError: 362 | print(f"Error: The file '{prefix}/{file_path}' was not found.")  # NOTE(review): cache stays unpopulated here, so the lookup below raises KeyError -- confirm intended 363 | 364 | modified_content = FILES_WITH_ISSUES[file_path][ 365 | file_line_start - 1 : file_line_end - 1 366 | ]  # NOTE(review): upper bound excludes file_line_end itself -- confirm the snippet is meant to stop one line short 367 | 368 | debug_print( 369 | f"generate_output for following file: \nfile_path={file_path} \nmodified_content={modified_content}\n" 370 | ) 371 | 372 | modified_content[0] = modified_content[0][:-1] + " <---- HERE\n"  # drop trailing newline and flag the offending line 373 | file_content = "".join(modified_content) 374 | 375 | file_url = f"https://github.com/{REPO_NAME}/blob/{SHA}/{file_path}#L{file_line_start}" 376 | new_line = ( 377 | "\n\n------" 378 | f"\n\n Issue found in file [{REPO_NAME}/{file_path}]({file_url})\n" 379 | f"```{LANG}\n" 380 | f"{file_content}" 381 | f"\n``` \n" 382 | f"{description}
\n" 383 | ) 384 | 385 | else: 386 | new_line = ( 387 | f"\n\nhttps://github.com/{REPO_NAME}/blob/{SHA}/{file_path}" 388 | f"#L{file_line_start}-L{file_line_end} {description}
\n" 389 | ) 390 | else: 391 | new_line = description 392 | 393 | return new_line 394 | 395 | 396 | def extract_info(line, prefix): 397 | """ 398 | Extracts information from a given line containing file path, line number, and issue description. 399 | 400 | Args: 401 | - line (str): The input string containing file path, line number, and issue description. 402 | - prefix (str): The prefix to remove from the start of the file path in the line. 403 | - was_note (bool): Indicates if the previous issue was a note. 404 | - output_string (str): The string containing previous output information. 405 | 406 | Returns: 407 | - tuple: A tuple containing: 408 | - file_path (str): The path to the file. 409 | - is_note (bool): A flag indicating if the issue is a note. 410 | - description (str): Description of the issue. 411 | - file_line_start (int): The starting line number of the issue. 412 | - file_line_end (int): The ending line number of the issue. 413 | """ 414 | 415 | # Clean up line 416 | line = line.replace(prefix, "").lstrip("/") 417 | 418 | # Get the line starting position /path/to/file:line and trim it 419 | file_path_end_idx = line.index(":") 420 | file_path = line[:file_path_end_idx] 421 | 422 | # Extract the lines information 423 | line = line[file_path_end_idx + 1 :] 424 | 425 | # Get line (start, end) 426 | file_line_start = int(line[: line.index(":")]) 427 | file_line_end = get_file_line_end(file_path, file_line_start) 428 | 429 | # Get content of the issue 430 | issue_description = line[line.index(" ") + 1 :] 431 | is_note = issue_description.startswith("note:") 432 | 433 | return (file_path, is_note, file_line_start, file_line_end, issue_description) 434 | 435 | 436 | def create_common_input_vars_parser(): 437 | parser = argparse.ArgumentParser() 438 | parser.add_argument( 439 | "-o", 440 | "--output_to_console", 441 | help="Whether to output the result to console", 442 | required=True, 443 | ) 444 | parser.add_argument( 445 | "-fk", 446 | "--fork_repository", 
447 | help="Whether the actual code is in 'pr_tree' directory", 448 | required=True, 449 | ) 450 | parser.add_argument( 451 | "--common", 452 | default="", 453 | help="common ancestor between two branches (default: %(default)s)", 454 | ) 455 | parser.add_argument("--head", default="", help="Head branch (default: %(default)s)") 456 | 457 | return parser 458 | --------------------------------------------------------------------------------