├── .github ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── ci.yml │ ├── codeql-analysis.yml │ └── stale.yml ├── .gitignore ├── .isort.cfg ├── Dockerfile ├── LICENSE ├── README.md ├── conftest.py ├── docker-compose.yml ├── hooks ├── command └── post-command ├── plugin.yml ├── requirements-dev.txt ├── requirements.txt ├── scripts ├── __init__.py └── generate_pipeline.py └── tests ├── CONSTANTS.py ├── Dockerfile └── unit ├── __init__.py ├── test_get_diff.py ├── test_git_diff_conditional.py ├── test_handler.py └── test_log_and_exit.py /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Please raise issues, and then reference the issue within a PR. This will help track bugs throughout their life-cycle if any are identified. 4 | 5 | ## Pull Requests 6 | 7 | ### Prefix commits 8 | 9 | Please prefix commits with the following tags depending on what they are targeting: 10 | 11 | - `[core]` - Updates to the core python script used to generate the `dynamic_pipeline` 12 | - `[tests]` - Updates to tests 13 | - `[docs]` - Documentation updates 14 | - `[format]` - Commits related to formatting of the code 15 | - `[ci]` - commits related to github actions 16 | 17 | Use long-descriptions to go into more depth if it is required 18 | 19 | ### Running the tests (locally) 20 | 21 | Ensure that all your tests are located within the `tests` directory and then run: 22 | 23 | ```bash 24 | docker-compose up --build 25 | ``` 26 | 27 | The python code should be formatted with [black](https://pypi.org/project/black/) and [isort](https://pypi.org/project/isort/) 28 | 29 | If `black` or `isort` fail, then you can fix them by running `black .` or `isort --recursive .`. 
Once you have formatted the code, commit formatting as a separate commit (this reduces noise in a PR if formatting results in large changes) -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Background 2 | 3 | ### Description 4 | 5 | *Detailed description of bug/feature* 6 | 7 | ### Steps to Reproduce 8 | *Only relevant for bugs* 9 | 10 | * *Run command ...* 11 | * ... 12 | * ... 13 | 14 | ## Desired Change 15 | 16 | *High level overview of the desired change or outcome.* -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | to: 2 | cc: @zegocover/git-diff-conditional-buildkite-plugin-maintainers 3 | related to: 4 | resolves: 5 | 6 | ## Background 7 | 8 | Reason for the change 9 | 10 | ## Changes 11 | 12 | * Summary of changes 13 | * ... 
14 | 15 | ## Testing 16 | 17 | Steps for how this change was tested and verified -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Run CI Testing & Linting 2 | on: 3 | [push] 4 | jobs: 5 | Linting: 6 | runs-on: ubuntu-18.04 7 | steps: 8 | - uses: actions/checkout@v2 9 | - name: Run Buildkite Plugin Linter 10 | run: docker-compose run --rm buildkite_plugin_linter 11 | - name: Run python black (code formatting) 12 | run: docker-compose run --rm python_black 13 | - name: Run python isort (import order) 14 | run: docker-compose run --rm python_isort 15 | Tests: 16 | runs-on: ubuntu-18.04 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Run Python Pytest 20 | run: docker-compose run --rm python_pytest -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | name: "CodeQL" 7 | 8 | on: 9 | push: 10 | branches: [master] 11 | pull_request: 12 | # The branches below must be a subset of the branches above 13 | branches: [master] 14 | schedule: 15 | - cron: '0 4 * * 5' 16 | 17 | jobs: 18 | analyze: 19 | name: Analyze 20 | runs-on: ubuntu-latest 21 | 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | # Override automatic language detection by changing the below list 26 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] 27 | language: ['python'] 28 | # Learn more... 
29 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 30 | 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v2 34 | with: 35 | # We must fetch at least the immediate parents so that if this is 36 | # a pull request then we can checkout the head. 37 | fetch-depth: 2 38 | 39 | # If this run was triggered by a pull request event, then checkout 40 | # the head of the pull request instead of the merge commit. 41 | - run: git checkout HEAD^2 42 | if: ${{ github.event_name == 'pull_request' }} 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v1 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v1 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 
60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v1 72 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: Mark stale issues and pull requests 2 | 3 | on: 4 | schedule: 5 | - cron: "0 0 * * SUN" 6 | jobs: 7 | stale: 8 | runs-on: ubuntu-18.04 9 | steps: 10 | - uses: actions/stale@v1 11 | with: 12 | repo-token: ${{ secrets.GITHUB_TOKEN }} 13 | stale-issue-message: 'Stale issue message' 14 | stale-pr-message: 'Stale pull request message' 15 | stale-issue-label: 'no-issue-activity' 16 | stale-pr-label: 'no-pr-activity' 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | cover/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | .pybuilder/ 77 | target/ 78 | 79 | # Jupyter Notebook 80 | .ipynb_checkpoints 81 | 82 | # IPython 83 | profile_default/ 84 | ipython_config.py 85 | 86 | # pyenv 87 | # For a library or package, you might want to ignore these files since the code is 88 | # intended to run in multiple environments; otherwise, check them in: 89 | # .python-version 90 | 91 | # pipenv 92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 95 | # install all needed dependencies. 96 | #Pipfile.lock 97 | 98 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 99 | __pypackages__/ 100 | 101 | # Celery stuff 102 | celerybeat-schedule 103 | celerybeat.pid 104 | 105 | # SageMath parsed files 106 | *.sage.py 107 | 108 | # Environments 109 | .env 110 | .venv 111 | env/ 112 | venv/ 113 | ENV/ 114 | env.bak/ 115 | venv.bak/ 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mkdocs documentation 125 | /site 126 | 127 | # mypy 128 | .mypy_cache/ 129 | .dmypy.json 130 | dmypy.json 131 | 132 | # Pyre type checker 133 | .pyre/ 134 | 135 | # pytype static type analyzer 136 | .pytype/ 137 | 138 | # Cython debug symbols 139 | cython_debug/ 140 | 141 | # static files generated from Django application using `collectstatic` 142 | media 143 | static 144 | 145 | # user setup 146 | .vscode/ 147 | .tool-versions -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | profile=black 3 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7-slim 2 | 3 | RUN apt-get update && \ 4 | apt-get upgrade -y 5 | 6 | WORKDIR "/buildkite" 7 | 8 | COPY requirements.txt ./ 9 | RUN pip install --no-cache-dir -r requirements.txt 10 | 11 | COPY scripts/generate_pipeline.py /usr/local/bin/ 12 | 13 | CMD ["generate_pipeline.py"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Zego 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including 
without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # git-diff-conditional-buildkite-plugin 2 | 3 | ![Github Actions: Testing & Linting](https://github.com/Zegocover/git-diff-conditional-buildkite-plugin/workflows/Run%20CI%20Testing%20&%20Linting/badge.svg) 4 | 5 | This plugin can be used to create a dynamic pipeline based on your `git diff`. This requires TWO pipeline files in total and uses `docker` to run the plugin: 6 | 7 | - `initial_pipeline` - The one that tells buildkite to load the plugin 8 | - `dynamic_pipeline` - The pipeline which you want to run, but have steps skipped based on conditions held in the `initial` 9 | 10 | But what about [mono-repo-diff](https://github.com/chronotc/monorepo-diff-buildkite-plugin), it is designed to spin up multiple pipelines based on your `git diff`. The purpose of this plugin is to spin up a single pipeline based on your `git diff`. 
For another similar concept check out the gitlab plugin [onlychangesexceptchanges](https://docs.gitlab.com/ee/ci/yaml/#onlychangesexceptchanges) 11 | 12 | ## Getting Started 13 | 14 | Please see the below examples on how to use this plugin with buildkite. The [buildkite-agent](https://buildkite.com/docs/agent/v3) also requires access to `docker`. 15 | 16 | ### Example 17 | 18 | `initial_pipeline` 19 | ```yaml 20 | steps: 21 | - label: ":partyparrot: Creating the pipeline" 22 | plugins: 23 | - Zegocover/git-diff-conditional#v1.1.1: 24 | dynamic_pipeline: ".buildkite/dynamic_pipeline.yml" 25 | steps: 26 | - label: "build and deploy lambda" 27 | include: 28 | - "function_code/*" 29 | ``` 30 | 31 | `dynamic_pipeline` 32 | ```yaml 33 | steps: 34 | - label: "build and deploy lambda" 35 | commands: 36 | - make lambda 37 | agents: 38 | queue: awesome 39 | timeout_in_minutes: 10 40 | 41 | - wait 42 | 43 | - label: "terraform apply" 44 | commands: 45 | - terraform apply 46 | agents: 47 | queue: awesome 48 | timeout_in_minutes: 10 49 | ``` 50 | 51 | The above example `initial_pipeline` will skip the `build and deploy lambda` step unless there has been a change to the `function_code` directory. Everything else in the `dynamic_pipeline` file will be left intact and passed through to buildkite. It is possible to configure numerous `label` fields, using the configuration options below. 
52 | 53 | 54 | ## Configuration 55 | 56 | | Option | Required | Type | Default | Description | 57 | | ---------------- | :------: | :-------: | :-----: | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 58 | | dynamic_pipeline | Yes | `string` | | The name including the path to the pipeline that contains all the actual `steps` | 59 | | disable_plugin | No | `boolean` | `false` | This can be used to pass the entire `dynamic_pipeline` pipeline straight to buildkite without skipping a single step. | 60 | | diff | No | `string` | | Can be used to override the default commands (see below for a better explanation of the defaults) Pass a comma-seperated string of git diff commands if you want multiple custom git diff commands run | 61 | | log_level | No | `string` | `INFO` | The Level of logging to be used by the python script underneath. Pass `DEBUG` for verbose logging if errors occur | 62 | | steps | Yes | `array` | | Each Step should contain a `label` with the `include`/`exclude` settings relevant to the label it applies to within the `dynamic_pipeline` file | 63 | | label | Yes | `string` | | The `label` these conditions apply to within the `dynamic_pipeline` file. 
(These should be an EXACT match) | 64 | | include | No | `array` | | If any element is found within the `git diff` then this step will NOT be skipped | 65 | | exclude | No | `array` | | If any element is found within the `git diff` then this step will be SKIPPED | 66 | 67 | Other useful things to note: 68 | - Both `include` and `exclude` make use of Unix shell-style wildcards (Look at `.gitignore` files for inspiration) 69 | - Every `label` defined within the `initial_pipeline` should contain `include`, `exclude` or both 70 | 71 | ### `diff` command 72 | 73 | The default `diff` commands are (run in the order shown): 74 | 75 | ```bash 76 | # Used to check if on a feature branch and check diff against master 77 | git diff --name-only origin/master...HEAD 78 | 79 | # Useful for checking master against master in a merge commit strategy environment 80 | git diff --name-only HEAD HEAD~1 81 | ``` 82 | 83 | Both of the above commands are run, in their order listed above to detect if there is any `diff`. If there isn't any `diff` then there will be no `dynamic_pipeline` uploaded. If you wish to disable the plugin temporarily then see the above [Configuration](#Configuration) 84 | 85 | Depending on your [merge strategy](https://help.github.com/en/github/administering-a-repository/about-merge-methods-on-github), you might need to use different `diff` commands. 86 | 87 | We are assuming that you are using a squash and merge strategy on the master branch 88 | 89 | ## Contributing 90 | 91 | Please read [CONTRIBUTING](https://github.com/Zegocover/git-diff-conditional-buildkite-plugin/blob/master/.github/CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us. 92 | 93 | ## Versioning 94 | 95 | We use [SemVer](http://semver.org/) for versioning. For the versions available, see the [tags on this repository](https://github.com/Zegocover/git-diff-conditional-buildkite-plugin/tags). 
96 | 97 | ## Authors 98 | 99 | * **Jack** - *Initial work* - [jack1902](https://github.com/jack1902) 100 | * **Elliot** - *Initial work* - [wizardels](https://github.com/wizardels) 101 | 102 | See also the list of [contributors](https://github.com/Zegocover/git-diff-conditional-buildkite-plugin/contributors) who participated in this project. 103 | 104 | ## License 105 | 106 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details 107 | 108 | ## Acknowledgments 109 | 110 | * Initially looked at the [mono-repo-diff-buildkite-plugin](https://github.com/chronotc/monorepo-diff-buildkite-plugin) 111 | * Also looked at the gitlab plugin [onlychangesexceptchanges](https://docs.gitlab.com/ee/ci/yaml/#onlychangesexceptchanges) 112 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | """Place fixtures in this file for use across all test files""" 2 | import pytest 3 | 4 | 5 | @pytest.fixture(scope="function") 6 | def logger(caplog): 7 | caplog.set_level("DEBUG") 8 | return caplog 9 | 10 | 11 | @pytest.fixture 12 | def log_and_exit_mock(mocker): 13 | return mocker.patch("scripts.generate_pipeline.log_and_exit") 14 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: "3" 3 | services: 4 | buildkite_plugin_linter: 5 | image: buildkite/plugin-linter 6 | command: [ 7 | --id, Zegocover/git-diff-conditional 8 | ] 9 | volumes: 10 | - .:/plugin 11 | python_pytest: 12 | build: 13 | context: . 14 | dockerfile: tests/Dockerfile 15 | command: ["pytest"] 16 | volumes: 17 | - .:/buildkite 18 | python_black: 19 | build: 20 | context: . 
21 | dockerfile: tests/Dockerfile 22 | command: ["black", ".", "--check", "--diff"] 23 | volumes: 24 | - .:/buildkite 25 | python_isort: 26 | build: 27 | context: . 28 | dockerfile: tests/Dockerfile 29 | command: ["isort", ".", "--check-only"] 30 | volumes: 31 | - .:/buildkite -------------------------------------------------------------------------------- /hooks/command: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | set +u 5 | # Check if the plugin is disabled 6 | if [[ -n "$BUILDKITE_PLUGIN_GIT_DIFF_CONDITIONAL_DISABLE_PLUGIN" ]]; then 7 | echo "Plugin disable flag detected, passing entire pipeline to buildkite" 8 | buildkite-agent pipeline upload "$BUILDKITE_PLUGIN_GIT_DIFF_CONDITIONAL_DYNAMIC_PIPELINE" 9 | exit 0 10 | fi 11 | set -u 12 | 13 | f_get_diff() { 14 | local default_diff_commands 15 | local diff_commands 16 | 17 | default_diff_commands="git diff --name-only origin/master...HEAD,git diff --name-only HEAD HEAD~1" 18 | 19 | IFS=',' 20 | read -r -a diff_commands <<< "${BUILDKITE_PLUGIN_GIT_DIFF_CONDITIONAL_DIFF:-$default_diff_commands}" 21 | 22 | for diff_command in "${diff_commands[@]}"; do 23 | echo >&2 "Checking for diff using: ($diff_command)" 24 | 25 | diff=$(eval "$diff_command") 26 | if [[ -n "$diff" ]]; then 27 | echo >&2 "Found diff using command ($diff_command)" 28 | break 29 | fi 30 | done 31 | 32 | echo "$diff" 33 | } 34 | 35 | basedir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. 
&& pwd )" 36 | 37 | diff=$(f_get_diff) 38 | if [[ -z "$diff" ]]; then 39 | echo "No diff detected" 40 | exit 0 41 | else 42 | mkdir -p .git_diff_conditional 43 | echo "$diff" > .git_diff_conditional/git_diff 44 | fi 45 | 46 | docker build "$basedir" -t buildkite-pyyaml > /dev/null 47 | 48 | docker run --rm -v "$PWD:/buildkite" --env-file <(env | grep BUILDKITE) buildkite-pyyaml 49 | 50 | if [[ -s ".git_diff_conditional/pipeline_output" ]]; then 51 | echo "Uploading the pipeline to the buildkite-agent" 52 | buildkite-agent pipeline upload .git_diff_conditional/pipeline_output 53 | fi 54 | -------------------------------------------------------------------------------- /hooks/post-command: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | echo "Cleaning up plugin-cache" 5 | 6 | rm -rf .git_diff_conditional -------------------------------------------------------------------------------- /plugin.yml: -------------------------------------------------------------------------------- 1 | name: Git Diff Conditional 2 | description: Conditionally run steps based on git diff files 3 | author: https://github.com/zegocover 4 | requirements: 5 | - docker 6 | configuration: 7 | properties: 8 | dynamic_pipeline: 9 | type: string 10 | disable_plugin: 11 | type: boolean 12 | diff: 13 | type: string 14 | log_level: 15 | type: string 16 | steps: 17 | type: [ object, array ] 18 | minimum: 1 19 | properties: 20 | label: 21 | type: string 22 | include: 23 | type: [ string, array ] 24 | exclude: 25 | type: [ string, array ] 26 | required: 27 | - dynamic_pipeline 28 | - steps -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | # Install dev requirements 3 | pytest==5.4.1 4 | pytest-mock==2.0.0 5 | pytest-randomly==3.2.1 6 | pytest-cov==2.8.1 7 | 
pytest-sugar 8 | black 9 | isort -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pyyaml==5.4 -------------------------------------------------------------------------------- /scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Zegocover/git-diff-conditional-buildkite-plugin/2905bf7097e421efa59411bb5db25fbae2912caa/scripts/__init__.py -------------------------------------------------------------------------------- /scripts/generate_pipeline.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import logging 4 | import os 5 | import re 6 | import sys 7 | from fnmatch import fnmatch 8 | 9 | import yaml 10 | from yaml.scanner import ScannerError 11 | 12 | # Setup logging 13 | logging.basicConfig(format="%(levelname)s %(message)s") 14 | LOG = logging.getLogger("cli") 15 | 16 | 17 | def log_and_exit(log_level: str, log_message: str, exit_code: int) -> None: 18 | """Handles the log_message and exit_code""" 19 | logger = getattr(LOG, log_level.lower()) 20 | logger(log_message) 21 | sys.exit(exit_code) 22 | 23 | 24 | class GitDiffConditional: 25 | """The class to generate the pipeline from environment variables""" 26 | 27 | def __init__(self, diff, plugin_prefix): 28 | self.diff = diff 29 | self.plugin_prefix = plugin_prefix 30 | 31 | def load_dynamic_pipeline(self, env_var_suffix: str) -> dict: 32 | """Load the pipeline from the given file_name 33 | 34 | Returns: 35 | dict: Contains the buildkite pipeline 36 | """ 37 | env_var = f"{self.plugin_prefix}_{env_var_suffix}" 38 | 39 | LOG.info("Checking env var: %s for file name", env_var) 40 | 41 | pipeline_file_name = os.environ[env_var] 42 | 43 | try: 44 | with open(pipeline_file_name, "r") as stream: 45 | pipeline = yaml.safe_load(stream) 46 
| except FileNotFoundError as e: 47 | LOG.error(e) 48 | log_and_exit("error", f"File Name: ({pipeline_file_name}) Not Found", 1) 49 | except ScannerError: 50 | LOG.error("Invalid YAML in File: %s", pipeline_file_name) 51 | else: 52 | return pipeline 53 | 54 | def load_conditions_from_environment(self) -> dict: 55 | """Loads the defined conditions from the environment variables""" 56 | regex = re.compile(f"{self.plugin_prefix}_STEPS_[0-9]*_LABEL") 57 | step_labels = {k: v for k, v in os.environ.items() if re.search(regex, k)} 58 | 59 | conditions = {} 60 | 61 | for key, label in step_labels.items(): 62 | LOG.debug("Checking %s", key) 63 | 64 | step_conditions = {} 65 | step_number = key.replace(f"{self.plugin_prefix}_STEPS_", "").replace( 66 | "_LABEL", "" 67 | ) 68 | for option in ["INCLUDE", "EXCLUDE"]: 69 | LOG.debug("Checking %s", option) 70 | 71 | step_regex = re.compile( 72 | f"{self.plugin_prefix}_STEPS_{step_number}_{option}" 73 | ) 74 | 75 | patterns = [ 76 | v for k, v in os.environ.items() if re.search(step_regex, k) 77 | ] 78 | 79 | LOG.debug("Found patterns: (%s)", patterns) 80 | step_conditions[option] = patterns 81 | 82 | if step_conditions: 83 | conditions[label] = self.generate_skip( 84 | label, step_conditions["INCLUDE"], step_conditions["EXCLUDE"] 85 | ) 86 | 87 | return conditions 88 | 89 | def generate_skip(self, label: str, include: list, exclude: list) -> bool: 90 | skip = False 91 | if not include and not exclude: 92 | LOG.warning("label (%s) passed in but no skip settings configured", label) 93 | pass 94 | elif exclude: 95 | # Should skip the step (exclude is stronger than include) 96 | skip = self.pattern_match(exclude) 97 | elif include: 98 | # Should include the step 99 | skip = not self.pattern_match(include) 100 | 101 | return skip 102 | 103 | def pattern_match(self, patterns: list) -> bool: 104 | result = False 105 | 106 | for pattern in patterns: 107 | if any(fnmatch(_file, pattern) for _file in self.diff): 108 | result = True 109 | 
break 110 | 111 | return result 112 | 113 | def generate_pipeline_from_conditions( 114 | self, dynamic_pipeline: dict, conditions: dict 115 | ) -> dict: 116 | """Generate the pipeline based on logic held in the origial file 117 | 118 | Returns: 119 | dict: Contains the steps for the pipeline 120 | """ 121 | pipeline = {"steps": []} 122 | 123 | for step in dynamic_pipeline["steps"]: 124 | if isinstance(step, dict) and "wait" not in step.keys(): 125 | # Only check for actual steps, not waits 126 | step["skip"] = self.check_if_skip(conditions, step) 127 | 128 | # Always put the step back onto the pipeline even if it is skipped 129 | pipeline["steps"].append(step) 130 | 131 | return pipeline 132 | 133 | @staticmethod 134 | def check_if_skip(conditional_steps: dict, step: dict) -> bool: 135 | label = step["label"] if "label" in step else step["block"] 136 | 137 | if "skip" in step: 138 | # Skip setings already exist 139 | LOG.warning("label (%s) already has a skip key", label) 140 | return step["skip"] 141 | 142 | if label not in conditional_steps: 143 | LOG.warning("No Conditions set for label (%s)", label) 144 | return False 145 | 146 | return conditional_steps[label] 147 | 148 | 149 | def get_diff(): 150 | try: 151 | with open(".git_diff_conditional/git_diff", "r") as _fp: 152 | diff = [_file.strip() for _file in _fp.readlines() if _file.strip() != ""] 153 | except FileNotFoundError: 154 | log_and_exit("error", "Error getting diff from file", 1) 155 | else: 156 | return diff 157 | 158 | 159 | def handler(): 160 | # Setup Defaults 161 | plugin_prefix = "BUILDKITE_PLUGIN_GIT_DIFF_CONDITIONAL" 162 | 163 | log_level = os.getenv(f"{plugin_prefix}_LOG_LEVEL", "INFO") 164 | LOG.setLevel(log_level) 165 | 166 | # Get the git diff 167 | diff = get_diff() 168 | 169 | # Instantiate the Class 170 | git_diff_conditions = GitDiffConditional(diff, plugin_prefix) 171 | 172 | # Get the dynamic_pipeline 173 | dynamic_pipeline = 
git_diff_conditions.load_dynamic_pipeline("DYNAMIC_PIPELINE") 174 | 175 | # Get the conditions 176 | conditions = git_diff_conditions.load_conditions_from_environment() 177 | 178 | # Generate the pipeline 179 | pipeline = git_diff_conditions.generate_pipeline_from_conditions( 180 | dynamic_pipeline, conditions 181 | ) 182 | 183 | if not pipeline["steps"]: 184 | log_and_exit("info", f"No pipeline generated for diff: ({diff})", 0) 185 | else: 186 | LOG.info("Dynamic pipeline generated, saving for agent upload") 187 | 188 | try: 189 | with open(".git_diff_conditional/pipeline_output", "w") as _fp: 190 | yaml.dump(pipeline, _fp, default_flow_style=False) 191 | except Exception: 192 | log_and_exit("error", "error saving pipeline to disk", 1) 193 | 194 | 195 | if __name__ == "__main__": 196 | handler() 197 | -------------------------------------------------------------------------------- /tests/CONSTANTS.py: -------------------------------------------------------------------------------- 1 | LOGGER_NAME = "cli" 2 | PLUGIN_PREFIX = "BUILDKITE_PLUGIN_GIT_DIFF_CONDITIONAL" 3 | -------------------------------------------------------------------------------- /tests/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7-slim 2 | 3 | RUN apt-get update && \ 4 | apt-get upgrade -y && \ 5 | apt-get install -y git 6 | 7 | WORKDIR "/buildkite" 8 | 9 | COPY requirements.txt requirements-dev.txt ./ 10 | RUN pip install --no-cache-dir -r requirements-dev.txt -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Zegocover/git-diff-conditional-buildkite-plugin/2905bf7097e421efa59411bb5db25fbae2912caa/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/test_get_diff.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from scripts.generate_pipeline import get_diff 4 | 5 | # 6 | # function get_diff tests 7 | # 8 | 9 | 10 | @pytest.mark.parametrize( 11 | "file_contents,expected_result", 12 | [ 13 | ( 14 | """test.py 15 | folder_a/test.tf 16 | folder_a/folder_b/test.txt""", 17 | ["test.py", "folder_a/test.tf", "folder_a/folder_b/test.txt"], 18 | ), # Diff present 19 | ("\n", []), # No Diff 20 | ], 21 | ) 22 | def test_get_diff(mocker, file_contents, expected_result): 23 | open_mock = mocker.patch( 24 | "scripts.generate_pipeline.open", mocker.mock_open(read_data=file_contents) 25 | ) 26 | 27 | result = get_diff() 28 | 29 | # Tests 30 | assert result == expected_result 31 | open_mock.assert_called_once_with(".git_diff_conditional/git_diff", "r") 32 | 33 | 34 | def test_get_diff_no_file(mocker, log_and_exit_mock): 35 | open_mock = mocker.patch( 36 | "scripts.generate_pipeline.open", side_effect=FileNotFoundError 37 | ) 38 | 39 | result = get_diff() 40 | 41 | # Tests 42 | assert result is None 43 | open_mock.assert_called_once_with(".git_diff_conditional/git_diff", "r") 44 | log_and_exit_mock.assert_called_once_with( 45 | "error", "Error getting diff from file", 1 46 | ) 47 | -------------------------------------------------------------------------------- /tests/unit/test_git_diff_conditional.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from CONSTANTS import LOGGER_NAME, PLUGIN_PREFIX 3 | 4 | from scripts.generate_pipeline import GitDiffConditional 5 | 6 | # Fixtures 7 | 8 | 9 | @pytest.fixture(scope="module") 10 | def file_name_env_var_suffix(): 11 | return "PIPELINE_FILE" 12 | 13 | 14 | @pytest.fixture(scope="module") 15 | def file_name(): 16 | return "pipeline.yml" 17 | 18 | 19 | @pytest.fixture(autouse=True) 20 | def put_env_var(monkeypatch, file_name_env_var_suffix, file_name): 21 | 
monkeypatch.setenv(f"{PLUGIN_PREFIX}_{file_name_env_var_suffix}", file_name) 22 | 23 | 24 | @pytest.fixture(scope="function") 25 | def git_diff_conditional(): 26 | return GitDiffConditional([], PLUGIN_PREFIX) 27 | 28 | 29 | # 30 | # Class GitDiffConditional Method load_dynamic_pipeline tests 31 | # 32 | 33 | 34 | def test_load_dynamic_pipeline_success( 35 | mocker, file_name_env_var_suffix, git_diff_conditional, logger 36 | ): 37 | file_name_env_var = f"{PLUGIN_PREFIX}_{file_name_env_var_suffix}" 38 | open_mock = mocker.patch( 39 | "scripts.generate_pipeline.open", 40 | mocker.mock_open( 41 | read_data=""" 42 | steps: 43 | - label: test 44 | queue: test 45 | """ 46 | ), 47 | ) 48 | 49 | result = git_diff_conditional.load_dynamic_pipeline(file_name_env_var_suffix) 50 | 51 | # Tests 52 | 53 | assert result == {"steps": [{"label": "test", "queue": "test"}]} 54 | 55 | open_mock.assert_called_once_with("pipeline.yml", "r") 56 | assert logger.record_tuples == [ 57 | (LOGGER_NAME, 20, f"Checking env var: {file_name_env_var} for file name") 58 | ] 59 | 60 | 61 | def test_load_dynamic_pipeline_file_not_found( 62 | mocker, log_and_exit_mock, file_name_env_var_suffix, git_diff_conditional, logger 63 | ): 64 | file_name_env_var = f"{PLUGIN_PREFIX}_{file_name_env_var_suffix}" 65 | 66 | open_mock = mocker.patch( 67 | "scripts.generate_pipeline.open", side_effect=FileNotFoundError 68 | ) 69 | yaml_load_patch = mocker.patch("scripts.generate_pipeline.yaml.safe_load") 70 | 71 | result = git_diff_conditional.load_dynamic_pipeline(file_name_env_var_suffix) 72 | 73 | # Tests 74 | 75 | assert result is None 76 | 77 | open_mock.assert_called_once_with("pipeline.yml", "r") 78 | yaml_load_patch.assert_not_called() 79 | assert logger.record_tuples == [ 80 | (LOGGER_NAME, 20, f"Checking env var: {file_name_env_var} for file name"), 81 | (LOGGER_NAME, 40, ""), 82 | ] 83 | log_and_exit_mock.assert_called_once_with( 84 | "error", "File Name: (pipeline.yml) Not Found", 1 85 | ) 86 | 87 | 88 | 
def test_load_dynamic_pipeline_bad_yaml( 89 | mocker, log_and_exit_mock, file_name_env_var_suffix, git_diff_conditional, logger 90 | ): 91 | file_name_env_var = f"{PLUGIN_PREFIX}_{file_name_env_var_suffix}" 92 | open_mock = mocker.patch( 93 | "scripts.generate_pipeline.open", mocker.mock_open(read_data="bad_yaml: - :") 94 | ) 95 | 96 | result = git_diff_conditional.load_dynamic_pipeline(file_name_env_var_suffix) 97 | 98 | # Tests 99 | assert result is None 100 | 101 | open_mock.assert_called_once_with("pipeline.yml", "r") 102 | assert logger.record_tuples == [ 103 | (LOGGER_NAME, 20, f"Checking env var: {file_name_env_var} for file name"), 104 | (LOGGER_NAME, 40, "Invalid YAML in File: pipeline.yml"), 105 | ] 106 | 107 | 108 | # 109 | # Class GitDiffConditional Method generate_skip tests 110 | # 111 | 112 | 113 | def test_generate_skip_empty(logger, git_diff_conditional): 114 | label = "test" 115 | 116 | result = git_diff_conditional.generate_skip(label, [], []) 117 | 118 | # Tests 119 | assert not result 120 | assert logger.record_tuples == [ 121 | (LOGGER_NAME, 30, f"label ({label}) passed in but no skip settings configured") 122 | ] 123 | 124 | 125 | # 126 | # Class GitDiffConditional Method load_conditions_from_environment tests 127 | # 128 | 129 | 130 | @pytest.mark.parametrize( 131 | "pipeline_as_env,diff,expected_result", 132 | [ 133 | ( # Test only one label with include 134 | {"0_LABEL": "test_1", "0_INCLUDE": "terraform/*.tf"}, 135 | ["terraform/main.tf"], 136 | {"test_1": False}, 137 | ), 138 | ( # Test only one label with exclude 139 | {"0_LABEL": "test_2", "0_EXCLUDE": "folder/**"}, 140 | ["folder/file"], 141 | {"test_2": True}, 142 | ), 143 | ( # Test two labels with include/exclude 144 | { 145 | "0_LABEL": "test_3", 146 | "0_INCLUDE": "**/file_0", 147 | "1_LABEL": "test_3_other", 148 | "1_EXCLUDE": "**/*_1", 149 | }, 150 | ["folder_a/file_0", "folder_b/.folder/file_2"], 151 | {"test_3": False, "test_3_other": False}, 152 | ), 153 | ({}, ["file.py"], 
{}), # Test with no vars passed 154 | ( 155 | {"0_LABEL": "test_4", "0_INCLUDE": "file.py"}, 156 | [], 157 | {"test_4": True}, 158 | ), # Test with no diff 159 | ( 160 | {"0_LABEL": "test_5", "0_INCLUDE": "*"}, 161 | ["file.py", "folder/file.tf"], 162 | {"test_5": False}, 163 | ), # Test with always include if diff 164 | ], 165 | ) 166 | def test_load_conditions_from_environment( 167 | monkeypatch, 168 | git_diff_conditional, 169 | pipeline_as_env, 170 | diff, 171 | expected_result, 172 | ): 173 | 174 | for key, value in pipeline_as_env.items(): 175 | monkeypatch.setenv(f"{PLUGIN_PREFIX}_STEPS_{key}", value) 176 | 177 | git_diff_conditional = GitDiffConditional(diff, PLUGIN_PREFIX) 178 | conditions = git_diff_conditional.load_conditions_from_environment() 179 | 180 | # Tests 181 | assert conditions == expected_result 182 | 183 | 184 | # 185 | # Class GitDiffConditional Method generate_pipeline_from_conditions tests 186 | # 187 | 188 | 189 | @pytest.mark.parametrize( 190 | "dynamic_steps,conditions,result_steps", 191 | [ 192 | ( 193 | [{"label": "test_0"}], 194 | {"test_0": True}, 195 | [{"label": "test_0", "skip": True}], 196 | ), # skip True 197 | ( 198 | [{"label": "test_1"}], 199 | {"test_1": False}, 200 | [{"label": "test_1", "skip": False}], 201 | ), # skip False 202 | ([], {"test": True}, []), # no dynamic_pipeline 203 | ( 204 | [{"label": "test_3"}, {"label": "test_3_a"}], 205 | {"test_3": True}, 206 | [{"label": "test_3", "skip": True}, {"label": "test_3_a", "skip": False}], 207 | ), # skip true/false 208 | ( 209 | [{"label": "test_4_label"}, "wait", {"block": "test_4_block"}], 210 | {"test_4_label": True, "test_4_block": True}, 211 | [ 212 | {"label": "test_4_label", "skip": True}, 213 | "wait", 214 | {"block": "test_4_block", "skip": True}, 215 | ], 216 | ), # Check label and block 217 | ( 218 | ["wait"], 219 | {}, 220 | ["wait"], 221 | ), # basic wait step 222 | ( 223 | [{"wait": None, "continue_on_failure": True}], 224 | {}, 225 | [{"wait": None, 
"continue_on_failure": True}], 226 | ), # dictionary wait step 227 | ], 228 | ) 229 | def test_generate_pipeline_from_conditions( 230 | logger, git_diff_conditional, dynamic_steps, conditions, result_steps 231 | ): 232 | dynamic_pipeline = {"steps": dynamic_steps} 233 | result = git_diff_conditional.generate_pipeline_from_conditions( 234 | dynamic_pipeline, conditions 235 | ) 236 | 237 | # Tests 238 | assert result == {"steps": result_steps} 239 | 240 | 241 | def test_generate_pipeline_from_conditions_with_skip_in_step( 242 | logger, git_diff_conditional 243 | ): 244 | dynamic_pipeline = {"steps": [{"label": "already contains skip", "skip": False}]} 245 | conditions = {} 246 | 247 | result = git_diff_conditional.generate_pipeline_from_conditions( 248 | dynamic_pipeline, conditions 249 | ) 250 | 251 | # Tests 252 | assert result == {"steps": [{"label": "already contains skip", "skip": False}]} 253 | assert logger.record_tuples == [ 254 | ("cli", 30, "label (already contains skip) already has a skip key") 255 | ] 256 | -------------------------------------------------------------------------------- /tests/unit/test_handler.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from CONSTANTS import PLUGIN_PREFIX 3 | 4 | from scripts.generate_pipeline import GitDiffConditional, handler 5 | 6 | 7 | # Mocks 8 | @pytest.fixture 9 | def get_diff_mock(mocker): 10 | return mocker.patch("scripts.generate_pipeline.get_diff", return_value=[]) 11 | 12 | 13 | # Tests 14 | def setup_git_diff_conditional_mock(mocker, condition_return_value): 15 | 16 | return_value = mocker.MagicMock( 17 | spec=GitDiffConditional, 18 | load_dynamic_pipeline=mocker.Mock(return_value={}), 19 | load_conditions_from_environment=mocker.Mock(return_value={}), 20 | generate_pipeline_from_conditions=mocker.Mock( 21 | return_value=condition_return_value 22 | ), 23 | ) 24 | 25 | return mocker.patch( 26 | "scripts.generate_pipeline.GitDiffConditional", 
return_value=return_value 27 | ) 28 | 29 | 30 | def test_handler_empty_steps( 31 | mocker, monkeypatch, logger, log_and_exit_mock, get_diff_mock 32 | ): 33 | monkeypatch.setenv(f"{PLUGIN_PREFIX}_LOG_LEVEL", "DEBUG") 34 | 35 | open_mock = mocker.patch("scripts.generate_pipeline.open", mocker.mock_open()) 36 | 37 | git_diff_conditional_mock = setup_git_diff_conditional_mock(mocker, {"steps": []}) 38 | 39 | handler() 40 | 41 | # Tests 42 | assert logger.record_tuples == [] 43 | open_mock.assert_not_called() 44 | get_diff_mock.assert_called_once_with() 45 | git_diff_conditional_mock.assert_called_once_with( 46 | get_diff_mock.return_value, PLUGIN_PREFIX 47 | ) 48 | log_and_exit_mock.assert_called_once_with( 49 | "info", "No pipeline generated for diff: ([])", 0 50 | ) 51 | 52 | 53 | def test_handler_with_steps( 54 | mocker, monkeypatch, logger, log_and_exit_mock, get_diff_mock 55 | ): 56 | monkeypatch.setenv(f"{PLUGIN_PREFIX}_LOG_LEVEL", "DEBUG") 57 | 58 | open_mock = mocker.patch("scripts.generate_pipeline.open", mocker.mock_open()) 59 | git_diff_conditional_mock = setup_git_diff_conditional_mock( 60 | mocker, {"steps": [{"label": "test"}]} 61 | ) 62 | 63 | handler() 64 | 65 | # Tests 66 | assert logger.record_tuples == [ 67 | ("cli", 20, "Dynamic pipeline generated, saving for agent upload") 68 | ] 69 | get_diff_mock.assert_called_once_with() 70 | git_diff_conditional_mock.assert_called_once_with( 71 | get_diff_mock.return_value, PLUGIN_PREFIX 72 | ) 73 | open_mock.assert_called_once_with(".git_diff_conditional/pipeline_output", "w") 74 | log_and_exit_mock.assert_not_called() 75 | 76 | return None 77 | 78 | 79 | def test_handler_error_saving_pipeline( 80 | mocker, monkeypatch, logger, log_and_exit_mock, get_diff_mock 81 | ): 82 | monkeypatch.setenv(f"{PLUGIN_PREFIX}_LOG_LEVEL", "DEBUG") 83 | 84 | open_mock = mocker.patch("scripts.generate_pipeline.open", side_effect=Exception) 85 | 86 | git_diff_conditional_mock = setup_git_diff_conditional_mock( 87 | mocker, 
{"steps": [{"label": "test"}]} 88 | ) 89 | 90 | handler() 91 | 92 | # Tests 93 | assert logger.record_tuples == [ 94 | ("cli", 20, "Dynamic pipeline generated, saving for agent upload") 95 | ] 96 | get_diff_mock.assert_called_once_with() 97 | git_diff_conditional_mock.assert_called_once_with( 98 | get_diff_mock.return_value, PLUGIN_PREFIX 99 | ) 100 | open_mock.assert_called_once_with(".git_diff_conditional/pipeline_output", "w") 101 | log_and_exit_mock.assert_called_once_with( 102 | "error", "error saving pipeline to disk", 1 103 | ) 104 | -------------------------------------------------------------------------------- /tests/unit/test_log_and_exit.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import pytest 4 | 5 | from scripts.generate_pipeline import log_and_exit 6 | 7 | 8 | @pytest.mark.parametrize("log_level", ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]) 9 | def test_log_level(log_level, mocker, logger): 10 | exit_mock = mocker.patch("scripts.generate_pipeline.sys.exit", side_effect=None) 11 | 12 | exit_code = 0 13 | log_message = "TEST" 14 | 15 | log_and_exit(log_level, log_message, exit_code) 16 | 17 | # Tests 18 | assert len(logger.record_tuples) == 1 19 | assert logger.record_tuples[0] == ("cli", getattr(logging, log_level), log_message) 20 | exit_mock.assert_called_with(exit_code) 21 | 22 | 23 | @pytest.mark.parametrize("exit_code", range(0, 255)) 24 | def test_exit_code(exit_code, mocker, logger): 25 | exit_mock = mocker.patch("scripts.generate_pipeline.sys.exit", side_effect=None) 26 | log_message = "TEST" 27 | 28 | log_and_exit("info", log_message, exit_code) 29 | 30 | # Tests 31 | exit_mock.assert_called_with(exit_code) 32 | assert logger.record_tuples 33 | --------------------------------------------------------------------------------