├── .github ├── RELEASE.md └── workflows │ ├── build.yml │ └── release.yml ├── .gitignore ├── .gitlab-ci.yml ├── Cargo.lock ├── Cargo.toml ├── GITLAB_USAGE.md ├── LICENSE ├── README.md ├── cliff.toml ├── crates ├── README.md ├── evaluator │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── executor │ ├── Cargo.toml │ └── src │ │ ├── dependency.rs │ │ ├── docker.rs │ │ ├── docker_test.rs │ │ ├── engine.rs │ │ ├── environment.rs │ │ ├── lib.rs │ │ └── substitution.rs ├── github │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── gitlab │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── logging │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── matrix │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── models │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── parser │ ├── Cargo.toml │ └── src │ │ ├── gitlab.rs │ │ ├── lib.rs │ │ ├── schema.rs │ │ └── workflow.rs ├── runtime │ ├── Cargo.toml │ └── src │ │ ├── container.rs │ │ ├── emulation.rs │ │ ├── emulation_test.rs │ │ └── lib.rs ├── ui │ ├── Cargo.toml │ └── src │ │ ├── app │ │ ├── mod.rs │ │ └── state.rs │ │ ├── components │ │ ├── button.rs │ │ ├── checkbox.rs │ │ ├── mod.rs │ │ └── progress_bar.rs │ │ ├── handlers │ │ ├── mod.rs │ │ └── workflow.rs │ │ ├── lib.rs │ │ ├── models │ │ └── mod.rs │ │ ├── utils │ │ └── mod.rs │ │ └── views │ │ ├── execution_tab.rs │ │ ├── help_overlay.rs │ │ ├── job_detail.rs │ │ ├── logs_tab.rs │ │ ├── mod.rs │ │ ├── status_bar.rs │ │ ├── title_bar.rs │ │ └── workflows_tab.rs ├── utils │ ├── Cargo.toml │ └── src │ │ └── lib.rs ├── validators │ ├── Cargo.toml │ └── src │ │ ├── actions.rs │ │ ├── gitlab.rs │ │ ├── jobs.rs │ │ ├── lib.rs │ │ ├── matrix.rs │ │ ├── steps.rs │ │ └── triggers.rs └── wrkflw │ ├── Cargo.toml │ └── src │ ├── lib.rs │ └── main.rs ├── demo.cast ├── demo.gif ├── schemas ├── github-workflow.json └── gitlab-ci.json ├── test-workflows ├── 1-basic-workflow.yml ├── 2-reusable-workflow-caller.yml ├── 3-reusable-workflow-definition.yml ├── 4-mixed-jobs.yml ├── 5-no-name-reusable-caller.yml ├── 
6-invalid-reusable-format.yml ├── 7-invalid-regular-job.yml ├── 8-cyclic-dependencies.yml ├── cpp-test.yml ├── example.yml ├── matrix-example.yml ├── node-test.yml ├── python-test.yml ├── rust-test.yml ├── test.yml └── trigger_gitlab.sh ├── test_gitlab_ci ├── .gitlab │ └── ci │ │ ├── build.yml │ │ └── test.yml ├── advanced.gitlab-ci.yml ├── basic.gitlab-ci.yml ├── docker.gitlab-ci.yml ├── includes.gitlab-ci.yml ├── invalid.gitlab-ci.yml ├── minimal.gitlab-ci.yml ├── services.gitlab-ci.yml └── workflow.gitlab-ci.yml ├── tests ├── README.md ├── cleanup_test.rs ├── matrix_test.rs └── reusable_workflow_test.rs ├── trigger.cast └── trigger.gif /.github/RELEASE.md: -------------------------------------------------------------------------------- 1 | # Release Process 2 | 3 | This document outlines the steps for creating a new release of wrkflw. 4 | 5 | ## Automatic Release Process 6 | 7 | The project uses a GitHub Actions workflow to automate the release process. Here's how it works: 8 | 9 | 1. Tag a new version with Git: 10 | ```bash 11 | git tag -a v0.x.y -m "Release v0.x.y" 12 | ``` 13 | 14 | 2. Push the tag to GitHub: 15 | ```bash 16 | git push origin v0.x.y 17 | ``` 18 | 19 | 3. 
The GitHub Actions workflow will automatically: 20 | - Build release binaries for multiple platforms (Linux, macOS, Windows) 21 | - Generate a changelog using git-cliff 22 | - Create a GitHub release with the changelog and binaries 23 | - Upload the release artifacts 24 | 25 | ## Commit Message Format 26 | 27 | To ensure proper changelog generation, please follow the conventional commit format for your commit messages: 28 | 29 | - `feat: add new feature` - for new features 30 | - `fix: resolve issue` - for bug fixes 31 | - `docs: update documentation` - for documentation updates 32 | - `style: format code` - for code style changes (no functional changes) 33 | - `refactor: improve code structure` - for code refactoring 34 | - `perf: improve performance` - for performance improvements 35 | - `test: add or update tests` - for test updates 36 | - `chore: update dependencies` - for maintenance tasks 37 | 38 | The changelog will be organized based on these commit types. 39 | 40 | ## Manual Release Steps (if needed) 41 | 42 | If you need to create a release manually: 43 | 44 | 1. Build the release binaries: 45 | ```bash 46 | cargo build --release 47 | ``` 48 | 49 | 2. Generate a changelog: 50 | ```bash 51 | git cliff --latest > CHANGELOG.md 52 | ``` 53 | 54 | 3. Create a new release on GitHub manually and upload the binaries. 
55 | 56 | ## Configuration 57 | 58 | - `cliff.toml` - Configuration for git-cliff to generate changelogs 59 | - `.github/workflows/release.yml` - GitHub Actions workflow for releases -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: [ main ] 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | name: Build 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: [ubuntu-latest, macos-latest] 16 | include: 17 | - os: ubuntu-latest 18 | target: x86_64-unknown-linux-gnu 19 | - os: macos-latest 20 | target: x86_64-apple-darwin 21 | 22 | steps: 23 | - name: Checkout code 24 | uses: actions/checkout@v3 25 | 26 | - name: Setup Rust 27 | uses: actions-rs/toolchain@v1 28 | with: 29 | profile: minimal 30 | toolchain: stable 31 | target: ${{ matrix.target }} 32 | override: true 33 | components: clippy, rustfmt 34 | 35 | - name: Check formatting 36 | uses: actions-rs/cargo@v1 37 | with: 38 | command: fmt 39 | args: -- --check 40 | 41 | - name: Run clippy 42 | uses: actions-rs/cargo@v1 43 | with: 44 | command: clippy 45 | args: -- -D warnings 46 | 47 | - name: Build 48 | uses: actions-rs/cargo@v1 49 | with: 50 | command: build 51 | args: --target ${{ matrix.target }} 52 | 53 | - name: Run tests 54 | uses: actions-rs/cargo@v1 55 | with: 56 | command: test 57 | args: --target ${{ matrix.target }} -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | workflow_dispatch: 8 | inputs: 9 | version: 10 | description: 'Version to use (e.g. 
v1.0.0)' 11 | required: true 12 | default: 'test-release' 13 | 14 | # Add permissions at workflow level 15 | permissions: 16 | contents: write 17 | 18 | jobs: 19 | create-release: 20 | name: Create Release 21 | runs-on: ubuntu-latest 22 | # You can also set permissions at the job level if needed 23 | # permissions: 24 | # contents: write 25 | outputs: 26 | upload_url: ${{ steps.create_release.outputs.upload_url }} 27 | steps: 28 | - name: Checkout code 29 | uses: actions/checkout@v3 30 | with: 31 | fetch-depth: 0 32 | 33 | - name: Setup Rust 34 | uses: actions-rs/toolchain@v1 35 | with: 36 | profile: minimal 37 | toolchain: stable 38 | override: true 39 | 40 | - name: Install git-cliff 41 | run: | 42 | cargo install git-cliff --force 43 | 44 | - name: Generate Changelog 45 | run: git-cliff --latest --output CHANGELOG.md 46 | 47 | - name: Create Release 48 | id: create_release 49 | uses: softprops/action-gh-release@v1 50 | with: 51 | name: "wrkflw ${{ github.event.inputs.version || github.ref_name }}" 52 | body_path: CHANGELOG.md 53 | draft: false 54 | prerelease: false 55 | tag_name: ${{ github.event.inputs.version || github.ref_name }} 56 | env: 57 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 58 | 59 | build-release: 60 | name: Build Release 61 | needs: [create-release] 62 | runs-on: ${{ matrix.os }} 63 | # You can also set permissions at the job level if needed 64 | # permissions: 65 | # contents: write 66 | strategy: 67 | matrix: 68 | include: 69 | - os: ubuntu-latest 70 | target: x86_64-unknown-linux-gnu 71 | artifact_name: wrkflw 72 | asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-linux-x86_64 73 | - os: macos-latest 74 | target: x86_64-apple-darwin 75 | artifact_name: wrkflw 76 | asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name }}-macos-x86_64 77 | - os: macos-latest 78 | target: aarch64-apple-darwin 79 | artifact_name: wrkflw 80 | asset_name: wrkflw-${{ github.event.inputs.version || github.ref_name 
}}-macos-arm64 81 | 82 | steps: 83 | - name: Checkout code 84 | uses: actions/checkout@v3 85 | 86 | - name: Setup Rust 87 | uses: actions-rs/toolchain@v1 88 | with: 89 | profile: minimal 90 | toolchain: stable 91 | target: ${{ matrix.target }} 92 | override: true 93 | 94 | - name: Build Release Binary 95 | uses: actions-rs/cargo@v1 96 | with: 97 | command: build 98 | args: --release --target ${{ matrix.target }} 99 | 100 | - name: Compress Release Binary (Unix) 101 | if: runner.os != 'Windows' 102 | run: | 103 | mkdir -p compressed 104 | cp target/${{ matrix.target }}/release/${{ matrix.artifact_name }} compressed/ 105 | cd compressed 106 | tar czvf ${{ matrix.asset_name }}.tar.gz ${{ matrix.artifact_name }} 107 | echo "ASSET=${{ matrix.asset_name }}.tar.gz" >> $GITHUB_ENV 108 | echo "ASSET_PATH=compressed/${{ matrix.asset_name }}.tar.gz" >> $GITHUB_ENV 109 | 110 | - name: Compress Release Binary (Windows) 111 | if: runner.os == 'Windows' 112 | run: | 113 | mkdir -p compressed 114 | copy target\${{ matrix.target }}\release\${{ matrix.artifact_name }} compressed\ 115 | cd compressed 116 | 7z a ${{ matrix.asset_name }}.zip ${{ matrix.artifact_name }} 117 | echo "ASSET=${{ matrix.asset_name }}.zip" >> $env:GITHUB_ENV 118 | echo "ASSET_PATH=compressed\${{ matrix.asset_name }}.zip" >> $env:GITHUB_ENV 119 | shell: pwsh 120 | 121 | - name: Upload Release Asset 122 | uses: softprops/action-gh-release@v1 123 | with: 124 | files: ${{ env.ASSET_PATH }} 125 | tag_name: ${{ github.event.inputs.version || github.ref_name }} 126 | env: 127 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | # GitLab CI/CD Pipeline for 
wrkflw 2 | # This pipeline will build and test the Rust project 3 | 4 | stages: 5 | - build 6 | - test 7 | - deploy 8 | 9 | variables: 10 | RUST_VERSION: "1.70.0" 11 | CARGO_TERM_COLOR: always 12 | 13 | # Cache settings 14 | cache: 15 | key: "$CI_COMMIT_REF_SLUG" 16 | paths: 17 | - target/ 18 | script: 19 | - echo "This is a placeholder - the cache directive doesn't need a script" 20 | 21 | # Lint job - runs rustfmt and clippy 22 | lint: 23 | stage: test 24 | image: rust:${RUST_VERSION} 25 | script: 26 | - rustup component add clippy 27 | - cargo clippy -- -D warnings 28 | allow_failure: true 29 | 30 | # Build job - builds the application 31 | build: 32 | stage: build 33 | image: rust:${RUST_VERSION} 34 | script: 35 | - cargo build --verbose 36 | artifacts: 37 | paths: 38 | - target/debug 39 | expire_in: 1 week 40 | 41 | # Test job - runs unit and integration tests 42 | test: 43 | stage: test 44 | image: rust:${RUST_VERSION} 45 | script: 46 | - cargo test --verbose 47 | dependencies: 48 | - build 49 | 50 | # Release job - creates a release build 51 | release: 52 | stage: deploy 53 | image: rust:${RUST_VERSION} 54 | script: 55 | - cargo build --release --verbose 56 | artifacts: 57 | paths: 58 | - target/release/wrkflw 59 | expire_in: 1 month 60 | rules: 61 | - if: $CI_PIPELINE_SOURCE == "web" && $BUILD_RELEASE == "true" 62 | when: always 63 | - if: $CI_COMMIT_TAG 64 | when: always 65 | - when: never 66 | 67 | # Custom job for documentation 68 | docs: 69 | stage: deploy 70 | image: rust:${RUST_VERSION} 71 | script: 72 | - cargo doc --no-deps 73 | - mkdir -p public 74 | - cp -r target/doc/* public/ 75 | artifacts: 76 | paths: 77 | - public 78 | only: 79 | - main 80 | 81 | format: 82 | stage: test 83 | image: rust:${RUST_VERSION} 84 | script: 85 | - rustup component add rustfmt 86 | - cargo fmt --check 87 | allow_failure: true 88 | 89 | pages: 90 | stage: deploy 91 | image: rust:${RUST_VERSION} 92 | script: 93 | - cargo doc --no-deps 94 | - mkdir -p public 95 | - cp -r 
target/doc/* public/ 96 | artifacts: 97 | paths: 98 | - public 99 | only: 100 | - main -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "crates/*" 4 | ] 5 | resolver = "2" 6 | 7 | [workspace.package] 8 | version = "0.4.0" 9 | edition = "2021" 10 | description = "A GitHub Actions workflow validator and executor" 11 | documentation = "https://github.com/bahdotsh/wrkflw" 12 | homepage = "https://github.com/bahdotsh/wrkflw" 13 | repository = "https://github.com/bahdotsh/wrkflw" 14 | keywords = ["workflows", "github", "local"] 15 | categories = ["command-line-utilities"] 16 | license = "MIT" 17 | 18 | [workspace.dependencies] 19 | clap = { version = "4.3", features = ["derive"] } 20 | colored = "2.0" 21 | serde = { version = "1.0", features = ["derive"] } 22 | serde_yaml = "0.9" 23 | serde_json = "1.0" 24 | jsonschema = "0.17" 25 | tokio = { version = "1.28", features = ["full"] } 26 | async-trait = "0.1" 27 | bollard = "0.14" 28 | futures-util = "0.3" 29 | futures = "0.3" 30 | chrono = "0.4" 31 | uuid = { version = "1.3", features = ["v4"] } 32 | tempfile = "3.6" 33 | tar = "0.4" 34 | dirs = "5.0" 35 | thiserror = "1.0" 36 | log = "0.4" 37 | which = "4.4" 38 | crossterm = "0.26.1" 39 | ratatui = { version = "0.23.0", features = ["crossterm"] } 40 | once_cell = "1.19.0" 41 | itertools = "0.11.0" 42 | indexmap = { version = "2.0.0", features = ["serde"] } 43 | rayon = "1.7.0" 44 | num_cpus = "1.16.0" 45 | regex = "1.10" 46 | lazy_static = "1.4" 47 | reqwest = { version = "0.11", features = ["json"] } 48 | libc = "0.2" 49 | nix = { version = "0.27.1", features = ["fs"] } 50 | urlencoding = "2.1.3" 51 | 52 | [profile.release] 53 | codegen-units = 1 54 | lto = true 55 | -------------------------------------------------------------------------------- /GITLAB_USAGE.md: 
-------------------------------------------------------------------------------- 1 | # Using wrkflw with GitLab Pipelines 2 | 3 | This guide explains how to use the `wrkflw` tool to trigger GitLab CI/CD pipelines. 4 | 5 | ## Prerequisites 6 | 7 | 1. A GitLab repository with a `.gitlab-ci.yml` file 8 | 2. A GitLab personal access token with API access 9 | 3. `wrkflw` installed on your system 10 | 11 | ## Setting Up 12 | 13 | 1. Create a GitLab personal access token: 14 | - Go to GitLab > User Settings > Access Tokens 15 | - Create a token with `api` scope 16 | - Copy the token value 17 | 18 | 2. Set the token as an environment variable: 19 | ```bash 20 | export GITLAB_TOKEN=your_token_here 21 | ``` 22 | 23 | ## Triggering a Pipeline 24 | 25 | You can trigger a GitLab pipeline using the `trigger-gitlab` command: 26 | 27 | ```bash 28 | # Trigger using the default branch 29 | wrkflw trigger-gitlab 30 | 31 | # Trigger on a specific branch 32 | wrkflw trigger-gitlab --branch feature-branch 33 | 34 | # Trigger with custom variables 35 | wrkflw trigger-gitlab --variable BUILD_RELEASE=true 36 | ``` 37 | 38 | ### Example: Triggering a Release Build 39 | 40 | To trigger the release build job in our sample pipeline: 41 | 42 | ```bash 43 | wrkflw trigger-gitlab --variable BUILD_RELEASE=true 44 | ``` 45 | 46 | This will set the `BUILD_RELEASE` variable to `true`, which activates the release job in our sample pipeline. 47 | 48 | ### Example: Building Documentation 49 | 50 | To trigger the documentation build job: 51 | 52 | ```bash 53 | wrkflw trigger-gitlab --variable BUILD_DOCS=true 54 | ``` 55 | 56 | ## Controlling Job Execution with Variables 57 | 58 | Our sample GitLab pipeline is configured to make certain jobs conditional based on variables. 
You can use the `--variable` flag to control which jobs run: 59 | 60 | | Variable | Purpose | 61 | |----------|---------| 62 | | `BUILD_RELEASE` | Set to `true` to run the release job | 63 | | `BUILD_DOCS` | Set to `true` to build documentation | 64 | 65 | ## Checking Pipeline Status 66 | 67 | After triggering a pipeline, you can check its status directly on GitLab: 68 | 69 | 1. Navigate to your GitLab repository 70 | 2. Go to CI/CD > Pipelines 71 | 3. Find your recently triggered pipeline 72 | 73 | The `wrkflw` command will also provide a direct URL to the pipeline after triggering. 74 | 75 | ## Troubleshooting 76 | 77 | If you encounter issues: 78 | 79 | 1. Verify your GitLab token is set correctly 80 | 2. Check that you're in a repository with a valid GitLab remote URL 81 | 3. Ensure your `.gitlab-ci.yml` file is valid 82 | 4. Check that your GitLab token has API access permissions 83 | 5. Review GitLab's CI/CD pipeline logs for detailed error information -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Gokul 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | [changelog] 2 | # The header of the changelog 3 | header = """ 4 | # Changelog 5 | 6 | All notable changes to wrkflw will be documented in this file. 7 | """ 8 | 9 | # Template for the changelog body 10 | body = """ 11 | {%- macro remote_url() -%} 12 | https://github.com/bahdotsh/wrkflw 13 | {%- endmacro -%} 14 | 15 | {% macro print_commit(commit) -%} 16 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 17 | {% if commit.breaking %}[**breaking**] {% endif %}\ 18 | {{ commit.message | upper_first }} - \ 19 | ([{{ commit.id | truncate(length=7, end="") }}]({{ self::remote_url() }}/commit/{{ commit.id }}))\ 20 | {% endmacro -%} 21 | 22 | {% if version %}\ 23 | {% if previous.version %}\ 24 | ## [{{ version | trim_start_matches(pat="v") }}]\ 25 | ({{ self::remote_url() }}/compare/{{ previous.version }}..{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }} 26 | {% else %}\ 27 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 28 | {% endif %}\ 29 | {% else %}\ 30 | ## [unreleased] 31 | {% endif %}\ 32 | 33 | {% for group, commits in commits | group_by(attribute="group") %} 34 | ### {{ group | striptags | trim | upper_first }} 35 | {% for commit in commits 36 | | filter(attribute="scope") 37 | | sort(attribute="scope") %} 38 | {{ 
self::print_commit(commit=commit) }} 39 | {%- endfor %} 40 | {% for commit in commits %} 41 | {%- if not commit.scope -%} 42 | {{ self::print_commit(commit=commit) }} 43 | {% endif -%} 44 | {% endfor -%} 45 | {% endfor -%} 46 | {%- if github -%} 47 | {% if github.contributors | filter(attribute="is_first_time", value=true) | length != 0 %} 48 | ## New Contributors ❤️ 49 | {% endif %}\ 50 | {% for contributor in github.contributors | filter(attribute="is_first_time", value=true) %} 51 | * @{{ contributor.username }} made their first contribution 52 | {%- if contributor.pr_number %} in \ 53 | [#{{ contributor.pr_number }}]({{ self::remote_url() }}/pull/{{ contributor.pr_number }}) \ 54 | {%- endif %} 55 | {%- endfor -%} 56 | {%- endif %} 57 | 58 | 59 | """ 60 | 61 | # Remove the leading and trailing whitespace from the template 62 | trim = true 63 | 64 | # The footer of the changelog 65 | footer = """ 66 | 67 | """ 68 | 69 | # This determines how the links to commits are formatted 70 | [git] 71 | conventional_commits = true 72 | filter_unconventional = true 73 | commit_parsers = [ 74 | { message = "^feat", group = "⛰️ Features" }, 75 | { message = "^fix", group = "🐛 Bug Fixes" }, 76 | { message = "^doc", group = "📚 Documentation" }, 77 | { message = "^perf", group = "⚡ Performance" }, 78 | { message = "^refactor\\(clippy\\)", skip = true }, 79 | { message = "^refactor", group = "🚜 Refactor" }, 80 | { message = "^style", group = "🎨 Styling" }, 81 | { message = "^test", group = "🧪 Testing" }, 82 | { message = "^chore\\(release\\): prepare for", skip = true }, 83 | { message = "^chore\\(deps.*\\)", skip = true }, 84 | { message = "^chore\\(pr\\)", skip = true }, 85 | { message = "^chore\\(pull\\)", skip = true }, 86 | { message = "^chore\\(npm\\).*yarn\\.lock", skip = true }, 87 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 88 | { body = ".*security", group = "🛡️ Security" }, 89 | { message = "^revert", group = "◀️ Revert" }, 90 | ] 91 | 92 | # Define 
the GitHub repository URL for commit links 93 | [git.link] 94 | # Format: https://github.com/USER/REPO/commit/{} 95 | commit_link = "https://github.com/bahdotsh/wrkflw/commit/{}" 96 | 97 | # Format of the git commit link 98 | link_parsers = [ 99 | { pattern = "#(\\d+)", href = "https://github.com/bahdotsh/wrkflw/issues/$1" }, 100 | ] 101 | 102 | filter_commits = true 103 | tag_pattern = "v[0-9]*" 104 | ignore_tags = "" 105 | date_format = "%Y-%m-%d" 106 | sort_commits = "oldest" -------------------------------------------------------------------------------- /crates/README.md: -------------------------------------------------------------------------------- 1 | # Wrkflw Crates 2 | 3 | This directory contains the Rust crates that make up the Wrkflw project. The project has been restructured to use a workspace-based approach with individual crates for better modularity and maintainability. 4 | 5 | ## Crate Structure 6 | 7 | - **wrkflw**: Main binary crate and entry point for the application 8 | - **models**: Data models and structures used throughout the application 9 | - **evaluator**: Workflow evaluation functionality 10 | - **executor**: Workflow execution engine 11 | - **github**: GitHub API integration 12 | - **gitlab**: GitLab API integration 13 | - **logging**: Logging functionality 14 | - **matrix**: Matrix-based parallelization support 15 | - **parser**: Workflow parsing functionality 16 | - **runtime**: Runtime execution environment 17 | - **ui**: User interface components 18 | - **utils**: Utility functions 19 | - **validators**: Validation functionality 20 | 21 | ## Dependencies 22 | 23 | Each crate has its own `Cargo.toml` file that defines its dependencies. The root `Cargo.toml` file defines the workspace and shared dependencies. 
24 | 25 | ## Build Instructions 26 | 27 | To build the entire project: 28 | 29 | ```bash 30 | cargo build 31 | ``` 32 | 33 | To build a specific crate: 34 | 35 | ```bash 36 | cargo build -p 37 | ``` 38 | 39 | ## Testing 40 | 41 | To run tests for the entire project: 42 | 43 | ```bash 44 | cargo test 45 | ``` 46 | 47 | To run tests for a specific crate: 48 | 49 | ```bash 50 | cargo test -p 51 | ``` 52 | 53 | ## Rust Best Practices 54 | 55 | When contributing to wrkflw, please follow these Rust best practices: 56 | 57 | ### Code Organization 58 | 59 | - Place modules in their respective crates to maintain separation of concerns 60 | - Use `pub` selectively to expose only the necessary APIs 61 | - Follow the Rust module system conventions (use `mod` and `pub mod` appropriately) 62 | 63 | ### Errors and Error Handling 64 | 65 | - Prefer using the `thiserror` crate for defining custom error types 66 | - Use the `?` operator for error propagation instead of match statements when appropriate 67 | - Implement custom error types that provide context for the error 68 | - Avoid using `.unwrap()` and `.expect()` in production code 69 | 70 | ### Performance 71 | 72 | - Profile code before optimizing using tools like `cargo flamegraph` 73 | - Use `Arc` and `Mutex` judiciously for shared mutable state 74 | - Leverage Rust's zero-cost abstractions (iterators, closures) 75 | - Consider adding benchmark tests using the `criterion` crate for performance-critical code 76 | 77 | ### Security 78 | 79 | - Validate all input, especially from external sources 80 | - Avoid using `unsafe` code unless absolutely necessary 81 | - Handle secrets securely using environment variables 82 | - Check for integer overflows with `checked_` operations 83 | 84 | ### Testing 85 | 86 | - Write unit tests for all public functions 87 | - Use integration tests to verify crate-to-crate interactions 88 | - Consider property-based testing for complex logic 89 | - Structure tests with clear preparation, 
execution, and verification phases 90 | 91 | ### Tooling 92 | 93 | - Run `cargo clippy` before committing changes to catch common mistakes 94 | - Use `cargo fmt` to maintain consistent code formatting 95 | - Enable compiler warnings with `#![warn(clippy::all)]` 96 | 97 | For more detailed guidance, refer to the project's best practices documentation. -------------------------------------------------------------------------------- /crates/evaluator/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "evaluator" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "Workflow evaluation for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | validators = { path = "../validators" } 12 | 13 | # External dependencies 14 | colored.workspace = true 15 | serde_yaml.workspace = true -------------------------------------------------------------------------------- /crates/evaluator/src/lib.rs: -------------------------------------------------------------------------------- 1 | use colored::*; 2 | use serde_yaml::{self, Value}; 3 | use std::fs; 4 | use std::path::Path; 5 | 6 | use models::ValidationResult; 7 | use validators::{validate_jobs, validate_triggers}; 8 | 9 | pub fn evaluate_workflow_file(path: &Path, verbose: bool) -> Result { 10 | let content = fs::read_to_string(path).map_err(|e| format!("Failed to read file: {}", e))?; 11 | 12 | // Parse YAML content 13 | let workflow: Value = 14 | serde_yaml::from_str(&content).map_err(|e| format!("Invalid YAML: {}", e))?; 15 | 16 | let mut result = ValidationResult::new(); 17 | 18 | // Check for required structure 19 | if !workflow.is_mapping() { 20 | result.add_issue("Workflow file is not a valid YAML mapping".to_string()); 21 | return Ok(result); 22 | } 23 | 24 | // Check if name exists 25 | if workflow.get("name").is_none() { 26 | // Check if this might be a reusable 
workflow caller before reporting missing name 27 | let has_reusable_workflow_job = if let Some(Value::Mapping(jobs)) = workflow.get("jobs") { 28 | jobs.values().any(|job| { 29 | if let Some(job_config) = job.as_mapping() { 30 | job_config.contains_key(Value::String("uses".to_string())) 31 | } else { 32 | false 33 | } 34 | }) 35 | } else { 36 | false 37 | }; 38 | 39 | // Only report missing name if it's not a workflow with reusable workflow jobs 40 | if !has_reusable_workflow_job { 41 | result.add_issue("Workflow is missing a name".to_string()); 42 | } 43 | } 44 | 45 | // Check if jobs section exists 46 | match workflow.get("jobs") { 47 | Some(jobs) if jobs.is_mapping() => { 48 | validate_jobs(jobs, &mut result); 49 | } 50 | Some(_) => { 51 | result.add_issue("'jobs' section is not a mapping".to_string()); 52 | } 53 | None => { 54 | result.add_issue("Workflow is missing 'jobs' section".to_string()); 55 | } 56 | } 57 | 58 | // Check for valid triggers 59 | match workflow.get("on") { 60 | Some(on) => { 61 | validate_triggers(on, &mut result); 62 | } 63 | None => { 64 | result.add_issue("Workflow is missing 'on' section (triggers)".to_string()); 65 | } 66 | } 67 | 68 | if verbose && result.is_valid { 69 | println!( 70 | "{} Validated structure of workflow: {}", 71 | "✓".green(), 72 | path.display() 73 | ); 74 | } 75 | 76 | Ok(result) 77 | } 78 | -------------------------------------------------------------------------------- /crates/executor/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "executor" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "Workflow executor for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | parser = { path = "../parser" } 12 | runtime = { path = "../runtime" } 13 | logging = { path = "../logging" } 14 | matrix = { path = "../matrix" } 15 | utils = { path = "../utils" } 16 
| 17 | # External dependencies 18 | async-trait.workspace = true 19 | bollard.workspace = true 20 | chrono.workspace = true 21 | dirs.workspace = true 22 | futures.workspace = true 23 | futures-util.workspace = true 24 | lazy_static.workspace = true 25 | num_cpus.workspace = true 26 | once_cell.workspace = true 27 | regex.workspace = true 28 | serde.workspace = true 29 | serde_json.workspace = true 30 | serde_yaml.workspace = true 31 | tar.workspace = true 32 | tempfile.workspace = true 33 | thiserror.workspace = true 34 | tokio.workspace = true 35 | uuid.workspace = true 36 | -------------------------------------------------------------------------------- /crates/executor/src/dependency.rs: -------------------------------------------------------------------------------- 1 | use parser::workflow::WorkflowDefinition; 2 | use std::collections::{HashMap, HashSet}; 3 | 4 | pub fn resolve_dependencies(workflow: &WorkflowDefinition) -> Result>, String> { 5 | let jobs = &workflow.jobs; 6 | 7 | // Build adjacency list with String keys 8 | let mut dependencies: HashMap> = HashMap::new(); 9 | let mut dependents: HashMap> = HashMap::new(); 10 | 11 | // Initialize with empty dependencies 12 | for job_name in jobs.keys() { 13 | dependencies.insert(job_name.clone(), HashSet::new()); 14 | dependents.insert(job_name.clone(), HashSet::new()); 15 | } 16 | 17 | // Populate dependencies 18 | for (job_name, job) in jobs { 19 | if let Some(needs) = &job.needs { 20 | for needed_job in needs { 21 | if !jobs.contains_key(needed_job) { 22 | return Err(format!( 23 | "Job '{}' depends on non-existent job '{}'", 24 | job_name, needed_job 25 | )); 26 | } 27 | // Get mutable reference to the dependency set for this job, with error handling 28 | if let Some(deps) = dependencies.get_mut(job_name) { 29 | deps.insert(needed_job.clone()); 30 | } else { 31 | return Err(format!( 32 | "Internal error: Failed to update dependencies for job '{}'", 33 | job_name 34 | )); 35 | } 36 | 37 | // Get mutable 
reference to the dependents set for the needed job, with error handling 38 | if let Some(deps) = dependents.get_mut(needed_job) { 39 | deps.insert(job_name.clone()); 40 | } else { 41 | return Err(format!( 42 | "Internal error: Failed to update dependents for job '{}'", 43 | needed_job 44 | )); 45 | } 46 | } 47 | } 48 | } 49 | 50 | // Implement topological sort for execution ordering 51 | let mut result = Vec::new(); 52 | let mut no_dependencies: HashSet = dependencies 53 | .iter() 54 | .filter(|(_, deps)| deps.is_empty()) 55 | .map(|(job, _)| job.clone()) 56 | .collect(); 57 | 58 | // Process levels of the dependency graph 59 | while !no_dependencies.is_empty() { 60 | // Current level becomes a batch of jobs that can run in parallel 61 | let current_level: Vec = no_dependencies.iter().cloned().collect(); 62 | result.push(current_level); 63 | 64 | // For the next level 65 | let mut next_no_dependencies = HashSet::new(); 66 | 67 | for job in &no_dependencies { 68 | // For each dependent job of the current job 69 | // Get the set of dependents with error handling 70 | let dependent_jobs = match dependents.get(job) { 71 | Some(deps) => deps.clone(), 72 | None => { 73 | return Err(format!( 74 | "Internal error: Failed to find dependents for job '{}'", 75 | job 76 | )); 77 | } 78 | }; 79 | 80 | for dependent in dependent_jobs { 81 | // Remove the current job from its dependencies 82 | if let Some(deps) = dependencies.get_mut(&dependent) { 83 | deps.remove(job); 84 | 85 | // Check if it's empty now to determine if it should be in the next level 86 | if deps.is_empty() { 87 | next_no_dependencies.insert(dependent); 88 | } 89 | } else { 90 | return Err(format!( 91 | "Internal error: Failed to find dependencies for job '{}'", 92 | dependent 93 | )); 94 | } 95 | } 96 | } 97 | 98 | no_dependencies = next_no_dependencies; 99 | } 100 | 101 | // Check for circular dependencies 102 | let processed_jobs: HashSet = result 103 | .iter() 104 | .flat_map(|level| level.iter().cloned()) 
.collect();

    if processed_jobs.len() < jobs.len() {
        return Err("Circular dependency detected in workflow jobs".to_string());
    }

    Ok(result)
}
--------------------------------------------------------------------------------
/crates/executor/src/docker_test.rs:
--------------------------------------------------------------------------------
use bollard::Docker;
use std::{path::Path, sync::Arc};
use tokio::sync::Mutex;

// FIX: the original imported `crate::executor::{docker, RuntimeType}` and
// `crate::runtime::container`, but this file lives inside the `executor`
// crate (modules sit at the crate root per lib.rs) and `runtime` is a
// sibling crate, not a module of this one.
use crate::docker::{self, DockerRuntime};
use crate::engine::RuntimeType;
use runtime::container::{ContainerOutput, ContainerRuntime};

#[cfg(test)]
mod docker_cleanup_tests {
    use super::*;

    /// True when Docker-dependent tests should be skipped: either the
    /// `WRKFLW_TEST_SKIP_DOCKER` opt-out is set or no daemon is reachable.
    fn should_skip_docker_tests() -> bool {
        std::env::var("WRKFLW_TEST_SKIP_DOCKER").is_ok() || !docker::is_available()
    }

    /// Helper function to create a Docker container that should be tracked.
    /// Returns a tracked container id, or `None` when Docker is unavailable.
    async fn create_test_container(docker_client: &Docker) -> Option<String> {
        if should_skip_docker_tests() {
            return None;
        }

        // Try to create a container runtime
        let runtime = match DockerRuntime::new() {
            Ok(rt) => rt,
            Err(_) => return None,
        };

        // Run a simple container that finishes quickly
        let result = runtime
            .run_container(
                "alpine:latest",
                &["echo", "test"],
                &[],
                Path::new("/"),
                &[],
            )
            .await;

        // The container should be automatically removed by the runtime after
        // execution, but we can verify if it's tracked first.
        let running_containers = docker::get_tracked_containers();

        // Since run_container internally cleans up, we'll simulate tracking a container
        if let Some(container_id) = running_containers.first() {
            return Some(container_id.clone());
        }

        // Manually track a container for testing
        let container_id =
format!("test-container-{}", uuid::Uuid::new_v4());
        docker::track_container(&container_id);

        Some(container_id)
    }

    /// Helper: create a per-job Docker network and return its (tracked) id,
    /// or `None` when Docker is unavailable or creation fails.
    async fn create_test_network(docker_client: &Docker) -> Option<String> {
        if should_skip_docker_tests() {
            return None;
        }

        // Swallow creation errors into `None` so callers can simply skip.
        docker::create_job_network(docker_client).await.ok()
    }

    #[tokio::test]
    async fn test_docker_container_cleanup() {
        if should_skip_docker_tests() {
            println!("Docker tests disabled or Docker not available, skipping test");
            return;
        }

        // Connect to Docker
        let docker = match Docker::connect_with_local_defaults() {
            Ok(client) => client,
            Err(_) => {
                println!("Could not connect to Docker, skipping test");
                return;
            }
        };

        // Create a test container
        let container_id = match create_test_container(&docker).await {
            Some(id) => id,
            None => {
                println!("Could not create test container, skipping test");
                return;
            }
        };

        // The container must be tracked before cleanup runs...
        assert!(
            docker::get_tracked_containers().contains(&container_id),
            "Container should be tracked for cleanup"
        );

        docker::cleanup_containers(&docker).await;

        // ...and forgotten afterwards.
        assert!(
            !docker::get_tracked_containers().contains(&container_id),
            "Container should be removed from tracking after cleanup"
        );
    }

    #[tokio::test]
    async fn test_docker_network_cleanup() {
        if should_skip_docker_tests() {
            println!("Docker tests disabled or Docker not available, skipping test");
            return;
        }

        // Connect to Docker
        let docker = match Docker::connect_with_local_defaults() {
            Ok(client) => client,
            Err(_) => {
                println!("Could not connect to Docker, skipping test");
                return;
            }
        };

        // Create a test network
        let network_id = match create_test_network(&docker).await {
            Some(id) => id,
            None => {
                println!("Could not create test network, skipping test");
                return;
            }
        };

        // The network must be tracked before cleanup runs...
        assert!(
            docker::get_tracked_networks().contains(&network_id),
            "Network should be tracked for cleanup"
        );

        docker::cleanup_networks(&docker).await;

        // ...and forgotten afterwards.
        assert!(
            !docker::get_tracked_networks().contains(&network_id),
            "Network should be removed from tracking after cleanup"
        );
    }

    #[tokio::test]
    async fn test_full_resource_cleanup() {
        if should_skip_docker_tests() {
            println!("Docker tests disabled or Docker not available, skipping test");
            return;
        }

        // Connect to Docker
        let docker = match Docker::connect_with_local_defaults() {
            Ok(client) => client,
            Err(_) => {
                println!("Could not connect to Docker, skipping test");
                return;
            }
        };

        // Create one container and one network; either may yield `None`.
        let _ = create_test_container(&docker).await;
        let _ = create_test_network(&docker).await;

        // Count resources before cleanup
        let container_count = docker::get_tracked_containers().len();
        let network_count = docker::get_tracked_networks().len();

        // Ensure we have at least one resource to clean up
        if container_count == 0 && network_count == 0 {
            println!("No resources created for testing, skipping test");
            return;
        }

        // Run full cleanup
        docker::cleanup_resources(&docker).await;

        // Verify all resources are cleaned up
        assert_eq!(
            docker::get_tracked_containers().len(),
            0,
            "All containers should be cleaned up"
        );
        assert_eq!(
            docker::get_tracked_networks().len(),
            0,
            "All networks should be cleaned up"
        );
    }
}
--------------------------------------------------------------------------------
/crates/executor/src/environment.rs:
--------------------------------------------------------------------------------
use chrono::Utc;
use matrix::MatrixCombination;
use parser::workflow::WorkflowDefinition;
use serde_yaml::Value;
use std::{collections::HashMap, fs, io, path::Path};

/// Create the GitHub Actions command files (`output`, `env`, `path`,
/// `step_summary`) under `<workspace>/github`, each initialized empty.
pub fn setup_github_environment_files(workspace_dir: &Path) -> io::Result<()> {
    // Create necessary directories
    let github_dir = workspace_dir.join("github");
    fs::create_dir_all(&github_dir)?;

    // Each file starts out empty; steps append to them during execution.
    for file in ["output", "env", "path", "step_summary"] {
        fs::write(github_dir.join(file), "")?;
    }

    Ok(())
}

/// Build the map of GitHub-style environment variables describing the
/// current workflow run.
pub fn create_github_context(
    workflow: &WorkflowDefinition,
    workspace_dir: &Path,
) -> HashMap<String, String> {
    let mut env = HashMap::new();

    // Basic GitHub environment variables
    env.insert("GITHUB_WORKFLOW".to_string(), workflow.name.clone());
    env.insert("GITHUB_ACTION".to_string(), "run".to_string());
    env.insert("GITHUB_ACTOR".to_string(), "wrkflw".to_string());
env.insert("GITHUB_REPOSITORY".to_string(), get_repo_name()); 38 | env.insert("GITHUB_EVENT_NAME".to_string(), get_event_name(workflow)); 39 | env.insert("GITHUB_WORKSPACE".to_string(), get_workspace_path()); 40 | env.insert("GITHUB_SHA".to_string(), get_current_sha()); 41 | env.insert("GITHUB_REF".to_string(), get_current_ref()); 42 | 43 | // File paths for GitHub Actions 44 | env.insert( 45 | "GITHUB_OUTPUT".to_string(), 46 | workspace_dir 47 | .join("github") 48 | .join("output") 49 | .to_string_lossy() 50 | .to_string(), 51 | ); 52 | env.insert( 53 | "GITHUB_ENV".to_string(), 54 | workspace_dir 55 | .join("github") 56 | .join("env") 57 | .to_string_lossy() 58 | .to_string(), 59 | ); 60 | env.insert( 61 | "GITHUB_PATH".to_string(), 62 | workspace_dir 63 | .join("github") 64 | .join("path") 65 | .to_string_lossy() 66 | .to_string(), 67 | ); 68 | env.insert( 69 | "GITHUB_STEP_SUMMARY".to_string(), 70 | workspace_dir 71 | .join("github") 72 | .join("step_summary") 73 | .to_string_lossy() 74 | .to_string(), 75 | ); 76 | 77 | // Time-related variables 78 | let now = Utc::now(); 79 | env.insert("GITHUB_RUN_ID".to_string(), format!("{}", now.timestamp())); 80 | env.insert("GITHUB_RUN_NUMBER".to_string(), "1".to_string()); 81 | 82 | // Path-related variables 83 | env.insert("RUNNER_TEMP".to_string(), get_temp_dir()); 84 | env.insert("RUNNER_TOOL_CACHE".to_string(), get_tool_cache_dir()); 85 | 86 | env 87 | } 88 | 89 | /// Add matrix context variables to the environment 90 | pub fn add_matrix_context( 91 | env: &mut HashMap, 92 | matrix_combination: &MatrixCombination, 93 | ) { 94 | // Add each matrix parameter as an environment variable 95 | for (key, value) in &matrix_combination.values { 96 | let env_key = format!("MATRIX_{}", key.to_uppercase()); 97 | let env_value = value_to_string(value); 98 | env.insert(env_key, env_value); 99 | } 100 | 101 | // Also serialize the whole matrix as JSON for potential use 102 | if let Ok(json_value) = 
serde_json::to_string(&matrix_combination.values) {
        env.insert("MATRIX_CONTEXT".to_string(), json_value);
    }
}

/// Convert a `serde_yaml::Value` to a plain string suitable for an
/// environment variable (sequences and mappings are comma-joined).
fn value_to_string(value: &Value) -> String {
    match value {
        Value::String(s) => s.clone(),
        Value::Number(n) => n.to_string(),
        Value::Bool(b) => b.to_string(),
        Value::Sequence(seq) => seq
            .iter()
            .map(value_to_string)
            .collect::<Vec<String>>()
            .join(","),
        Value::Mapping(map) => map
            .iter()
            .map(|(k, v)| format!("{}={}", value_to_string(k), value_to_string(v)))
            .collect::<Vec<String>>()
            .join(","),
        // Null and any other variant render as the empty string.
        _ => String::new(),
    }
}

/// Best-effort `owner/repo` slug: parsed from `git remote get-url origin`
/// when possible, otherwise `wrkflw/<current-dir-name>`.
fn get_repo_name() -> String {
    if let Ok(output) = std::process::Command::new("git")
        .args(["remote", "get-url", "origin"])
        .output()
    {
        if output.status.success() {
            let url = String::from_utf8_lossy(&output.stdout);
            if let Some(repo) = extract_repo_from_url(&url) {
                return repo;
            }
        }
    }

    // Fallback to directory name
    let current_dir = std::env::current_dir().unwrap_or_default();
    format!(
        "wrkflw/{}",
        current_dir.file_name().unwrap_or_default().to_string_lossy()
    )
}

/// Extract an `owner/repo` slug from an SSH (`git@host:owner/repo.git`) or
/// HTTP(S) (`https://host/owner/repo.git`) git remote URL.
fn extract_repo_from_url(url: &str) -> Option<String> {
    let url = url.trim();

    // SSH form: everything after the single ':' is "owner/repo[.git]".
    if url.starts_with("git@") {
        let parts: Vec<&str> = url.split(':').collect();
        if parts.len() == 2 {
            return Some(parts[1].trim_end_matches(".git").to_string());
        }
    }

    // HTTP(S) form: "host/owner/repo[.git]" after the protocol.
    if url.starts_with("http") {
        let without_protocol = url.split("://").nth(1)?;
        let parts: Vec<&str> = without_protocol.split('/').collect();
        if parts.len() >= 3 {
            let owner = parts[1];
            let repo = parts[2].trim_end_matches(".git");
            return Some(format!("{}/{}", owner, repo));
        }
    }

    None
}

/// First trigger listed in the workflow's `on:` section, or
/// "workflow_dispatch" when none is present.
fn get_event_name(workflow: &WorkflowDefinition) -> String {
    workflow
        .on
        .first()
        .cloned()
        .unwrap_or_else(|| "workflow_dispatch".to_string())
}

/// The current working directory as a string (GITHUB_WORKSPACE).
fn get_workspace_path() -> String {
    std::env::current_dir()
        .unwrap_or_default()
        .to_string_lossy()
        .to_string()
}

/// Current HEAD commit SHA, or the all-zero SHA outside a git repository.
fn get_current_sha() -> String {
    if let Ok(output) = std::process::Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
    {
        if output.status.success() {
            return String::from_utf8_lossy(&output.stdout).trim().to_string();
        }
    }

    "0000000000000000000000000000000000000000".to_string()
}

/// Current branch as a full ref (`refs/heads/<branch>`); defaults to
/// `refs/heads/main` when git is unavailable or HEAD is detached.
fn get_current_ref() -> String {
    if let Ok(output) = std::process::Command::new("git")
        .args(["symbolic-ref", "--short", "HEAD"])
        .output()
    {
        if output.status.success() {
            return format!(
                "refs/heads/{}",
                String::from_utf8_lossy(&output.stdout).trim()
            );
        }
    }

    "refs/heads/main".to_string()
}

/// Scratch directory for the runner: `<system-temp>/wrkflw`.
fn get_temp_dir() -> String {
    std::env::temp_dir()
        .join("wrkflw")
        .to_string_lossy()
        .to_string()
}

/// Tool cache directory: `~/.wrkflw/tools`.
fn get_tool_cache_dir() -> String {
    dirs::home_dir()
        .unwrap_or_default()
        .join(".wrkflw")
        .join("tools")
        .to_string_lossy()
        .to_string()
}
--------------------------------------------------------------------------------
/crates/executor/src/lib.rs:
--------------------------------------------------------------------------------
// executor crate

#![allow(unused_variables, unused_assignments)]

pub mod dependency;
pub mod docker;
pub mod engine;
pub mod environment;
pub mod substitution;

// Re-export the crate's public execution API at the root.
pub use docker::cleanup_resources;
pub use engine::{execute_workflow, JobResult, JobStatus, RuntimeType, StepResult, StepStatus};
--------------------------------------------------------------------------------
/crates/executor/src/substitution.rs:
--------------------------------------------------------------------------------
use lazy_static::lazy_static;
use regex::Regex;
use serde_yaml::Value;
use std::collections::HashMap;

lazy_static! {
    // Matches GitHub expression syntax for matrix values: `${{ matrix.<name> }}`.
    static ref MATRIX_PATTERN: Regex =
        Regex::new(r"\$\{\{\s*matrix\.([a-zA-Z0-9_]+)\s*\}\}").unwrap();
}

/// Replace `${{ matrix.<name> }}` references in `command` with the matching
/// scalar value from `matrix_values`. Unknown or non-scalar references are
/// kept in place but `$`-escaped so the shell does not try to expand them.
#[allow(dead_code)]
pub fn preprocess_command(command: &str, matrix_values: &HashMap<String, Value>) -> String {
    MATRIX_PATTERN
        .replace_all(command, |caps: &regex::Captures| {
            let var_name = &caps[1];
            match matrix_values.get(var_name) {
                Some(Value::String(s)) => s.clone(),
                Some(Value::Number(n)) => n.to_string(),
                Some(Value::Bool(b)) => b.to_string(),
                // Missing or non-scalar: keep the expression, escape `$`.
                _ => format!("\\${{{{ matrix.{} }}}}", var_name),
            }
        })
        .into_owned()
}

/// Apply variable substitution to step run commands
#[allow(dead_code)]
pub fn process_step_run(run: &str, matrix_combination: &Option<HashMap<String, Value>>) -> String {
    match matrix_combination {
        // With a matrix: substitute real values.
        Some(matrix) => preprocess_command(run, matrix),
        // Without one: escape `$` in GitHub expression syntax so the shell
        // does not misinterpret it.
        None => MATRIX_PATTERN
            .replace_all(run, |caps: &regex::Captures| {
                format!("\\${{{{ matrix.{} }}}}", &caps[1])
            })
            .to_string(),
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_preprocess_simple_matrix_vars() {
        let mut matrix = HashMap::new();
        matrix.insert("os".to_string(), Value::String("ubuntu-latest".to_string()));
        matrix.insert(
            "node".to_string(),
            Value::Number(serde_yaml::Number::from(14)),
        );

        let cmd = "echo \"Running on ${{ matrix.os }} with Node ${{ matrix.node }}\"";
        let processed = preprocess_command(cmd, &matrix);

        assert_eq!(processed, "echo \"Running on ubuntu-latest with Node 14\"");
    }

    #[test]
    fn test_preprocess_with_missing_vars() {
        let mut matrix = HashMap::new();
        matrix.insert("os".to_string(), Value::String("ubuntu-latest".to_string()));

        let cmd = "echo \"Running on ${{ matrix.os }} with Node ${{ matrix.node }}\"";
        let processed = preprocess_command(cmd, &matrix);

        // Missing vars should be escaped
        assert_eq!(
            processed,
            "echo \"Running on ubuntu-latest with Node \\${{ matrix.node }}\""
        );
    }

    #[test]
    fn test_preprocess_preserves_other_text() {
        let mut matrix = HashMap::new();
        matrix.insert("os".to_string(), Value::String("ubuntu-latest".to_string()));

        let cmd = "echo \"Starting job\" && echo \"OS: ${{ matrix.os }}\" && echo \"Done!\"";
        let processed = preprocess_command(cmd, &matrix);

        assert_eq!(
            processed,
            "echo \"Starting job\" && echo \"OS: ubuntu-latest\" && echo \"Done!\""
        );
99 | } 100 | 101 | #[test] 102 | fn test_process_without_matrix() { 103 | let cmd = "echo \"Value: ${{ matrix.value }}\""; 104 | let processed = process_step_run(cmd, &None); 105 | 106 | assert_eq!(processed, "echo \"Value: \\${{ matrix.value }}\""); 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /crates/github/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "github" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "github functionality for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Add other crate dependencies as needed 10 | models = { path = "../models" } 11 | 12 | # External dependencies from workspace 13 | serde.workspace = true 14 | serde_yaml.workspace = true 15 | serde_json.workspace = true 16 | reqwest.workspace = true 17 | thiserror.workspace = true 18 | lazy_static.workspace = true 19 | regex.workspace = true 20 | -------------------------------------------------------------------------------- /crates/gitlab/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "gitlab" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "gitlab functionality for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | 12 | # External dependencies 13 | lazy_static.workspace = true 14 | regex.workspace = true 15 | reqwest.workspace = true 16 | serde.workspace = true 17 | serde_yaml.workspace = true 18 | serde_json.workspace = true 19 | thiserror.workspace = true 20 | urlencoding.workspace = true 21 | -------------------------------------------------------------------------------- /crates/logging/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "logging" 3 | version.workspace = 
true
edition.workspace = true
description = "logging functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
chrono.workspace = true
once_cell.workspace = true
serde.workspace = true
serde_yaml.workspace = true
--------------------------------------------------------------------------------
/crates/logging/src/lib.rs:
--------------------------------------------------------------------------------
use chrono::Local;
use once_cell::sync::Lazy;
use std::sync::{Arc, Mutex};

// Thread-safe, append-only log storage shared by the whole process.
static LOGS: Lazy<Arc<Mutex<Vec<String>>>> = Lazy::new(|| Arc::new(Mutex::new(Vec::new())));

// Currently active log level; messages below it are stored but not printed.
static LOG_LEVEL: Lazy<Arc<Mutex<LogLevel>>> = Lazy::new(|| Arc::new(Mutex::new(LogLevel::Info)));

/// Log severity, ordered from least (`Debug`) to most (`Error`) severe.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum LogLevel {
    Debug,
    Info,
    Warning,
    Error,
}

impl LogLevel {
    /// Emoji prefix used when formatting a message at this level.
    fn prefix(&self) -> &'static str {
        match self {
            LogLevel::Debug => "🔍",
            LogLevel::Info => "ℹ️",
            LogLevel::Warning => "⚠️",
            LogLevel::Error => "❌",
        }
    }
}

/// Set the global log level (silently ignored on a poisoned lock).
pub fn set_log_level(level: LogLevel) {
    if let Ok(mut current_level) = LOG_LEVEL.lock() {
        *current_level = level;
    }
}

/// Get the global log level, defaulting to `Info` on a poisoned lock.
pub fn get_log_level() -> LogLevel {
    match LOG_LEVEL.lock() {
        Ok(level) => *level,
        Err(_) => LogLevel::Info,
    }
}

/// Record a message (always stored) and echo it to the console when `level`
/// is at or above the current log level; errors and warnings go to stderr.
pub fn log(level: LogLevel, message: &str) {
    let timestamp = Local::now().format("%H:%M:%S").to_string();

    // Always include timestamp in [HH:MM:SS] format to ensure consistency
    let formatted = format!("[{}] {} {}", timestamp, level.prefix(), message);

    if let Ok(mut logs) = LOGS.lock() {
        logs.push(formatted.clone());
    }

    // Print only when the message level is >= the current log level, so
    // Debug messages show up solely when the Debug level is set.
    if let Ok(current_level) = LOG_LEVEL.lock() {
        if level >= *current_level {
            match level {
                LogLevel::Error | LogLevel::Warning => eprintln!("{}", formatted),
                _ => println!("{}", formatted),
            }
        }
    }
}

/// Snapshot of every message logged so far.
pub fn get_logs() -> Vec<String> {
    match LOGS.lock() {
        Ok(logs) => logs.clone(),
        Err(_) => {
            // If we can't access logs, surface a timestamped error entry.
            let timestamp = Local::now().format("%H:%M:%S").to_string();
            vec![format!("[{}] ❌ Error accessing logs", timestamp)]
        }
    }
}

/// Drop all stored log messages.
#[allow(dead_code)]
pub fn clear_logs() {
    if let Ok(mut logs) = LOGS.lock() {
        logs.clear();
    }
}

// Convenience wrappers, one per level.
#[allow(dead_code)]
pub fn debug(message: &str) {
    log(LogLevel::Debug, message);
}

pub fn info(message: &str) {
    log(LogLevel::Info, message);
}

pub fn warning(message: &str) {
    log(LogLevel::Warning, message);
}

pub fn error(message: &str) {
    log(LogLevel::Error, message);
}
--------------------------------------------------------------------------------
/crates/matrix/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "matrix"
version.workspace = true
edition.workspace = true
description = "matrix functionality for wrkflw"
license.workspace = true

[dependencies]
# Internal crates
models = { path = "../models" }

# External dependencies
indexmap.workspace = true
serde.workspace = true
serde_yaml.workspace = true 16 | thiserror.workspace = true 17 | -------------------------------------------------------------------------------- /crates/matrix/src/lib.rs: -------------------------------------------------------------------------------- 1 | // matrix crate 2 | 3 | use indexmap::IndexMap; 4 | use serde::{Deserialize, Serialize}; 5 | use serde_yaml::Value; 6 | use std::collections::HashMap; 7 | use thiserror::Error; 8 | 9 | #[derive(Debug, Clone, Deserialize, Serialize)] 10 | pub struct MatrixConfig { 11 | #[serde(flatten)] 12 | pub parameters: IndexMap, 13 | #[serde(default)] 14 | pub include: Vec>, 15 | #[serde(default)] 16 | pub exclude: Vec>, 17 | #[serde(default, rename = "max-parallel")] 18 | pub max_parallel: Option, 19 | #[serde(default, rename = "fail-fast")] 20 | pub fail_fast: Option, 21 | } 22 | 23 | impl Default for MatrixConfig { 24 | fn default() -> Self { 25 | Self { 26 | parameters: IndexMap::new(), 27 | include: Vec::new(), 28 | exclude: Vec::new(), 29 | max_parallel: None, 30 | fail_fast: Some(true), 31 | } 32 | } 33 | } 34 | 35 | #[derive(Debug, Clone, PartialEq)] 36 | pub struct MatrixCombination { 37 | pub values: HashMap, 38 | pub is_included: bool, // Whether this was added via the include section 39 | } 40 | 41 | impl MatrixCombination { 42 | pub fn new(values: HashMap) -> Self { 43 | Self { 44 | values, 45 | is_included: false, 46 | } 47 | } 48 | 49 | pub fn from_include(values: HashMap) -> Self { 50 | Self { 51 | values, 52 | is_included: true, 53 | } 54 | } 55 | } 56 | 57 | #[derive(Error, Debug)] 58 | pub enum MatrixError { 59 | #[error("Invalid matrix parameter format: {0}")] 60 | InvalidParameterFormat(String), 61 | 62 | #[error("Failed to expand matrix: {0}")] 63 | ExpansionError(String), 64 | } 65 | 66 | /// Expands a matrix configuration into a list of all valid combinations 67 | pub fn expand_matrix(matrix: &MatrixConfig) -> Result, MatrixError> { 68 | let mut combinations = Vec::new(); 69 | 70 | // Step 1: Generate 
base combinations from parameter arrays 71 | let param_combinations = generate_base_combinations(matrix)?; 72 | 73 | // Step 2: Filter out any combinations that match the exclude patterns 74 | let filtered_combinations = apply_exclude_filters(param_combinations, &matrix.exclude); 75 | combinations.extend(filtered_combinations); 76 | 77 | // Step 3: Add any combinations from the include section 78 | for include_item in &matrix.include { 79 | combinations.push(MatrixCombination::from_include(include_item.clone())); 80 | } 81 | 82 | if combinations.is_empty() { 83 | return Err(MatrixError::ExpansionError( 84 | "No valid combinations found after applying filters".to_string(), 85 | )); 86 | } 87 | 88 | Ok(combinations) 89 | } 90 | 91 | /// Generates all possible combinations of the base matrix parameters 92 | fn generate_base_combinations( 93 | matrix: &MatrixConfig, 94 | ) -> Result, MatrixError> { 95 | // Extract parameter arrays and prepare for combination generation 96 | let mut param_arrays: IndexMap> = IndexMap::new(); 97 | 98 | for (param_name, param_value) in &matrix.parameters { 99 | match param_value { 100 | Value::Sequence(array) => { 101 | param_arrays.insert(param_name.clone(), array.clone()); 102 | } 103 | _ => { 104 | // Handle non-array parameters 105 | let single_value = vec![param_value.clone()]; 106 | param_arrays.insert(param_name.clone(), single_value); 107 | } 108 | } 109 | } 110 | 111 | if param_arrays.is_empty() { 112 | return Err(MatrixError::InvalidParameterFormat( 113 | "Matrix has no valid parameters".to_string(), 114 | )); 115 | } 116 | 117 | // Generate the Cartesian product of all parameter arrays 118 | let param_names: Vec = param_arrays.keys().cloned().collect(); 119 | let param_values: Vec> = param_arrays.values().cloned().collect(); 120 | 121 | // Generate all combinations using itertools 122 | let combinations = if !param_values.is_empty() { 123 | generate_combinations(¶m_names, ¶m_values, 0, &mut HashMap::new())? 
124 | } else { 125 | vec![] 126 | }; 127 | 128 | Ok(combinations) 129 | } 130 | 131 | /// Recursive function to generate combinations using depth-first approach 132 | fn generate_combinations( 133 | param_names: &[String], 134 | param_values: &[Vec], 135 | current_depth: usize, 136 | current_combination: &mut HashMap, 137 | ) -> Result, MatrixError> { 138 | if current_depth == param_names.len() { 139 | // We've reached a complete combination 140 | return Ok(vec![MatrixCombination::new(current_combination.clone())]); 141 | } 142 | 143 | let mut result = Vec::new(); 144 | let param_name = ¶m_names[current_depth]; 145 | let values = ¶m_values[current_depth]; 146 | 147 | for value in values { 148 | current_combination.insert(param_name.clone(), value.clone()); 149 | 150 | let mut new_combinations = generate_combinations( 151 | param_names, 152 | param_values, 153 | current_depth + 1, 154 | current_combination, 155 | )?; 156 | 157 | result.append(&mut new_combinations); 158 | } 159 | 160 | // Remove this level's parameter to backtrack 161 | current_combination.remove(param_name); 162 | 163 | Ok(result) 164 | } 165 | 166 | /// Filters out combinations that match any of the exclude patterns 167 | fn apply_exclude_filters( 168 | combinations: Vec, 169 | exclude_patterns: &[HashMap], 170 | ) -> Vec { 171 | if exclude_patterns.is_empty() { 172 | return combinations; 173 | } 174 | 175 | combinations 176 | .into_iter() 177 | .filter(|combination| !is_excluded(combination, exclude_patterns)) 178 | .collect() 179 | } 180 | 181 | /// Checks if a combination matches any exclude pattern 182 | fn is_excluded( 183 | combination: &MatrixCombination, 184 | exclude_patterns: &[HashMap], 185 | ) -> bool { 186 | for exclude in exclude_patterns { 187 | let mut excluded = true; 188 | 189 | for (key, value) in exclude { 190 | match combination.values.get(key) { 191 | Some(combo_value) if combo_value == value => { 192 | // This exclude condition matches 193 | continue; 194 | } 195 | _ => { 
196 | // This exclude condition doesn't match 197 | excluded = false; 198 | break; 199 | } 200 | } 201 | } 202 | 203 | if excluded { 204 | return true; 205 | } 206 | } 207 | 208 | false 209 | } 210 | 211 | /// Formats a combination name for display, e.g. "test (ubuntu, node 14)" 212 | pub fn format_combination_name(job_name: &str, combination: &MatrixCombination) -> String { 213 | let params = combination 214 | .values 215 | .iter() 216 | .map(|(k, v)| format!("{}: {}", k, value_to_string(v))) 217 | .collect::>() 218 | .join(", "); 219 | 220 | format!("{} ({})", job_name, params) 221 | } 222 | 223 | /// Converts a serde_yaml::Value to a string for display 224 | fn value_to_string(value: &Value) -> String { 225 | match value { 226 | Value::String(s) => s.clone(), 227 | Value::Number(n) => n.to_string(), 228 | Value::Bool(b) => b.to_string(), 229 | Value::Sequence(seq) => { 230 | let items = seq 231 | .iter() 232 | .map(value_to_string) 233 | .collect::>() 234 | .join(", "); 235 | format!("[{}]", items) 236 | } 237 | Value::Mapping(map) => { 238 | let items = map 239 | .iter() 240 | .map(|(k, v)| format!("{}: {}", value_to_string(k), value_to_string(v))) 241 | .collect::>() 242 | .join(", "); 243 | format!("{{{}}}", items) 244 | } 245 | Value::Null => "null".to_string(), 246 | _ => "unknown".to_string(), 247 | } 248 | } 249 | -------------------------------------------------------------------------------- /crates/models/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "models" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "Data models for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | serde.workspace = true 10 | serde_yaml.workspace = true 11 | serde_json.workspace = true 12 | thiserror.workspace = true -------------------------------------------------------------------------------- /crates/parser/Cargo.toml: 
-------------------------------------------------------------------------------- 1 | [package] 2 | name = "parser" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "Parser functionality for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | matrix = { path = "../matrix" } 12 | 13 | # External dependencies 14 | jsonschema.workspace = true 15 | serde.workspace = true 16 | serde_yaml.workspace = true 17 | serde_json.workspace = true 18 | thiserror.workspace = true 19 | 20 | [dev-dependencies] 21 | tempfile = "3.7" 22 | -------------------------------------------------------------------------------- /crates/parser/src/gitlab.rs: -------------------------------------------------------------------------------- 1 | use crate::schema::{SchemaType, SchemaValidator}; 2 | use crate::workflow; 3 | use models::gitlab::Pipeline; 4 | use models::ValidationResult; 5 | use std::collections::HashMap; 6 | use std::fs; 7 | use std::path::Path; 8 | use thiserror::Error; 9 | 10 | #[derive(Error, Debug)] 11 | pub enum GitlabParserError { 12 | #[error("I/O error: {0}")] 13 | IoError(#[from] std::io::Error), 14 | 15 | #[error("YAML parsing error: {0}")] 16 | YamlError(#[from] serde_yaml::Error), 17 | 18 | #[error("Invalid pipeline structure: {0}")] 19 | InvalidStructure(String), 20 | 21 | #[error("Schema validation error: {0}")] 22 | SchemaValidationError(String), 23 | } 24 | 25 | /// Parse a GitLab CI/CD pipeline file 26 | pub fn parse_pipeline(pipeline_path: &Path) -> Result { 27 | // Read the pipeline file 28 | let pipeline_content = fs::read_to_string(pipeline_path)?; 29 | 30 | // Validate against schema 31 | let validator = SchemaValidator::new().map_err(GitlabParserError::SchemaValidationError)?; 32 | 33 | validator 34 | .validate_with_specific_schema(&pipeline_content, SchemaType::GitLab) 35 | .map_err(GitlabParserError::SchemaValidationError)?; 36 | 37 | // Parse the pipeline YAML 38 
| let pipeline: Pipeline = serde_yaml::from_str(&pipeline_content)?; 39 | 40 | // Return the parsed pipeline 41 | Ok(pipeline) 42 | } 43 | 44 | /// Validate the basic structure of a GitLab CI/CD pipeline 45 | pub fn validate_pipeline_structure(pipeline: &Pipeline) -> ValidationResult { 46 | let mut result = ValidationResult::new(); 47 | 48 | // Check for at least one job 49 | if pipeline.jobs.is_empty() { 50 | result.add_issue("Pipeline must contain at least one job".to_string()); 51 | } 52 | 53 | // Check for script in jobs 54 | for (job_name, job) in &pipeline.jobs { 55 | // Skip template jobs 56 | if let Some(true) = job.template { 57 | continue; 58 | } 59 | 60 | // Check for script or extends 61 | if job.script.is_none() && job.extends.is_none() { 62 | result.add_issue(format!( 63 | "Job '{}' must have a script section or extend another job", 64 | job_name 65 | )); 66 | } 67 | } 68 | 69 | // Check that referenced stages are defined 70 | if let Some(stages) = &pipeline.stages { 71 | for (job_name, job) in &pipeline.jobs { 72 | if let Some(stage) = &job.stage { 73 | if !stages.contains(stage) { 74 | result.add_issue(format!( 75 | "Job '{}' references undefined stage '{}'", 76 | job_name, stage 77 | )); 78 | } 79 | } 80 | } 81 | } 82 | 83 | // Check that job dependencies exist 84 | for (job_name, job) in &pipeline.jobs { 85 | if let Some(dependencies) = &job.dependencies { 86 | for dependency in dependencies { 87 | if !pipeline.jobs.contains_key(dependency) { 88 | result.add_issue(format!( 89 | "Job '{}' depends on undefined job '{}'", 90 | job_name, dependency 91 | )); 92 | } 93 | } 94 | } 95 | } 96 | 97 | // Check that job extensions exist 98 | for (job_name, job) in &pipeline.jobs { 99 | if let Some(extends) = &job.extends { 100 | for extend in extends { 101 | if !pipeline.jobs.contains_key(extend) { 102 | result.add_issue(format!( 103 | "Job '{}' extends undefined job '{}'", 104 | job_name, extend 105 | )); 106 | } 107 | } 108 | } 109 | } 110 | 111 | result 
112 | } 113 | 114 | /// Convert a GitLab CI/CD pipeline to a format compatible with the workflow executor 115 | pub fn convert_to_workflow_format(pipeline: &Pipeline) -> workflow::WorkflowDefinition { 116 | // Create a new workflow with required fields 117 | let mut workflow = workflow::WorkflowDefinition { 118 | name: "Converted GitLab CI Pipeline".to_string(), 119 | on: vec!["push".to_string()], // Default trigger 120 | on_raw: serde_yaml::Value::String("push".to_string()), 121 | jobs: HashMap::new(), 122 | }; 123 | 124 | // Convert each GitLab job to a GitHub Actions job 125 | for (job_name, gitlab_job) in &pipeline.jobs { 126 | // Skip template jobs 127 | if let Some(true) = gitlab_job.template { 128 | continue; 129 | } 130 | 131 | // Create a new job 132 | let mut job = workflow::Job { 133 | runs_on: "ubuntu-latest".to_string(), // Default runner 134 | needs: None, 135 | steps: Vec::new(), 136 | env: HashMap::new(), 137 | matrix: None, 138 | services: HashMap::new(), 139 | }; 140 | 141 | // Add job-specific environment variables 142 | if let Some(variables) = &gitlab_job.variables { 143 | job.env.extend(variables.clone()); 144 | } 145 | 146 | // Add global variables if they exist 147 | if let Some(variables) = &pipeline.variables { 148 | // Only add if not already defined at job level 149 | for (key, value) in variables { 150 | job.env.entry(key.clone()).or_insert_with(|| value.clone()); 151 | } 152 | } 153 | 154 | // Convert before_script to steps if it exists 155 | if let Some(before_script) = &gitlab_job.before_script { 156 | for (i, cmd) in before_script.iter().enumerate() { 157 | let step = workflow::Step { 158 | name: Some(format!("Before script {}", i + 1)), 159 | uses: None, 160 | run: Some(cmd.clone()), 161 | with: None, 162 | env: HashMap::new(), 163 | continue_on_error: None, 164 | }; 165 | job.steps.push(step); 166 | } 167 | } 168 | 169 | // Convert main script to steps 170 | if let Some(script) = &gitlab_job.script { 171 | for (i, cmd) in 
script.iter().enumerate() { 172 | let step = workflow::Step { 173 | name: Some(format!("Run script line {}", i + 1)), 174 | uses: None, 175 | run: Some(cmd.clone()), 176 | with: None, 177 | env: HashMap::new(), 178 | continue_on_error: None, 179 | }; 180 | job.steps.push(step); 181 | } 182 | } 183 | 184 | // Convert after_script to steps if it exists 185 | if let Some(after_script) = &gitlab_job.after_script { 186 | for (i, cmd) in after_script.iter().enumerate() { 187 | let step = workflow::Step { 188 | name: Some(format!("After script {}", i + 1)), 189 | uses: None, 190 | run: Some(cmd.clone()), 191 | with: None, 192 | env: HashMap::new(), 193 | continue_on_error: Some(true), // After script should continue even if previous steps fail 194 | }; 195 | job.steps.push(step); 196 | } 197 | } 198 | 199 | // Add services if they exist 200 | if let Some(services) = &gitlab_job.services { 201 | for (i, service) in services.iter().enumerate() { 202 | let service_name = format!("service-{}", i); 203 | let service_image = match service { 204 | models::gitlab::Service::Simple(name) => name.clone(), 205 | models::gitlab::Service::Detailed { name, .. 
} => name.clone(), 206 | }; 207 | 208 | let service = workflow::Service { 209 | image: service_image, 210 | ports: None, 211 | env: HashMap::new(), 212 | volumes: None, 213 | options: None, 214 | }; 215 | 216 | job.services.insert(service_name, service); 217 | } 218 | } 219 | 220 | // Add the job to the workflow 221 | workflow.jobs.insert(job_name.clone(), job); 222 | } 223 | 224 | workflow 225 | } 226 | 227 | #[cfg(test)] 228 | mod tests { 229 | use super::*; 230 | use std::path::PathBuf; 231 | use tempfile::NamedTempFile; 232 | 233 | #[test] 234 | fn test_parse_simple_pipeline() { 235 | // Create a temporary file with a simple GitLab CI/CD pipeline 236 | let mut file = NamedTempFile::new().unwrap(); 237 | let content = r#" 238 | stages: 239 | - build 240 | - test 241 | 242 | build_job: 243 | stage: build 244 | script: 245 | - echo "Building..." 246 | - make build 247 | 248 | test_job: 249 | stage: test 250 | script: 251 | - echo "Testing..." 252 | - make test 253 | "#; 254 | fs::write(&file, content).unwrap(); 255 | 256 | // Parse the pipeline 257 | let pipeline = parse_pipeline(&file.path()).unwrap(); 258 | 259 | // Validate basic structure 260 | assert_eq!(pipeline.stages.as_ref().unwrap().len(), 2); 261 | assert_eq!(pipeline.jobs.len(), 2); 262 | 263 | // Check job contents 264 | let build_job = pipeline.jobs.get("build_job").unwrap(); 265 | assert_eq!(build_job.stage.as_ref().unwrap(), "build"); 266 | assert_eq!(build_job.script.as_ref().unwrap().len(), 2); 267 | 268 | let test_job = pipeline.jobs.get("test_job").unwrap(); 269 | assert_eq!(test_job.stage.as_ref().unwrap(), "test"); 270 | assert_eq!(test_job.script.as_ref().unwrap().len(), 2); 271 | } 272 | } 273 | -------------------------------------------------------------------------------- /crates/parser/src/lib.rs: -------------------------------------------------------------------------------- 1 | // parser crate 2 | 3 | pub mod gitlab; 4 | pub mod schema; 5 | pub mod workflow; 6 | 
-------------------------------------------------------------------------------- /crates/parser/src/schema.rs: -------------------------------------------------------------------------------- 1 | use jsonschema::JSONSchema; 2 | use serde_json::Value; 3 | use std::fs; 4 | use std::path::Path; 5 | 6 | const GITHUB_WORKFLOW_SCHEMA: &str = include_str!("../../../schemas/github-workflow.json"); 7 | const GITLAB_CI_SCHEMA: &str = include_str!("../../../schemas/gitlab-ci.json"); 8 | 9 | #[derive(Debug, Clone, Copy)] 10 | pub enum SchemaType { 11 | GitHub, 12 | GitLab, 13 | } 14 | 15 | pub struct SchemaValidator { 16 | github_schema: JSONSchema, 17 | gitlab_schema: JSONSchema, 18 | } 19 | 20 | impl SchemaValidator { 21 | pub fn new() -> Result { 22 | let github_schema_json: Value = serde_json::from_str(GITHUB_WORKFLOW_SCHEMA) 23 | .map_err(|e| format!("Failed to parse GitHub workflow schema: {}", e))?; 24 | 25 | let gitlab_schema_json: Value = serde_json::from_str(GITLAB_CI_SCHEMA) 26 | .map_err(|e| format!("Failed to parse GitLab CI schema: {}", e))?; 27 | 28 | let github_schema = JSONSchema::compile(&github_schema_json) 29 | .map_err(|e| format!("Failed to compile GitHub JSON schema: {}", e))?; 30 | 31 | let gitlab_schema = JSONSchema::compile(&gitlab_schema_json) 32 | .map_err(|e| format!("Failed to compile GitLab JSON schema: {}", e))?; 33 | 34 | Ok(Self { 35 | github_schema, 36 | gitlab_schema, 37 | }) 38 | } 39 | 40 | pub fn validate_workflow(&self, workflow_path: &Path) -> Result<(), String> { 41 | // Determine the schema type based on the filename 42 | let schema_type = if workflow_path.file_name().is_some_and(|name| { 43 | let name_str = name.to_string_lossy(); 44 | name_str.ends_with(".gitlab-ci.yml") || name_str.ends_with(".gitlab-ci.yaml") 45 | }) { 46 | SchemaType::GitLab 47 | } else { 48 | SchemaType::GitHub 49 | }; 50 | 51 | // Read the workflow file 52 | let content = fs::read_to_string(workflow_path) 53 | .map_err(|e| format!("Failed to read workflow file: 
{}", e))?; 54 | 55 | // Parse YAML to JSON Value 56 | let workflow_json: Value = serde_yaml::from_str(&content) 57 | .map_err(|e| format!("Failed to parse workflow YAML: {}", e))?; 58 | 59 | // Validate against the appropriate schema 60 | let validation_result = match schema_type { 61 | SchemaType::GitHub => self.github_schema.validate(&workflow_json), 62 | SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json), 63 | }; 64 | 65 | // Handle validation errors 66 | if let Err(errors) = validation_result { 67 | let schema_name = match schema_type { 68 | SchemaType::GitHub => "GitHub workflow", 69 | SchemaType::GitLab => "GitLab CI", 70 | }; 71 | let mut error_msg = format!("{} validation failed:\n", schema_name); 72 | for error in errors { 73 | error_msg.push_str(&format!("- {}\n", error)); 74 | } 75 | return Err(error_msg); 76 | } 77 | 78 | Ok(()) 79 | } 80 | 81 | pub fn validate_with_specific_schema( 82 | &self, 83 | content: &str, 84 | schema_type: SchemaType, 85 | ) -> Result<(), String> { 86 | // Parse YAML to JSON Value 87 | let workflow_json: Value = 88 | serde_yaml::from_str(content).map_err(|e| format!("Failed to parse YAML: {}", e))?; 89 | 90 | // Validate against the appropriate schema 91 | let validation_result = match schema_type { 92 | SchemaType::GitHub => self.github_schema.validate(&workflow_json), 93 | SchemaType::GitLab => self.gitlab_schema.validate(&workflow_json), 94 | }; 95 | 96 | // Handle validation errors 97 | if let Err(errors) = validation_result { 98 | let schema_name = match schema_type { 99 | SchemaType::GitHub => "GitHub workflow", 100 | SchemaType::GitLab => "GitLab CI", 101 | }; 102 | let mut error_msg = format!("{} validation failed:\n", schema_name); 103 | for error in errors { 104 | error_msg.push_str(&format!("- {}\n", error)); 105 | } 106 | return Err(error_msg); 107 | } 108 | 109 | Ok(()) 110 | } 111 | } 112 | -------------------------------------------------------------------------------- 
/crates/parser/src/workflow.rs: -------------------------------------------------------------------------------- 1 | use matrix::MatrixConfig; 2 | use serde::{Deserialize, Serialize}; 3 | use std::collections::HashMap; 4 | use std::fs; 5 | use std::path::Path; 6 | 7 | use super::schema::SchemaValidator; 8 | 9 | #[derive(Debug, Deserialize, Serialize)] 10 | pub struct WorkflowDefinition { 11 | pub name: String, 12 | #[serde(skip, default)] // Skip deserialization of the 'on' field directly 13 | pub on: Vec, 14 | #[serde(rename = "on")] // Raw access to the 'on' field for custom handling 15 | pub on_raw: serde_yaml::Value, 16 | pub jobs: HashMap, 17 | } 18 | 19 | #[derive(Debug, Deserialize, Serialize)] 20 | pub struct Job { 21 | #[serde(rename = "runs-on")] 22 | pub runs_on: String, 23 | #[serde(default)] 24 | pub needs: Option>, 25 | pub steps: Vec, 26 | #[serde(default)] 27 | pub env: HashMap, 28 | #[serde(default)] 29 | pub matrix: Option, 30 | #[serde(default)] 31 | pub services: HashMap, 32 | } 33 | 34 | #[derive(Debug, Deserialize, Serialize)] 35 | pub struct Service { 36 | pub image: String, 37 | #[serde(default)] 38 | pub ports: Option>, 39 | #[serde(default)] 40 | pub env: HashMap, 41 | #[serde(default)] 42 | pub volumes: Option>, 43 | #[serde(default)] 44 | pub options: Option, 45 | } 46 | 47 | #[derive(Debug, Deserialize, Serialize)] 48 | pub struct Step { 49 | #[serde(default)] 50 | pub name: Option, 51 | #[serde(default)] 52 | pub uses: Option, 53 | #[serde(default)] 54 | pub run: Option, 55 | #[serde(default)] 56 | pub with: Option>, 57 | #[serde(default)] 58 | pub env: HashMap, 59 | #[serde(default)] 60 | pub continue_on_error: Option, 61 | } 62 | 63 | impl WorkflowDefinition { 64 | pub fn resolve_action(&self, action_ref: &str) -> ActionInfo { 65 | // Parse GitHub action reference like "actions/checkout@v3" 66 | let parts: Vec<&str> = action_ref.split('@').collect(); 67 | 68 | let (repo, _) = if parts.len() > 1 { 69 | (parts[0], parts[1]) 70 | } else 
{ 71 | (parts[0], "main") // Default to main if no version specified 72 | }; 73 | 74 | ActionInfo { 75 | repository: repo.to_string(), 76 | is_docker: repo.starts_with("docker://"), 77 | is_local: repo.starts_with("./"), 78 | } 79 | } 80 | } 81 | 82 | #[derive(Debug, Clone)] 83 | pub struct ActionInfo { 84 | pub repository: String, 85 | pub is_docker: bool, 86 | pub is_local: bool, 87 | } 88 | 89 | pub fn parse_workflow(path: &Path) -> Result { 90 | // First validate against schema 91 | let validator = SchemaValidator::new()?; 92 | validator.validate_workflow(path)?; 93 | 94 | // If validation passes, parse the workflow 95 | let content = 96 | fs::read_to_string(path).map_err(|e| format!("Failed to read workflow file: {}", e))?; 97 | 98 | // Parse the YAML content 99 | let mut workflow: WorkflowDefinition = serde_yaml::from_str(&content) 100 | .map_err(|e| format!("Failed to parse workflow structure: {}", e))?; 101 | 102 | // Normalize the trigger events 103 | workflow.on = normalize_triggers(&workflow.on_raw)?; 104 | 105 | Ok(workflow) 106 | } 107 | 108 | fn normalize_triggers(on_value: &serde_yaml::Value) -> Result, String> { 109 | let mut triggers = Vec::new(); 110 | 111 | match on_value { 112 | // Simple string trigger: on: push 113 | serde_yaml::Value::String(event) => { 114 | triggers.push(event.clone()); 115 | } 116 | // Array of triggers: on: [push, pull_request] 117 | serde_yaml::Value::Sequence(events) => { 118 | for event in events { 119 | if let Some(event_str) = event.as_str() { 120 | triggers.push(event_str.to_string()); 121 | } 122 | } 123 | } 124 | // Map of triggers with configuration: on: {push: {branches: [main]}} 125 | serde_yaml::Value::Mapping(events_map) => { 126 | for (event, _) in events_map { 127 | if let Some(event_str) = event.as_str() { 128 | triggers.push(event_str.to_string()); 129 | } 130 | } 131 | } 132 | _ => { 133 | return Err("'on' section has invalid format".to_string()); 134 | } 135 | } 136 | 137 | Ok(triggers) 138 | } 139 | 
-------------------------------------------------------------------------------- /crates/runtime/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "runtime" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "Runtime environment for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | logging = { path = "../logging", version = "0.4.0" } 12 | 13 | # External dependencies 14 | async-trait.workspace = true 15 | once_cell = "1.19" 16 | serde.workspace = true 17 | serde_yaml.workspace = true 18 | tempfile = "3.9" 19 | tokio.workspace = true 20 | futures = "0.3" 21 | utils = { path = "../utils", version = "0.4.0" } 22 | which = "4.4" 23 | -------------------------------------------------------------------------------- /crates/runtime/src/container.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use std::path::Path; 3 | 4 | #[async_trait] 5 | pub trait ContainerRuntime { 6 | async fn run_container( 7 | &self, 8 | image: &str, 9 | cmd: &[&str], 10 | env_vars: &[(&str, &str)], 11 | working_dir: &Path, 12 | volumes: &[(&Path, &Path)], 13 | ) -> Result; 14 | 15 | async fn pull_image(&self, image: &str) -> Result<(), ContainerError>; 16 | 17 | async fn build_image(&self, dockerfile: &Path, tag: &str) -> Result<(), ContainerError>; 18 | 19 | async fn prepare_language_environment( 20 | &self, 21 | language: &str, 22 | version: Option<&str>, 23 | additional_packages: Option>, 24 | ) -> Result; 25 | } 26 | 27 | pub struct ContainerOutput { 28 | pub stdout: String, 29 | pub stderr: String, 30 | pub exit_code: i32, 31 | } 32 | 33 | use std::fmt; 34 | 35 | #[derive(Debug)] 36 | pub enum ContainerError { 37 | ImagePull(String), 38 | ImageBuild(String), 39 | ContainerStart(String), 40 | ContainerExecution(String), 41 | NetworkCreation(String), 42 | 
NetworkOperation(String), 43 | } 44 | 45 | impl fmt::Display for ContainerError { 46 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 47 | match self { 48 | ContainerError::ImagePull(msg) => write!(f, "Failed to pull image: {}", msg), 49 | ContainerError::ImageBuild(msg) => write!(f, "Failed to build image: {}", msg), 50 | ContainerError::ContainerStart(msg) => { 51 | write!(f, "Failed to start container: {}", msg) 52 | } 53 | ContainerError::ContainerExecution(msg) => { 54 | write!(f, "Container execution failed: {}", msg) 55 | } 56 | ContainerError::NetworkCreation(msg) => { 57 | write!(f, "Failed to create Docker network: {}", msg) 58 | } 59 | ContainerError::NetworkOperation(msg) => { 60 | write!(f, "Network operation failed: {}", msg) 61 | } 62 | } 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /crates/runtime/src/emulation_test.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | use std::process::Command; 3 | use std::fs; 4 | use tokio::sync::Mutex; 5 | use once_cell::sync::Lazy; 6 | 7 | use crate::runtime::{ 8 | container::{ContainerRuntime, ContainerOutput, ContainerError}, 9 | emulation::{self, EmulationRuntime}, 10 | }; 11 | 12 | #[cfg(test)] 13 | mod emulation_cleanup_tests { 14 | use super::*; 15 | 16 | /// Create a process and workspace that need to be tracked for cleanup 17 | async fn setup_emulation_resources() -> (Option, Option) { 18 | // Create an emulation runtime to generate a workspace 19 | let runtime = EmulationRuntime::new(); 20 | 21 | // Get the workspace path (normally this is tracked automatically) 22 | let workspaces = emulation::get_tracked_workspaces(); 23 | let workspace_path = if !workspaces.is_empty() { 24 | Some(workspaces[0].clone()) 25 | } else { 26 | None 27 | }; 28 | 29 | // Try to spawn a long-running background process for testing 30 | let process_id = if cfg!(unix) { 31 | // Use sleep on 
Unix to create a long-running process 32 | let child = Command::new("sh") 33 | .arg("-c") 34 | .arg("sleep 300 &") // Run sleep for 300 seconds in background 35 | .spawn(); 36 | 37 | match child { 38 | Ok(child) => { 39 | // Get the PID and track it 40 | let pid = child.id(); 41 | emulation::track_process(pid); 42 | Some(pid) 43 | }, 44 | Err(_) => None 45 | } 46 | } else if cfg!(windows) { 47 | // Use timeout on Windows (equivalent to sleep) 48 | let child = Command::new("cmd") 49 | .arg("/C") 50 | .arg("start /b timeout /t 300") // Run timeout for 300 seconds 51 | .spawn(); 52 | 53 | match child { 54 | Ok(child) => { 55 | // Get the PID and track it 56 | let pid = child.id(); 57 | emulation::track_process(pid); 58 | Some(pid) 59 | }, 60 | Err(_) => None 61 | } 62 | } else { 63 | None 64 | }; 65 | 66 | (process_id, workspace_path) 67 | } 68 | 69 | /// Check if a process with the given PID is still running 70 | fn is_process_running(pid: u32) -> bool { 71 | if cfg!(unix) { 72 | // On Unix, use kill -0 to check if process exists 73 | let output = Command::new("kill") 74 | .arg("-0") 75 | .arg(&pid.to_string()) 76 | .output(); 77 | 78 | matches!(output, Ok(output) if output.status.success()) 79 | } else if cfg!(windows) { 80 | // On Windows, use tasklist to find the process 81 | let output = Command::new("tasklist") 82 | .arg("/FI") 83 | .arg(format!("PID eq {}", pid)) 84 | .arg("/NH") 85 | .output(); 86 | 87 | matches!(output, Ok(output) if String::from_utf8_lossy(&output.stdout).contains(&pid.to_string())) 88 | } else { 89 | false 90 | } 91 | } 92 | 93 | #[tokio::test] 94 | async fn test_emulation_process_cleanup() { 95 | // Skip tests on CI or environments where spawning processes might be restricted 96 | if std::env::var("CI").is_ok() { 97 | println!("Running in CI environment, skipping test"); 98 | return; 99 | } 100 | 101 | // Set up test resources 102 | let (process_id, _) = setup_emulation_resources().await; 103 | 104 | // Skip if we couldn't create a process 
105 | let process_id = match process_id { 106 | Some(id) => id, 107 | None => { 108 | println!("Could not create test process, skipping test"); 109 | return; 110 | } 111 | }; 112 | 113 | // Verify process is tracked 114 | let processes = emulation::get_tracked_processes(); 115 | let is_tracked = processes.contains(&process_id); 116 | 117 | assert!(is_tracked, "Process should be tracked for cleanup"); 118 | 119 | // Run cleanup 120 | emulation::cleanup_resources().await; 121 | 122 | // Verify process is removed from tracking 123 | let processes = emulation::get_tracked_processes(); 124 | let still_tracked = processes.contains(&process_id); 125 | 126 | assert!(!still_tracked, "Process should be removed from tracking after cleanup"); 127 | 128 | // Verify process is no longer running 129 | assert!(!is_process_running(process_id), "Process should be terminated after cleanup"); 130 | } 131 | 132 | #[tokio::test] 133 | async fn test_emulation_workspace_cleanup() { 134 | // Create an emulation runtime instance which will automatically create and track a workspace 135 | let runtime = EmulationRuntime::new(); 136 | 137 | // Get the workspace path 138 | let workspaces = emulation::get_tracked_workspaces(); 139 | if workspaces.is_empty() { 140 | println!("No workspace was tracked, skipping test"); 141 | return; 142 | } 143 | 144 | let workspace_path = &workspaces[0]; 145 | 146 | // Verify workspace exists 147 | assert!(workspace_path.exists(), "Workspace should exist before cleanup"); 148 | 149 | // Run cleanup 150 | emulation::cleanup_resources().await; 151 | 152 | // Verify workspace is removed from tracking 153 | let workspaces = emulation::get_tracked_workspaces(); 154 | let still_tracked = workspaces.iter().any(|w| w == workspace_path); 155 | 156 | assert!(!still_tracked, "Workspace should be removed from tracking after cleanup"); 157 | 158 | // Verify workspace directory is deleted 159 | assert!(!workspace_path.exists(), "Workspace directory should be deleted after 
cleanup"); 160 | } 161 | 162 | #[tokio::test] 163 | async fn test_run_container_with_emulation() { 164 | // Create an emulation runtime 165 | let runtime = EmulationRuntime::new(); 166 | 167 | // Run a simple command in emulation mode 168 | let result = runtime 169 | .run_container( 170 | "alpine:latest", // In emulation mode, image is just for logging 171 | &["echo", "test cleanup"], 172 | &[], 173 | Path::new("/"), 174 | &[(Path::new("."), Path::new("/github/workspace"))], 175 | ) 176 | .await; 177 | 178 | // Verify command executed successfully 179 | match result { 180 | Ok(output) => { 181 | assert!(output.stdout.contains("test cleanup"), "Command output should contain test message"); 182 | assert_eq!(output.exit_code, 0, "Command should exit with status 0"); 183 | }, 184 | Err(e) => { 185 | panic!("Failed to run command in emulation mode: {}", e); 186 | } 187 | } 188 | 189 | // Count resources before cleanup 190 | let workspaces_count = emulation::get_tracked_workspaces().len(); 191 | 192 | assert!(workspaces_count > 0, "At least one workspace should be tracked"); 193 | 194 | // Run cleanup 195 | emulation::cleanup_resources().await; 196 | 197 | // Verify all resources are cleaned up 198 | let remaining_workspaces = emulation::get_tracked_workspaces().len(); 199 | 200 | assert_eq!(remaining_workspaces, 0, "All workspaces should be cleaned up"); 201 | } 202 | 203 | #[tokio::test] 204 | async fn test_full_resource_cleanup() { 205 | // Skip tests on CI or environments where spawning processes might be restricted 206 | if std::env::var("CI").is_ok() { 207 | println!("Running in CI environment, skipping test"); 208 | return; 209 | } 210 | 211 | // Set up test resources 212 | let (process_id, _) = setup_emulation_resources().await; 213 | 214 | // Create an additional emulation runtime to have more workspaces 215 | let runtime = EmulationRuntime::new(); 216 | 217 | // Count resources before cleanup 218 | let process_count = emulation::get_tracked_processes().len(); 
219 | let workspace_count = emulation::get_tracked_workspaces().len(); 220 | 221 | // Ensure we have at least one resource to clean up 222 | assert!(process_count > 0 || workspace_count > 0, 223 | "At least one process or workspace should be tracked"); 224 | 225 | // Run full cleanup 226 | emulation::cleanup_resources().await; 227 | 228 | // Verify all resources are cleaned up 229 | let remaining_processes = emulation::get_tracked_processes().len(); 230 | let remaining_workspaces = emulation::get_tracked_workspaces().len(); 231 | 232 | assert_eq!(remaining_processes, 0, "All processes should be cleaned up"); 233 | assert_eq!(remaining_workspaces, 0, "All workspaces should be cleaned up"); 234 | 235 | // If we had a process, verify it's not running anymore 236 | if let Some(pid) = process_id { 237 | assert!(!is_process_running(pid), "Process should be terminated after cleanup"); 238 | } 239 | } 240 | } -------------------------------------------------------------------------------- /crates/runtime/src/lib.rs: -------------------------------------------------------------------------------- 1 | // runtime crate 2 | 3 | pub mod container; 4 | pub mod emulation; 5 | -------------------------------------------------------------------------------- /crates/ui/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ui" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "user interface functionality for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | evaluator = { path = "../evaluator" } 12 | executor = { path = "../executor" } 13 | logging = { path = "../logging" } 14 | utils = { path = "../utils" } 15 | github = { path = "../github" } 16 | 17 | # External dependencies 18 | chrono.workspace = true 19 | crossterm.workspace = true 20 | ratatui.workspace = true 21 | serde.workspace = true 22 | serde_yaml.workspace = 
true 23 | tokio.workspace = true 24 | serde_json.workspace = true 25 | reqwest = { workspace = true, features = ["json"] } 26 | regex.workspace = true 27 | futures.workspace = true 28 | -------------------------------------------------------------------------------- /crates/ui/src/components/button.rs: -------------------------------------------------------------------------------- 1 | // Button component 2 | use ratatui::{ 3 | style::{Color, Modifier, Style}, 4 | text::{Line, Span}, 5 | widgets::Paragraph, 6 | }; 7 | 8 | /// A simple button component for the TUI 9 | pub struct Button { 10 | pub label: String, 11 | pub is_selected: bool, 12 | pub is_active: bool, 13 | } 14 | 15 | impl Button { 16 | /// Create a new button 17 | pub fn new(label: &str) -> Self { 18 | Button { 19 | label: label.to_string(), 20 | is_selected: false, 21 | is_active: true, 22 | } 23 | } 24 | 25 | /// Set selected state 26 | pub fn selected(mut self, is_selected: bool) -> Self { 27 | self.is_selected = is_selected; 28 | self 29 | } 30 | 31 | /// Set active state 32 | pub fn active(mut self, is_active: bool) -> Self { 33 | self.is_active = is_active; 34 | self 35 | } 36 | 37 | /// Render the button 38 | pub fn render(&self) -> Paragraph { 39 | let (fg, bg) = match (self.is_selected, self.is_active) { 40 | (true, true) => (Color::Black, Color::Yellow), 41 | (true, false) => (Color::Black, Color::DarkGray), 42 | (false, true) => (Color::White, Color::Blue), 43 | (false, false) => (Color::DarkGray, Color::Black), 44 | }; 45 | 46 | let style = Style::default().fg(fg).bg(bg).add_modifier(Modifier::BOLD); 47 | 48 | Paragraph::new(Line::from(vec![Span::styled( 49 | format!(" {} ", self.label), 50 | style, 51 | )])) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /crates/ui/src/components/checkbox.rs: -------------------------------------------------------------------------------- 1 | // Checkbox component 2 | use ratatui::{ 3 | style::{Color, 
Modifier, Style}, 4 | text::{Line, Span}, 5 | widgets::Paragraph, 6 | }; 7 | 8 | /// A simple checkbox component for the TUI 9 | pub struct Checkbox { 10 | pub label: String, 11 | pub is_checked: bool, 12 | pub is_selected: bool, 13 | } 14 | 15 | impl Checkbox { 16 | /// Create a new checkbox 17 | pub fn new(label: &str) -> Self { 18 | Checkbox { 19 | label: label.to_string(), 20 | is_checked: false, 21 | is_selected: false, 22 | } 23 | } 24 | 25 | /// Set checked state 26 | pub fn checked(mut self, is_checked: bool) -> Self { 27 | self.is_checked = is_checked; 28 | self 29 | } 30 | 31 | /// Set selected state 32 | pub fn selected(mut self, is_selected: bool) -> Self { 33 | self.is_selected = is_selected; 34 | self 35 | } 36 | 37 | /// Toggle checked state 38 | pub fn toggle(&mut self) { 39 | self.is_checked = !self.is_checked; 40 | } 41 | 42 | /// Render the checkbox 43 | pub fn render(&self) -> Paragraph { 44 | let checkbox = if self.is_checked { "[✓]" } else { "[ ]" }; 45 | 46 | let style = if self.is_selected { 47 | Style::default() 48 | .fg(Color::Yellow) 49 | .add_modifier(Modifier::BOLD) 50 | } else { 51 | Style::default().fg(Color::White) 52 | }; 53 | 54 | Paragraph::new(Line::from(vec![ 55 | Span::styled(checkbox, style), 56 | Span::raw(" "), 57 | Span::styled(&self.label, style), 58 | ])) 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /crates/ui/src/components/mod.rs: -------------------------------------------------------------------------------- 1 | // UI Components 2 | mod button; 3 | mod checkbox; 4 | mod progress_bar; 5 | 6 | // Re-export components for easier access 7 | pub use button::Button; 8 | pub use checkbox::Checkbox; 9 | pub use progress_bar::ProgressBar; 10 | 11 | // This module will contain smaller reusable UI elements that 12 | // can be shared between different views of the application. 
13 | -------------------------------------------------------------------------------- /crates/ui/src/components/progress_bar.rs: -------------------------------------------------------------------------------- 1 | // Progress bar component 2 | use ratatui::{ 3 | style::{Color, Style}, 4 | widgets::Gauge, 5 | }; 6 | 7 | /// A simple progress bar component for the TUI 8 | pub struct ProgressBar { 9 | pub progress: f64, 10 | pub label: Option, 11 | pub color: Color, 12 | } 13 | 14 | impl ProgressBar { 15 | /// Create a new progress bar 16 | pub fn new(progress: f64) -> Self { 17 | ProgressBar { 18 | progress: progress.clamp(0.0, 1.0), 19 | label: None, 20 | color: Color::Blue, 21 | } 22 | } 23 | 24 | /// Set label 25 | pub fn label(mut self, label: &str) -> Self { 26 | self.label = Some(label.to_string()); 27 | self 28 | } 29 | 30 | /// Set color 31 | pub fn color(mut self, color: Color) -> Self { 32 | self.color = color; 33 | self 34 | } 35 | 36 | /// Update progress value 37 | pub fn update(&mut self, progress: f64) { 38 | self.progress = progress.clamp(0.0, 1.0); 39 | } 40 | 41 | /// Render the progress bar 42 | pub fn render(&self) -> Gauge { 43 | let label = match &self.label { 44 | Some(lbl) => format!("{} {:.0}%", lbl, self.progress * 100.0), 45 | None => format!("{:.0}%", self.progress * 100.0), 46 | }; 47 | 48 | Gauge::default() 49 | .gauge_style(Style::default().fg(self.color).bg(Color::Black)) 50 | .label(label) 51 | .ratio(self.progress) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /crates/ui/src/handlers/mod.rs: -------------------------------------------------------------------------------- 1 | // Handlers for the UI 2 | 3 | pub mod workflow; 4 | -------------------------------------------------------------------------------- /crates/ui/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Modular UI crate for wrkflw 2 | // 3 | // This crate is organized 
into several modules: 4 | // - app: Contains the main App state and TUI entry point 5 | // - models: Contains the data structures for the UI 6 | // - components: Contains reusable UI elements 7 | // - handlers: Contains workflow handling logic 8 | // - utils: Contains utility functions 9 | // - views: Contains UI rendering code 10 | 11 | // Re-export public modules 12 | pub mod app; 13 | pub mod components; 14 | pub mod handlers; 15 | pub mod models; 16 | pub mod utils; 17 | pub mod views; 18 | 19 | // Re-export main entry points 20 | pub use app::run_wrkflw_tui; 21 | pub use handlers::workflow::execute_workflow_cli; 22 | pub use handlers::workflow::validate_workflow; 23 | -------------------------------------------------------------------------------- /crates/ui/src/models/mod.rs: -------------------------------------------------------------------------------- 1 | // UI Models for wrkflw 2 | use chrono::Local; 3 | use executor::{JobStatus, StepStatus}; 4 | use std::path::PathBuf; 5 | 6 | /// Type alias for the complex execution result type 7 | pub type ExecutionResultMsg = (usize, Result<(Vec, ()), String>); 8 | 9 | /// Represents an individual workflow file 10 | pub struct Workflow { 11 | pub name: String, 12 | pub path: PathBuf, 13 | pub selected: bool, 14 | pub status: WorkflowStatus, 15 | pub execution_details: Option, 16 | } 17 | 18 | /// Status of a workflow 19 | #[derive(Debug, Clone, PartialEq)] 20 | pub enum WorkflowStatus { 21 | NotStarted, 22 | Running, 23 | Success, 24 | Failed, 25 | Skipped, 26 | } 27 | 28 | /// Detailed execution information 29 | pub struct WorkflowExecution { 30 | pub jobs: Vec, 31 | pub start_time: chrono::DateTime, 32 | pub end_time: Option>, 33 | pub logs: Vec, 34 | pub progress: f64, // 0.0 - 1.0 for progress bar 35 | } 36 | 37 | /// Job execution details 38 | pub struct JobExecution { 39 | pub name: String, 40 | pub status: JobStatus, 41 | pub steps: Vec, 42 | pub logs: Vec, 43 | } 44 | 45 | /// Step execution details 46 | pub 
/// Log filter levels used to narrow down which log lines are shown
/// in the logs tab. `All` disables filtering.
pub enum LogFilterLevel {
    Info,
    Warning,
    Error,
    Success,
    Trigger,
    All,
}

impl LogFilterLevel {
    /// Returns true when `log` should be shown under this filter level.
    ///
    /// Matching is substring-based against the emoji/level markers the
    /// logging layer embeds in each line (e.g. "ℹ️", "WARN", "ERROR").
    pub fn matches(&self, log: &str) -> bool {
        match self {
            // Plain INFO lines; exclude lines that also carry a SUCCESS
            // marker (those are treated as Success, not Info).
            LogFilterLevel::Info => {
                log.contains("ℹ️") || (log.contains("INFO") && !log.contains("SUCCESS"))
            }
            LogFilterLevel::Warning => log.contains("⚠️") || log.contains("WARN"),
            LogFilterLevel::Error => log.contains("❌") || log.contains("ERROR"),
            LogFilterLevel::Success => log.contains("SUCCESS") || log.contains("success"),
            LogFilterLevel::Trigger => {
                log.contains("Triggering") || log.contains("triggered") || log.contains("TRIG")
            }
            LogFilterLevel::All => true,
        }
    }

    /// Cycles to the next filter level:
    /// All -> Info -> Warning -> Error -> Success -> Trigger -> All.
    pub fn next(&self) -> Self {
        match self {
            LogFilterLevel::All => LogFilterLevel::Info,
            LogFilterLevel::Info => LogFilterLevel::Warning,
            LogFilterLevel::Warning => LogFilterLevel::Error,
            LogFilterLevel::Error => LogFilterLevel::Success,
            LogFilterLevel::Success => LogFilterLevel::Trigger,
            LogFilterLevel::Trigger => LogFilterLevel::All,
        }
    }

    /// Short, uppercase label for this level (shown in the status bar).
    ///
    /// Kept as an inherent method for backward compatibility with existing
    /// call sites; new code can use the `Display` impl below instead.
    pub fn to_string(&self) -> &str {
        match self {
            LogFilterLevel::All => "ALL",
            LogFilterLevel::Info => "INFO",
            LogFilterLevel::Warning => "WARNING",
            LogFilterLevel::Error => "ERROR",
            LogFilterLevel::Success => "SUCCESS",
            LogFilterLevel::Trigger => "TRIGGER",
        }
    }
}

// FIX: an inherent `to_string` without a `Display` impl triggers clippy's
// `inherent_to_string` lint and prevents use in `format!` strings. Providing
// `Display` is backward compatible (inherent methods win method resolution).
impl std::fmt::Display for LogFilterLevel {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.to_string())
    }
}
load_workflows(dir_path: &Path) -> Vec { 8 | let mut workflows = Vec::new(); 9 | 10 | // Default path is .github/workflows 11 | let default_workflows_dir = Path::new(".github").join("workflows"); 12 | let is_default_dir = dir_path == default_workflows_dir || dir_path.ends_with("workflows"); 13 | 14 | if let Ok(entries) = std::fs::read_dir(dir_path) { 15 | for entry in entries.flatten() { 16 | let path = entry.path(); 17 | if path.is_file() && (is_workflow_file(&path) || !is_default_dir) { 18 | // Get just the base name without extension 19 | let name = path.file_stem().map_or_else( 20 | || "[unknown]".to_string(), 21 | |fname| fname.to_string_lossy().into_owned(), 22 | ); 23 | 24 | workflows.push(Workflow { 25 | name, 26 | path, 27 | selected: false, 28 | status: WorkflowStatus::NotStarted, 29 | execution_details: None, 30 | }); 31 | } 32 | } 33 | } 34 | 35 | // Check for GitLab CI pipeline file in the root directory if we're in the default GitHub workflows dir 36 | if is_default_dir { 37 | // Look for .gitlab-ci.yml in the repository root 38 | let gitlab_ci_path = PathBuf::from(".gitlab-ci.yml"); 39 | if gitlab_ci_path.exists() && gitlab_ci_path.is_file() { 40 | workflows.push(Workflow { 41 | name: "gitlab-ci".to_string(), 42 | path: gitlab_ci_path, 43 | selected: false, 44 | status: WorkflowStatus::NotStarted, 45 | execution_details: None, 46 | }); 47 | } 48 | } 49 | 50 | // Sort workflows by name 51 | workflows.sort_by(|a, b| a.name.cmp(&b.name)); 52 | workflows 53 | } 54 | -------------------------------------------------------------------------------- /crates/ui/src/views/help_overlay.rs: -------------------------------------------------------------------------------- 1 | // Help overlay rendering 2 | use ratatui::{ 3 | backend::CrosstermBackend, 4 | layout::Rect, 5 | style::{Color, Modifier, Style}, 6 | text::{Line, Span}, 7 | widgets::{Block, BorderType, Borders, Paragraph, Wrap}, 8 | Frame, 9 | }; 10 | use std::io; 11 | 12 | // Render the help tab 13 | pub 
fn render_help_tab(f: &mut Frame>, area: Rect) { 14 | let help_text = vec![ 15 | Line::from(Span::styled( 16 | "Keyboard Controls", 17 | Style::default() 18 | .fg(Color::Cyan) 19 | .add_modifier(Modifier::BOLD), 20 | )), 21 | Line::from(""), 22 | Line::from(vec![ 23 | Span::styled( 24 | "Tab", 25 | Style::default() 26 | .fg(Color::Yellow) 27 | .add_modifier(Modifier::BOLD), 28 | ), 29 | Span::raw(" - Switch between tabs"), 30 | ]), 31 | // More help text would follow... 32 | ]; 33 | 34 | let help_widget = Paragraph::new(help_text) 35 | .block( 36 | Block::default() 37 | .borders(Borders::ALL) 38 | .border_type(BorderType::Rounded) 39 | .title(Span::styled(" Help ", Style::default().fg(Color::Yellow))), 40 | ) 41 | .wrap(Wrap { trim: true }); 42 | 43 | f.render_widget(help_widget, area); 44 | } 45 | 46 | // Render a help overlay 47 | pub fn render_help_overlay(f: &mut Frame>) { 48 | let size = f.size(); 49 | 50 | // Create a slightly smaller centered modal 51 | let width = size.width.min(60); 52 | let height = size.height.min(20); 53 | let x = (size.width - width) / 2; 54 | let y = (size.height - height) / 2; 55 | 56 | let help_area = Rect { 57 | x, 58 | y, 59 | width, 60 | height, 61 | }; 62 | 63 | // Create a clear background 64 | let clear = Block::default().style(Style::default().bg(Color::Black)); 65 | f.render_widget(clear, size); 66 | 67 | // Render the help content 68 | render_help_tab(f, help_area); 69 | } 70 | -------------------------------------------------------------------------------- /crates/ui/src/views/mod.rs: -------------------------------------------------------------------------------- 1 | // UI Views module 2 | mod execution_tab; 3 | mod help_overlay; 4 | mod job_detail; 5 | mod logs_tab; 6 | mod status_bar; 7 | mod title_bar; 8 | mod workflows_tab; 9 | 10 | use crate::app::App; 11 | use ratatui::{backend::CrosstermBackend, Frame}; 12 | use std::io; 13 | 14 | // Main render function for the UI 15 | pub fn render_ui(f: &mut Frame>, app: &mut 
App) { 16 | // Check if help should be shown as an overlay 17 | if app.show_help { 18 | help_overlay::render_help_overlay(f); 19 | return; 20 | } 21 | 22 | let size = f.size(); 23 | 24 | // Create main layout 25 | let main_chunks = ratatui::layout::Layout::default() 26 | .direction(ratatui::layout::Direction::Vertical) 27 | .constraints( 28 | [ 29 | ratatui::layout::Constraint::Length(3), // Title bar and tabs 30 | ratatui::layout::Constraint::Min(5), // Main content 31 | ratatui::layout::Constraint::Length(2), // Status bar 32 | ] 33 | .as_ref(), 34 | ) 35 | .split(size); 36 | 37 | // Render title bar with tabs 38 | title_bar::render_title_bar(f, app, main_chunks[0]); 39 | 40 | // Render main content based on selected tab 41 | match app.selected_tab { 42 | 0 => workflows_tab::render_workflows_tab(f, app, main_chunks[1]), 43 | 1 => { 44 | if app.detailed_view { 45 | job_detail::render_job_detail_view(f, app, main_chunks[1]) 46 | } else { 47 | execution_tab::render_execution_tab(f, app, main_chunks[1]) 48 | } 49 | } 50 | 2 => logs_tab::render_logs_tab(f, app, main_chunks[1]), 51 | 3 => help_overlay::render_help_tab(f, main_chunks[1]), 52 | _ => {} 53 | } 54 | 55 | // Render status bar 56 | status_bar::render_status_bar(f, app, main_chunks[2]); 57 | } 58 | -------------------------------------------------------------------------------- /crates/ui/src/views/status_bar.rs: -------------------------------------------------------------------------------- 1 | // Status bar rendering 2 | use crate::app::App; 3 | use executor::RuntimeType; 4 | use ratatui::{ 5 | backend::CrosstermBackend, 6 | layout::{Alignment, Rect}, 7 | style::{Color, Style}, 8 | text::{Line, Span}, 9 | widgets::Paragraph, 10 | Frame, 11 | }; 12 | use std::io; 13 | 14 | // Render the status bar 15 | pub fn render_status_bar(f: &mut Frame>, app: &App, area: Rect) { 16 | // If we have a status message, show it instead of the normal status bar 17 | if let Some(message) = &app.status_message { 18 | // 
Determine if this is a success message (starts with ✅) 19 | let is_success = message.starts_with("✅"); 20 | 21 | let status_message = Paragraph::new(Line::from(vec![Span::styled( 22 | format!(" {} ", message), 23 | Style::default() 24 | .bg(if is_success { Color::Green } else { Color::Red }) 25 | .fg(Color::White) 26 | .add_modifier(ratatui::style::Modifier::BOLD), 27 | )])) 28 | .alignment(Alignment::Center); 29 | 30 | f.render_widget(status_message, area); 31 | return; 32 | } 33 | 34 | // Normal status bar 35 | let mut status_items = vec![]; 36 | 37 | // Add mode info 38 | status_items.push(Span::styled( 39 | format!(" {} ", app.runtime_type_name()), 40 | Style::default() 41 | .bg(match app.runtime_type { 42 | RuntimeType::Docker => Color::Blue, 43 | RuntimeType::Emulation => Color::Magenta, 44 | }) 45 | .fg(Color::White), 46 | )); 47 | 48 | // Add Docker status if relevant 49 | if app.runtime_type == RuntimeType::Docker { 50 | // Check Docker silently using safe FD redirection 51 | let is_docker_available = 52 | match utils::fd::with_stderr_to_null(executor::docker::is_available) { 53 | Ok(result) => result, 54 | Err(_) => { 55 | logging::debug("Failed to redirect stderr when checking Docker availability."); 56 | false 57 | } 58 | }; 59 | 60 | status_items.push(Span::raw(" ")); 61 | status_items.push(Span::styled( 62 | if is_docker_available { 63 | " Docker: Connected " 64 | } else { 65 | " Docker: Not Available " 66 | }, 67 | Style::default() 68 | .bg(if is_docker_available { 69 | Color::Green 70 | } else { 71 | Color::Red 72 | }) 73 | .fg(Color::White), 74 | )); 75 | } 76 | 77 | // Add validation/execution mode 78 | status_items.push(Span::raw(" ")); 79 | status_items.push(Span::styled( 80 | format!( 81 | " {} ", 82 | if app.validation_mode { 83 | "Validation" 84 | } else { 85 | "Execution" 86 | } 87 | ), 88 | Style::default() 89 | .bg(if app.validation_mode { 90 | Color::Yellow 91 | } else { 92 | Color::Green 93 | }) 94 | .fg(Color::Black), 95 | )); 96 | 97 | 
// Add context-specific help based on current tab 98 | status_items.push(Span::raw(" ")); 99 | let help_text = match app.selected_tab { 100 | 0 => { 101 | if let Some(idx) = app.workflow_list_state.selected() { 102 | if idx < app.workflows.len() { 103 | let workflow = &app.workflows[idx]; 104 | match workflow.status { 105 | crate::models::WorkflowStatus::NotStarted => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [t] Trigger Workflow [Shift+R] Reset workflow", 106 | crate::models::WorkflowStatus::Running => "[Space] Toggle selection [Enter] Run selected [r] Run all selected (Workflow running...)", 107 | crate::models::WorkflowStatus::Success | crate::models::WorkflowStatus::Failed | crate::models::WorkflowStatus::Skipped => "[Space] Toggle selection [Enter] Run selected [r] Run all selected [Shift+R] Reset workflow", 108 | } 109 | } else { 110 | "[Space] Toggle selection [Enter] Run selected [r] Run all selected" 111 | } 112 | } else { 113 | "[Space] Toggle selection [Enter] Run selected [r] Run all selected" 114 | } 115 | } 116 | 1 => { 117 | if app.detailed_view { 118 | "[Esc] Back to jobs [↑/↓] Navigate steps" 119 | } else { 120 | "[Enter] View details [↑/↓] Navigate jobs" 121 | } 122 | } 123 | 2 => { 124 | // For logs tab, show scrolling instructions 125 | let log_count = app.logs.len() + logging::get_logs().len(); 126 | if log_count > 0 { 127 | // Convert to a static string for consistent return type 128 | let scroll_text = format!( 129 | "[↑/↓] Scroll logs ({}/{}) [s] Search [f] Filter", 130 | app.log_scroll + 1, 131 | log_count 132 | ); 133 | Box::leak(scroll_text.into_boxed_str()) 134 | } else { 135 | "[No logs to display]" 136 | } 137 | } 138 | 3 => "[?] 
Toggle help overlay", 139 | _ => "", 140 | }; 141 | status_items.push(Span::styled( 142 | format!(" {} ", help_text), 143 | Style::default().fg(Color::White), 144 | )); 145 | 146 | // Show keybindings for common actions 147 | status_items.push(Span::raw(" ")); 148 | status_items.push(Span::styled( 149 | " [Tab] Switch tabs ", 150 | Style::default().fg(Color::White), 151 | )); 152 | status_items.push(Span::styled( 153 | " [?] Help ", 154 | Style::default().fg(Color::White), 155 | )); 156 | status_items.push(Span::styled( 157 | " [q] Quit ", 158 | Style::default().fg(Color::White), 159 | )); 160 | 161 | let status_bar = Paragraph::new(Line::from(status_items)) 162 | .style(Style::default().bg(Color::DarkGray)) 163 | .alignment(Alignment::Left); 164 | 165 | f.render_widget(status_bar, area); 166 | } 167 | -------------------------------------------------------------------------------- /crates/ui/src/views/title_bar.rs: -------------------------------------------------------------------------------- 1 | // Title bar rendering 2 | use crate::app::App; 3 | use ratatui::{ 4 | backend::CrosstermBackend, 5 | layout::{Alignment, Rect}, 6 | style::{Color, Modifier, Style}, 7 | text::{Line, Span}, 8 | widgets::{Block, BorderType, Borders, Tabs}, 9 | Frame, 10 | }; 11 | use std::io; 12 | 13 | // Render the title bar with tabs 14 | pub fn render_title_bar(f: &mut Frame>, app: &App, area: Rect) { 15 | let titles = ["Workflows", "Execution", "Logs", "Help"]; 16 | let tabs = Tabs::new( 17 | titles 18 | .iter() 19 | .enumerate() 20 | .map(|(i, t)| { 21 | if i == 1 { 22 | // Special case for "Execution" 23 | let e_part = &t[0..1]; // "E" 24 | let x_part = &t[1..2]; // "x" 25 | let rest = &t[2..]; // "ecution" 26 | Line::from(vec![ 27 | Span::styled(e_part, Style::default().fg(Color::White)), 28 | Span::styled( 29 | x_part, 30 | Style::default() 31 | .fg(Color::Yellow) 32 | .add_modifier(Modifier::UNDERLINED), 33 | ), 34 | Span::styled(rest, Style::default().fg(Color::White)), 35 | ]) 
36 | } else { 37 | // Original styling for other tabs 38 | let (first, rest) = t.split_at(1); 39 | Line::from(vec![ 40 | Span::styled( 41 | first, 42 | Style::default() 43 | .fg(Color::Yellow) 44 | .add_modifier(Modifier::UNDERLINED), 45 | ), 46 | Span::styled(rest, Style::default().fg(Color::White)), 47 | ]) 48 | } 49 | }) 50 | .collect(), 51 | ) 52 | .block( 53 | Block::default() 54 | .borders(Borders::ALL) 55 | .border_type(BorderType::Rounded) 56 | .title(Span::styled( 57 | " wrkflw ", 58 | Style::default() 59 | .fg(Color::Cyan) 60 | .add_modifier(Modifier::BOLD), 61 | )) 62 | .title_alignment(Alignment::Center), 63 | ) 64 | .highlight_style( 65 | Style::default() 66 | .bg(Color::DarkGray) 67 | .fg(Color::Yellow) 68 | .add_modifier(Modifier::BOLD), 69 | ) 70 | .select(app.selected_tab) 71 | .divider(Span::raw("|")); 72 | 73 | f.render_widget(tabs, area); 74 | } 75 | -------------------------------------------------------------------------------- /crates/ui/src/views/workflows_tab.rs: -------------------------------------------------------------------------------- 1 | // Workflows tab rendering 2 | use crate::app::App; 3 | use crate::models::WorkflowStatus; 4 | use ratatui::{ 5 | backend::CrosstermBackend, 6 | layout::{Alignment, Constraint, Direction, Layout, Rect}, 7 | style::{Color, Modifier, Style}, 8 | text::{Line, Span}, 9 | widgets::{Block, BorderType, Borders, Cell, Paragraph, Row, Table, TableState}, 10 | Frame, 11 | }; 12 | use std::io; 13 | 14 | // Render the workflow list tab 15 | pub fn render_workflows_tab( 16 | f: &mut Frame>, 17 | app: &mut App, 18 | area: Rect, 19 | ) { 20 | // Create a more structured layout for the workflow tab 21 | let chunks = Layout::default() 22 | .direction(Direction::Vertical) 23 | .constraints( 24 | [ 25 | Constraint::Length(3), // Header with instructions 26 | Constraint::Min(5), // Workflow list 27 | ] 28 | .as_ref(), 29 | ) 30 | .margin(1) 31 | .split(area); 32 | 33 | // Render header with instructions 34 | let 
header_text = vec![ 35 | Line::from(vec![Span::styled( 36 | "Available Workflows", 37 | Style::default() 38 | .fg(Color::Yellow) 39 | .add_modifier(Modifier::BOLD), 40 | )]), 41 | Line::from(vec![ 42 | Span::styled("Space", Style::default().fg(Color::Cyan)), 43 | Span::raw(": Toggle selection "), 44 | Span::styled("Enter", Style::default().fg(Color::Cyan)), 45 | Span::raw(": Run "), 46 | Span::styled("t", Style::default().fg(Color::Cyan)), 47 | Span::raw(": Trigger remotely"), 48 | ]), 49 | ]; 50 | 51 | let header = Paragraph::new(header_text) 52 | .block( 53 | Block::default() 54 | .borders(Borders::ALL) 55 | .border_type(BorderType::Rounded), 56 | ) 57 | .alignment(Alignment::Center); 58 | 59 | f.render_widget(header, chunks[0]); 60 | 61 | // Create a table for workflows instead of a list for better organization 62 | let selected_style = Style::default() 63 | .bg(Color::DarkGray) 64 | .add_modifier(Modifier::BOLD); 65 | 66 | // Normal style definition removed as it was unused 67 | 68 | let header_cells = ["", "Status", "Workflow Name", "Path"] 69 | .iter() 70 | .map(|h| Cell::from(*h).style(Style::default().fg(Color::Yellow))); 71 | 72 | let header = Row::new(header_cells) 73 | .style(Style::default().add_modifier(Modifier::BOLD)) 74 | .height(1); 75 | 76 | let rows = app.workflows.iter().map(|workflow| { 77 | // Create cells for each column 78 | let checkbox = if workflow.selected { "✓" } else { " " }; 79 | 80 | let (status_symbol, status_style) = match workflow.status { 81 | WorkflowStatus::NotStarted => ("○", Style::default().fg(Color::Gray)), 82 | WorkflowStatus::Running => ("⟳", Style::default().fg(Color::Cyan)), 83 | WorkflowStatus::Success => ("✅", Style::default().fg(Color::Green)), 84 | WorkflowStatus::Failed => ("❌", Style::default().fg(Color::Red)), 85 | WorkflowStatus::Skipped => ("⏭", Style::default().fg(Color::Yellow)), 86 | }; 87 | 88 | let path_display = workflow.path.to_string_lossy(); 89 | let path_shortened = if path_display.len() > 30 { 90 | 
format!("...{}", &path_display[path_display.len() - 30..]) 91 | } else { 92 | path_display.to_string() 93 | }; 94 | 95 | Row::new(vec![ 96 | Cell::from(checkbox).style(Style::default().fg(Color::Green)), 97 | Cell::from(status_symbol).style(status_style), 98 | Cell::from(workflow.name.clone()), 99 | Cell::from(path_shortened).style(Style::default().fg(Color::DarkGray)), 100 | ]) 101 | }); 102 | 103 | let workflows_table = Table::new(rows) 104 | .header(header) 105 | .block( 106 | Block::default() 107 | .borders(Borders::ALL) 108 | .border_type(BorderType::Rounded) 109 | .title(Span::styled( 110 | " Workflows ", 111 | Style::default().fg(Color::Yellow), 112 | )), 113 | ) 114 | .highlight_style(selected_style) 115 | .highlight_symbol("» ") 116 | .widths(&[ 117 | Constraint::Length(3), // Checkbox column 118 | Constraint::Length(4), // Status icon column 119 | Constraint::Percentage(45), // Name column 120 | Constraint::Percentage(45), // Path column 121 | ]); 122 | 123 | // We need to convert ListState to TableState 124 | let mut table_state = TableState::default(); 125 | table_state.select(app.workflow_list_state.selected()); 126 | 127 | f.render_stateful_widget(workflows_table, chunks[1], &mut table_state); 128 | 129 | // Update the app list state to match the table state 130 | app.workflow_list_state.select(table_state.selected()); 131 | } 132 | -------------------------------------------------------------------------------- /crates/utils/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "utils" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "utility functions for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | 12 | # External dependencies 13 | serde.workspace = true 14 | serde_yaml.workspace = true 15 | nix.workspace = true 16 | 
/// Heuristically decide whether `path` points at a CI workflow file.
///
/// Returns true for:
/// - GitLab CI files: `.gitlab-ci.yml` or any `*gitlab-ci.yml` file name;
/// - YAML files under a `.github/workflows` (or any `workflows`) directory;
/// - bare YAML filenames (no directory component) containing a workflow
///   indicator: "workflow", "action", "ci" or "cd".
pub fn is_workflow_file(path: &Path) -> bool {
    // First, check for GitLab CI files by name.
    if let Some(file_name) = path.file_name() {
        let file_name_str = file_name.to_string_lossy().to_lowercase();
        if file_name_str == ".gitlab-ci.yml" || file_name_str.ends_with("gitlab-ci.yml") {
            return true;
        }
    }

    // Then check for GitHub Actions workflows.
    if let Some(ext) = path.extension() {
        if ext == "yml" || ext == "yaml" {
            // FIX: Path::parent() returns Some("") for bare relative
            // filenames like "ci.yml", so the filename-keyword fallback
            // below was unreachable and bare workflow files were always
            // rejected. Only trust the parent when it is non-empty.
            if let Some(parent) = path.parent() {
                if !parent.as_os_str().is_empty() {
                    // A real directory component decides the answer.
                    return parent.ends_with(".github/workflows") || parent.ends_with("workflows");
                }
            }

            // No directory information: fall back to filename indicators.
            let filename = path
                .file_name()
                .map(|f| f.to_string_lossy().to_lowercase())
                .unwrap_or_default();

            return filename.contains("workflow")
                || filename.contains("action")
                || filename.contains("ci")
                || filename.contains("cd");
        }
    }
    false
}
stderr_backup = match dup(STDERR_FILENO) { 60 | Ok(fd) => fd, 61 | Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)), 62 | }; 63 | 64 | // Open /dev/null 65 | let null_fd = match open(Path::new("/dev/null"), OFlag::O_WRONLY, Mode::empty()) { 66 | Ok(fd) => fd, 67 | Err(e) => { 68 | let _ = close(stderr_backup); // Clean up on error 69 | return Err(io::Error::new(io::ErrorKind::Other, e)); 70 | } 71 | }; 72 | 73 | // Redirect stderr to /dev/null 74 | if let Err(e) = dup2(null_fd, STDERR_FILENO) { 75 | let _ = close(stderr_backup); // Clean up on error 76 | let _ = close(null_fd); 77 | return Err(io::Error::new(io::ErrorKind::Other, e)); 78 | } 79 | 80 | Ok(RedirectedStderr { 81 | original_fd: Some(stderr_backup), 82 | null_fd: Some(null_fd), 83 | }) 84 | } 85 | } 86 | 87 | impl Drop for RedirectedStderr { 88 | /// Automatically restores stderr when the RedirectedStderr is dropped 89 | fn drop(&mut self) { 90 | if let Some(orig_fd) = self.original_fd.take() { 91 | // Restore the original stderr 92 | let _ = dup2(orig_fd, STDERR_FILENO); 93 | let _ = close(orig_fd); 94 | } 95 | 96 | // Close the null fd 97 | if let Some(null_fd) = self.null_fd.take() { 98 | let _ = close(null_fd); 99 | } 100 | } 101 | } 102 | 103 | /// Run a function with stderr redirected to /dev/null, then restore stderr 104 | pub fn with_stderr_to_null(f: F) -> Result 105 | where 106 | F: FnOnce() -> T, 107 | { 108 | let _redirected = RedirectedStderr::to_null()?; 109 | Ok(f()) 110 | } 111 | } 112 | 113 | #[cfg(test)] 114 | mod tests { 115 | use super::*; 116 | 117 | #[test] 118 | fn test_fd_redirection() { 119 | // This test will write to stderr, which should be redirected 120 | let result = fd::with_stderr_to_null(|| { 121 | // This would normally appear in stderr 122 | eprintln!("This should be redirected to /dev/null"); 123 | // Return a test value to verify the function passes through the result 124 | 42 125 | }); 126 | 127 | // The function should succeed and return our test value 
128 | assert!(result.is_ok()); 129 | assert_eq!(result.unwrap(), 42); 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /crates/validators/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "validators" 3 | version.workspace = true 4 | edition.workspace = true 5 | description = "validation functionality for wrkflw" 6 | license.workspace = true 7 | 8 | [dependencies] 9 | # Internal crates 10 | models = { path = "../models" } 11 | matrix = { path = "../matrix" } 12 | 13 | # External dependencies 14 | serde.workspace = true 15 | serde_yaml.workspace = true 16 | -------------------------------------------------------------------------------- /crates/validators/src/actions.rs: -------------------------------------------------------------------------------- 1 | use models::ValidationResult; 2 | 3 | pub fn validate_action_reference( 4 | action_ref: &str, 5 | job_name: &str, 6 | step_idx: usize, 7 | result: &mut ValidationResult, 8 | ) { 9 | // Check if it's a local action (starts with ./) 10 | let is_local_action = action_ref.starts_with("./"); 11 | 12 | // For non-local actions, enforce standard format 13 | if !is_local_action && !action_ref.contains('/') && !action_ref.contains('.') { 14 | result.add_issue(format!( 15 | "Job '{}', step {}: Invalid action reference format '{}'", 16 | job_name, 17 | step_idx + 1, 18 | action_ref 19 | )); 20 | return; 21 | } 22 | 23 | // Check for version tag or commit SHA, but only for non-local actions 24 | if !is_local_action && action_ref.contains('@') { 25 | let parts: Vec<&str> = action_ref.split('@').collect(); 26 | if parts.len() != 2 || parts[1].is_empty() { 27 | result.add_issue(format!( 28 | "Job '{}', step {}: Action '{}' has invalid version/ref format", 29 | job_name, 30 | step_idx + 1, 31 | action_ref 32 | )); 33 | } 34 | } else if !is_local_action { 35 | // Missing version tag is not recommended for 
non-local actions 36 | result.add_issue(format!( 37 | "Job '{}', step {}: Action '{}' is missing version tag (@v2, @main, etc.)", 38 | job_name, 39 | step_idx + 1, 40 | action_ref 41 | )); 42 | } 43 | 44 | // For local actions, verify the path exists 45 | if is_local_action { 46 | let action_path = std::path::Path::new(action_ref); 47 | if !action_path.exists() { 48 | // We can't reliably check this during validation since the working directory 49 | // might not be the repository root, but we'll add a warning 50 | result.add_issue(format!( 51 | "Job '{}', step {}: Local action path '{}' may not exist at runtime", 52 | job_name, 53 | step_idx + 1, 54 | action_ref 55 | )); 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /crates/validators/src/gitlab.rs: -------------------------------------------------------------------------------- 1 | use models::gitlab::{Job, Pipeline}; 2 | use models::ValidationResult; 3 | use std::collections::HashMap; 4 | 5 | /// Validate a GitLab CI/CD pipeline 6 | pub fn validate_gitlab_pipeline(pipeline: &Pipeline) -> ValidationResult { 7 | let mut result = ValidationResult::new(); 8 | 9 | // Basic structure validation 10 | if pipeline.jobs.is_empty() { 11 | result.add_issue("Pipeline must contain at least one job".to_string()); 12 | } 13 | 14 | // Validate jobs 15 | validate_jobs(&pipeline.jobs, &mut result); 16 | 17 | // Validate stages if defined 18 | if let Some(stages) = &pipeline.stages { 19 | validate_stages(stages, &pipeline.jobs, &mut result); 20 | } 21 | 22 | // Validate dependencies 23 | validate_dependencies(&pipeline.jobs, &mut result); 24 | 25 | // Validate extends 26 | validate_extends(&pipeline.jobs, &mut result); 27 | 28 | // Validate artifacts 29 | validate_artifacts(&pipeline.jobs, &mut result); 30 | 31 | result 32 | } 33 | 34 | /// Validate GitLab CI/CD jobs 35 | fn validate_jobs(jobs: &HashMap, result: &mut ValidationResult) { 36 | for (job_name, job) in jobs { 
37 | // Skip template jobs 38 | if let Some(true) = job.template { 39 | continue; 40 | } 41 | 42 | // Check for script or extends 43 | if job.script.is_none() && job.extends.is_none() { 44 | result.add_issue(format!( 45 | "Job '{}' must have a script section or extend another job", 46 | job_name 47 | )); 48 | } 49 | 50 | // Check when value if present 51 | if let Some(when) = &job.when { 52 | match when.as_str() { 53 | "on_success" | "on_failure" | "always" | "manual" | "never" => { 54 | // Valid when value 55 | } 56 | _ => { 57 | result.add_issue(format!( 58 | "Job '{}' has invalid 'when' value: '{}'. Valid values are: on_success, on_failure, always, manual, never", 59 | job_name, when 60 | )); 61 | } 62 | } 63 | } 64 | 65 | // Check retry configuration 66 | if let Some(retry) = &job.retry { 67 | match retry { 68 | models::gitlab::Retry::MaxAttempts(attempts) => { 69 | if *attempts > 10 { 70 | result.add_issue(format!( 71 | "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste", 72 | job_name, attempts 73 | )); 74 | } 75 | } 76 | models::gitlab::Retry::Detailed { max, when: _ } => { 77 | if *max > 10 { 78 | result.add_issue(format!( 79 | "Job '{}' has excessive retry count: {}. Consider reducing to avoid resource waste", 80 | job_name, max 81 | )); 82 | } 83 | } 84 | } 85 | } 86 | } 87 | } 88 | 89 | /// Validate GitLab CI/CD stages 90 | fn validate_stages(stages: &[String], jobs: &HashMap, result: &mut ValidationResult) { 91 | // Check that all jobs reference existing stages 92 | for (job_name, job) in jobs { 93 | if let Some(stage) = &job.stage { 94 | if !stages.contains(stage) { 95 | result.add_issue(format!( 96 | "Job '{}' references undefined stage '{}'. 
Available stages are: {}", 97 | job_name, 98 | stage, 99 | stages.join(", ") 100 | )); 101 | } 102 | } 103 | } 104 | 105 | // Check for unused stages 106 | for stage in stages { 107 | let used = jobs.values().any(|job| { 108 | if let Some(job_stage) = &job.stage { 109 | job_stage == stage 110 | } else { 111 | false 112 | } 113 | }); 114 | 115 | if !used { 116 | result.add_issue(format!( 117 | "Stage '{}' is defined but not used by any job", 118 | stage 119 | )); 120 | } 121 | } 122 | } 123 | 124 | /// Validate GitLab CI/CD job dependencies 125 | fn validate_dependencies(jobs: &HashMap, result: &mut ValidationResult) { 126 | for (job_name, job) in jobs { 127 | if let Some(dependencies) = &job.dependencies { 128 | for dependency in dependencies { 129 | if !jobs.contains_key(dependency) { 130 | result.add_issue(format!( 131 | "Job '{}' depends on undefined job '{}'", 132 | job_name, dependency 133 | )); 134 | } else if job_name == dependency { 135 | result.add_issue(format!("Job '{}' cannot depend on itself", job_name)); 136 | } 137 | } 138 | } 139 | } 140 | } 141 | 142 | /// Validate GitLab CI/CD job extends 143 | fn validate_extends(jobs: &HashMap, result: &mut ValidationResult) { 144 | // Check for circular extends 145 | for (job_name, job) in jobs { 146 | if let Some(extends) = &job.extends { 147 | // Check that all extended jobs exist 148 | for extend in extends { 149 | if !jobs.contains_key(extend) { 150 | result.add_issue(format!( 151 | "Job '{}' extends undefined job '{}'", 152 | job_name, extend 153 | )); 154 | continue; 155 | } 156 | 157 | // Check for circular extends 158 | let mut visited = vec![job_name.clone()]; 159 | check_circular_extends(extend, jobs, &mut visited, result); 160 | } 161 | } 162 | } 163 | } 164 | 165 | /// Helper function to detect circular extends 166 | fn check_circular_extends( 167 | job_name: &str, 168 | jobs: &HashMap, 169 | visited: &mut Vec, 170 | result: &mut ValidationResult, 171 | ) { 172 | visited.push(job_name.to_string()); 
173 | 174 | if let Some(job) = jobs.get(job_name) { 175 | if let Some(extends) = &job.extends { 176 | for extend in extends { 177 | if visited.contains(&extend.to_string()) { 178 | // Circular dependency detected 179 | let cycle = visited 180 | .iter() 181 | .skip(visited.iter().position(|x| x == extend).unwrap()) 182 | .chain(std::iter::once(extend)) 183 | .cloned() 184 | .collect::>() 185 | .join(" -> "); 186 | 187 | result.add_issue(format!("Circular extends detected: {}", cycle)); 188 | return; 189 | } 190 | 191 | check_circular_extends(extend, jobs, visited, result); 192 | } 193 | } 194 | } 195 | 196 | visited.pop(); 197 | } 198 | 199 | /// Validate GitLab CI/CD job artifacts 200 | fn validate_artifacts(jobs: &HashMap, result: &mut ValidationResult) { 201 | for (job_name, job) in jobs { 202 | if let Some(artifacts) = &job.artifacts { 203 | // Check that paths are specified 204 | if let Some(paths) = &artifacts.paths { 205 | if paths.is_empty() { 206 | result.add_issue(format!( 207 | "Job '{}' has artifacts section with empty paths", 208 | job_name 209 | )); 210 | } 211 | } else { 212 | result.add_issue(format!( 213 | "Job '{}' has artifacts section without specifying paths", 214 | job_name 215 | )); 216 | } 217 | 218 | // Check for valid 'when' value if present 219 | if let Some(when) = &artifacts.when { 220 | match when.as_str() { 221 | "on_success" | "on_failure" | "always" => { 222 | // Valid when value 223 | } 224 | _ => { 225 | result.add_issue(format!( 226 | "Job '{}' has artifacts with invalid 'when' value: '{}'. 
Valid values are: on_success, on_failure, always", 227 | job_name, when 228 | )); 229 | } 230 | } 231 | } 232 | } 233 | } 234 | } 235 | -------------------------------------------------------------------------------- /crates/validators/src/jobs.rs: -------------------------------------------------------------------------------- 1 | use crate::{validate_matrix, validate_steps}; 2 | use models::ValidationResult; 3 | use serde_yaml::Value; 4 | 5 | pub fn validate_jobs(jobs: &Value, result: &mut ValidationResult) { 6 | if let Value::Mapping(jobs_map) = jobs { 7 | if jobs_map.is_empty() { 8 | result.add_issue("'jobs' section is empty".to_string()); 9 | return; 10 | } 11 | 12 | for (job_name, job_config) in jobs_map { 13 | if let Some(job_name) = job_name.as_str() { 14 | if let Some(job_config) = job_config.as_mapping() { 15 | // Check if this is a reusable workflow job (has 'uses' field) 16 | let is_reusable_workflow = 17 | job_config.contains_key(Value::String("uses".to_string())); 18 | 19 | // Only check for 'runs-on' if it's not a reusable workflow 20 | if !is_reusable_workflow 21 | && !job_config.contains_key(Value::String("runs-on".to_string())) 22 | { 23 | result.add_issue(format!("Job '{}' is missing 'runs-on' field", job_name)); 24 | } 25 | 26 | // Only check for steps if it's not a reusable workflow 27 | if !is_reusable_workflow { 28 | match job_config.get(Value::String("steps".to_string())) { 29 | Some(Value::Sequence(steps)) => { 30 | if steps.is_empty() { 31 | result.add_issue(format!( 32 | "Job '{}' has empty 'steps' section", 33 | job_name 34 | )); 35 | } else { 36 | validate_steps(steps, job_name, result); 37 | } 38 | } 39 | Some(_) => { 40 | result.add_issue(format!( 41 | "Job '{}': 'steps' section is not a sequence", 42 | job_name 43 | )); 44 | } 45 | None => { 46 | result.add_issue(format!( 47 | "Job '{}' is missing 'steps' section", 48 | job_name 49 | )); 50 | } 51 | } 52 | } else { 53 | // For reusable workflows, validate the 'uses' field format 54 | 
if let Some(Value::String(uses)) = 55 | job_config.get(Value::String("uses".to_string())) 56 | { 57 | // Simple validation for reusable workflow reference format 58 | if !uses.contains('/') || !uses.contains('.') { 59 | result.add_issue(format!( 60 | "Job '{}': Invalid reusable workflow reference format '{}'", 61 | job_name, uses 62 | )); 63 | } 64 | } 65 | } 66 | 67 | // Check for job dependencies 68 | if let Some(Value::Sequence(needs)) = 69 | job_config.get(Value::String("needs".to_string())) 70 | { 71 | for need in needs { 72 | if let Some(need_str) = need.as_str() { 73 | if !jobs_map.contains_key(Value::String(need_str.to_string())) { 74 | result.add_issue(format!( 75 | "Job '{}' depends on non-existent job '{}'", 76 | job_name, need_str 77 | )); 78 | } 79 | } 80 | } 81 | } else if let Some(Value::String(need)) = 82 | job_config.get(Value::String("needs".to_string())) 83 | { 84 | if !jobs_map.contains_key(Value::String(need.clone())) { 85 | result.add_issue(format!( 86 | "Job '{}' depends on non-existent job '{}'", 87 | job_name, need 88 | )); 89 | } 90 | } 91 | 92 | // Validate matrix configuration if present 93 | if let Some(matrix) = job_config.get(Value::String("matrix".to_string())) { 94 | validate_matrix(matrix, result); 95 | } 96 | } else { 97 | result.add_issue(format!("Job '{}' configuration is not a mapping", job_name)); 98 | } 99 | } 100 | } 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /crates/validators/src/lib.rs: -------------------------------------------------------------------------------- 1 | // validators crate 2 | 3 | mod actions; 4 | mod gitlab; 5 | mod jobs; 6 | mod matrix; 7 | mod steps; 8 | mod triggers; 9 | 10 | pub use actions::validate_action_reference; 11 | pub use gitlab::validate_gitlab_pipeline; 12 | pub use jobs::validate_jobs; 13 | pub use matrix::validate_matrix; 14 | pub use steps::validate_steps; 15 | pub use triggers::validate_triggers; 16 | 
-------------------------------------------------------------------------------- /crates/validators/src/matrix.rs: -------------------------------------------------------------------------------- 1 | use models::ValidationResult; 2 | use serde_yaml::Value; 3 | 4 | pub fn validate_matrix(matrix: &Value, result: &mut ValidationResult) { 5 | // Check if matrix is a mapping 6 | if !matrix.is_mapping() { 7 | result.add_issue("Matrix must be a mapping".to_string()); 8 | return; 9 | } 10 | 11 | // Check for include and exclude sections 12 | if let Some(include) = matrix.get("include") { 13 | validate_include_exclude(include, "include", result); 14 | } 15 | 16 | if let Some(exclude) = matrix.get("exclude") { 17 | validate_include_exclude(exclude, "exclude", result); 18 | } 19 | 20 | // Check max-parallel 21 | if let Some(max_parallel) = matrix.get("max-parallel") { 22 | if !max_parallel.is_number() { 23 | result.add_issue("max-parallel must be a number".to_string()); 24 | } else if let Some(value) = max_parallel.as_u64() { 25 | if value == 0 { 26 | result.add_issue("max-parallel must be greater than 0".to_string()); 27 | } 28 | } 29 | } 30 | 31 | // Check fail-fast 32 | if let Some(fail_fast) = matrix.get("fail-fast") { 33 | if !fail_fast.is_bool() { 34 | result.add_issue("fail-fast must be a boolean".to_string()); 35 | } 36 | } 37 | 38 | // Validate the main matrix parameters (excluding special keywords) 39 | let special_keys = ["include", "exclude", "max-parallel", "fail-fast"]; 40 | 41 | // Use if let to avoid unwrap 42 | if let Some(mapping) = matrix.as_mapping() { 43 | for (key, value) in mapping { 44 | // Safely get the key string, using an empty string as fallback 45 | let key_str = key.as_str().unwrap_or(""); 46 | if !special_keys.contains(&key_str) { 47 | validate_matrix_parameter(key_str, value, result); 48 | } 49 | } 50 | } else { 51 | // This is a safeguard, though we already checked if it's a mapping above 52 | result.add_issue("Failed to process matrix 
mapping".to_string()); 53 | } 54 | } 55 | 56 | fn validate_include_exclude(section: &Value, section_name: &str, result: &mut ValidationResult) { 57 | if !section.is_sequence() { 58 | result.add_issue(format!("{} must be an array of objects", section_name)); 59 | return; 60 | } 61 | 62 | // Check each item in the include/exclude array 63 | // Use if let to avoid unwrap 64 | if let Some(sequence) = section.as_sequence() { 65 | for (index, item) in sequence.iter().enumerate() { 66 | if !item.is_mapping() { 67 | result.add_issue(format!( 68 | "{} item at index {} must be an object", 69 | section_name, index 70 | )); 71 | } 72 | } 73 | } else { 74 | // This is a safeguard, though we already checked if it's a sequence above 75 | result.add_issue(format!("Failed to process {} sequence", section_name)); 76 | } 77 | } 78 | 79 | fn validate_matrix_parameter(name: &str, value: &Value, result: &mut ValidationResult) { 80 | // Basic matrix parameters should be arrays or simple values 81 | match value { 82 | Value::Sequence(_) => { 83 | // Check that each item in the array has a consistent type 84 | if let Some(seq) = value.as_sequence() { 85 | if !seq.is_empty() { 86 | let first_type = get_value_type(&seq[0]); 87 | 88 | for (i, item) in seq.iter().enumerate().skip(1) { 89 | let item_type = get_value_type(item); 90 | if item_type != first_type { 91 | result.add_issue(format!( 92 | "Matrix parameter '{}' has inconsistent types: item at index {} is {}, but expected {}", 93 | name, i, item_type, first_type 94 | )); 95 | } 96 | } 97 | } 98 | } 99 | } 100 | Value::Mapping(_) => { 101 | // For object-based parameters, make sure they have valid structure 102 | // Here we just check if it's a mapping, but could add more validation 103 | } 104 | // Other types (string, number, bool) are valid as single values 105 | _ => (), 106 | } 107 | } 108 | 109 | fn get_value_type(value: &Value) -> &'static str { 110 | match value { 111 | Value::Null => "null", 112 | Value::Bool(_) => "boolean", 113 
| Value::Number(_) => "number", 114 | Value::String(_) => "string", 115 | Value::Sequence(_) => "array", 116 | Value::Mapping(_) => "object", 117 | _ => "unknown", 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /crates/validators/src/steps.rs: -------------------------------------------------------------------------------- 1 | use crate::validate_action_reference; 2 | use models::ValidationResult; 3 | use serde_yaml::Value; 4 | 5 | pub fn validate_steps(steps: &[Value], job_name: &str, result: &mut ValidationResult) { 6 | for (i, step) in steps.iter().enumerate() { 7 | if let Some(step_map) = step.as_mapping() { 8 | if !step_map.contains_key(Value::String("name".to_string())) 9 | && !step_map.contains_key(Value::String("uses".to_string())) 10 | && !step_map.contains_key(Value::String("run".to_string())) 11 | { 12 | result.add_issue(format!( 13 | "Job '{}', step {}: Missing 'name', 'uses', or 'run' field", 14 | job_name, 15 | i + 1 16 | )); 17 | } 18 | 19 | // Check for both 'uses' and 'run' in the same step 20 | if step_map.contains_key(Value::String("uses".to_string())) 21 | && step_map.contains_key(Value::String("run".to_string())) 22 | { 23 | result.add_issue(format!( 24 | "Job '{}', step {}: Contains both 'uses' and 'run' (should only use one)", 25 | job_name, 26 | i + 1 27 | )); 28 | } 29 | 30 | // Validate action reference if 'uses' is present 31 | if let Some(Value::String(uses)) = step_map.get(Value::String("uses".to_string())) { 32 | validate_action_reference(uses, job_name, i, result); 33 | } 34 | } else { 35 | result.add_issue(format!( 36 | "Job '{}', step {}: Not a valid mapping", 37 | job_name, 38 | i + 1 39 | )); 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /crates/validators/src/triggers.rs: -------------------------------------------------------------------------------- 1 | use models::ValidationResult; 2 | use serde_yaml::Value; 3 | 
4 | pub fn validate_triggers(on: &Value, result: &mut ValidationResult) { 5 | let valid_events = vec![ 6 | "branch_protection_rule", 7 | "check_run", 8 | "check_suite", 9 | "create", 10 | "delete", 11 | "deployment", 12 | "deployment_status", 13 | "discussion", 14 | "discussion_comment", 15 | "fork", 16 | "gollum", 17 | "issue_comment", // Covers comments on PRs that are not part of a diff 18 | "issues", 19 | "label", 20 | "merge_group", 21 | "milestone", 22 | "page_build", 23 | "public", 24 | "pull_request", 25 | "pull_request_review", 26 | "pull_request_review_comment", 27 | "pull_request_target", 28 | "push", 29 | "registry_package", 30 | "release", 31 | "repository_dispatch", 32 | "schedule", 33 | "status", 34 | "watch", 35 | "workflow_call", 36 | "workflow_dispatch", 37 | "workflow_run", 38 | ]; 39 | 40 | match on { 41 | Value::String(event) => { 42 | if !valid_events.contains(&event.as_str()) { 43 | result.add_issue(format!("Unknown trigger event: '{}'", event)); 44 | } 45 | } 46 | Value::Sequence(events) => { 47 | for event in events { 48 | if let Some(event_str) = event.as_str() { 49 | if !valid_events.contains(&event_str) { 50 | result.add_issue(format!("Unknown trigger event: '{}'", event_str)); 51 | } 52 | } 53 | } 54 | } 55 | Value::Mapping(event_map) => { 56 | for (event, _) in event_map { 57 | if let Some(event_str) = event.as_str() { 58 | if !valid_events.contains(&event_str) { 59 | result.add_issue(format!("Unknown trigger event: '{}'", event_str)); 60 | } 61 | } 62 | } 63 | 64 | // Check schedule syntax if present 65 | if let Some(Value::Sequence(schedules)) = 66 | event_map.get(Value::String("schedule".to_string())) 67 | { 68 | for schedule in schedules { 69 | if let Some(schedule_map) = schedule.as_mapping() { 70 | if let Some(Value::String(cron)) = 71 | schedule_map.get(Value::String("cron".to_string())) 72 | { 73 | validate_cron_syntax(cron, result); 74 | } else { 75 | result.add_issue("Schedule is missing 'cron' expression".to_string()); 76 | 
} 77 | } 78 | } 79 | } 80 | } 81 | _ => { 82 | result.add_issue("'on' section has invalid format".to_string()); 83 | } 84 | } 85 | } 86 | 87 | fn validate_cron_syntax(cron: &str, result: &mut ValidationResult) { 88 | // Basic validation of cron syntax 89 | let parts: Vec<&str> = cron.split_whitespace().collect(); 90 | if parts.len() != 5 { 91 | result.add_issue(format!( 92 | "Invalid cron syntax '{}': should have 5 components", 93 | cron 94 | )); 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /crates/wrkflw/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "wrkflw" 3 | version.workspace = true 4 | edition.workspace = true 5 | description.workspace = true 6 | documentation.workspace = true 7 | homepage.workspace = true 8 | repository.workspace = true 9 | keywords.workspace = true 10 | categories.workspace = true 11 | license.workspace = true 12 | 13 | [dependencies] 14 | # Workspace crates 15 | models = { path = "../models" } 16 | executor = { path = "../executor" } 17 | github = { path = "../github" } 18 | gitlab = { path = "../gitlab" } 19 | logging = { path = "../logging" } 20 | matrix = { path = "../matrix" } 21 | parser = { path = "../parser" } 22 | runtime = { path = "../runtime" } 23 | ui = { path = "../ui" } 24 | utils = { path = "../utils" } 25 | validators = { path = "../validators" } 26 | evaluator = { path = "../evaluator" } 27 | 28 | # External dependencies 29 | clap.workspace = true 30 | bollard.workspace = true 31 | tokio.workspace = true 32 | futures-util.workspace = true 33 | futures.workspace = true 34 | chrono.workspace = true 35 | uuid.workspace = true 36 | tempfile.workspace = true 37 | dirs.workspace = true 38 | thiserror.workspace = true 39 | log.workspace = true 40 | regex.workspace = true 41 | lazy_static.workspace = true 42 | reqwest.workspace = true 43 | libc.workspace = true 44 | nix.workspace = true 45 | 
urlencoding.workspace = true 46 | serde.workspace = true 47 | serde_yaml.workspace = true 48 | serde_json.workspace = true 49 | colored.workspace = true 50 | indexmap.workspace = true 51 | rayon.workspace = true 52 | num_cpus.workspace = true 53 | itertools.workspace = true 54 | once_cell.workspace = true 55 | crossterm.workspace = true 56 | ratatui.workspace = true 57 | walkdir = "2.4" 58 | 59 | [lib] 60 | name = "wrkflw_lib" 61 | path = "src/lib.rs" 62 | 63 | [[bin]] 64 | name = "wrkflw" 65 | path = "src/main.rs" -------------------------------------------------------------------------------- /crates/wrkflw/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub use evaluator; 2 | pub use executor; 3 | pub use github; 4 | pub use gitlab; 5 | pub use logging; 6 | pub use matrix; 7 | pub use models; 8 | pub use parser; 9 | pub use runtime; 10 | pub use ui; 11 | pub use utils; 12 | pub use validators; 13 | -------------------------------------------------------------------------------- /demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bahdotsh/wrkflw/d5d1904d0a45477fe6cb6818e1e79a1971d8e789/demo.gif -------------------------------------------------------------------------------- /test-workflows/1-basic-workflow.yml: -------------------------------------------------------------------------------- 1 | name: Basic Workflow 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Set up Node.js 15 | uses: actions/setup-node@v3 16 | with: 17 | node-version: '16' 18 | - name: Install dependencies 19 | run: npm ci 20 | - name: Run tests 21 | run: npm test -------------------------------------------------------------------------------- /test-workflows/2-reusable-workflow-caller.yml: 
-------------------------------------------------------------------------------- 1 | name: Reusable Workflow Caller 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | 7 | jobs: 8 | call-workflow-1: 9 | uses: octo-org/example-repo/.github/workflows/workflow-A.yml@v1 10 | 11 | call-workflow-2: 12 | uses: ./local-workflows/build.yml 13 | with: 14 | config-path: ./config/test.yml 15 | secrets: 16 | token: ${{ secrets.GITHUB_TOKEN }} 17 | 18 | call-workflow-3: 19 | uses: octo-org/example-repo/.github/workflows/workflow-B.yml@main 20 | needs: [call-workflow-1] -------------------------------------------------------------------------------- /test-workflows/3-reusable-workflow-definition.yml: -------------------------------------------------------------------------------- 1 | name: Reusable Workflow Definition 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | config-path: 7 | required: true 8 | type: string 9 | description: "Path to the configuration file" 10 | environment: 11 | required: false 12 | type: string 13 | default: "production" 14 | description: "Environment to run in" 15 | secrets: 16 | token: 17 | required: true 18 | description: "GitHub token for authentication" 19 | 20 | jobs: 21 | reusable-job: 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Load configuration 26 | run: echo "Loading configuration from ${{ inputs.config-path }}" 27 | - name: Run in environment 28 | run: echo "Running in ${{ inputs.environment }} environment" 29 | - name: Use secret 30 | run: echo "Using secret with length ${#TOKEN}" 31 | env: 32 | TOKEN: ${{ secrets.token }} -------------------------------------------------------------------------------- /test-workflows/4-mixed-jobs.yml: -------------------------------------------------------------------------------- 1 | name: Mixed Regular and Reusable Jobs 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | 7 | jobs: 8 | regular-job: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v3 12 | - 
name: Run regular task 13 | run: echo "This is a regular job" 14 | 15 | reusable-job: 16 | uses: octo-org/example-repo/.github/workflows/reusable.yml@main 17 | with: 18 | parameter: "value" 19 | 20 | dependent-job: 21 | runs-on: ubuntu-latest 22 | needs: [regular-job, reusable-job] 23 | steps: 24 | - name: Run dependent task 25 | run: echo "This job depends on both a regular and reusable job" -------------------------------------------------------------------------------- /test-workflows/5-no-name-reusable-caller.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: [main] 4 | 5 | jobs: 6 | call-workflow-1: 7 | uses: octo-org/example-repo/.github/workflows/workflow-A.yml@v1 8 | 9 | call-workflow-2: 10 | uses: ./local-workflows/build.yml 11 | with: 12 | config-path: ./config/test.yml -------------------------------------------------------------------------------- /test-workflows/6-invalid-reusable-format.yml: -------------------------------------------------------------------------------- 1 | name: Invalid Reusable Format 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | 7 | jobs: 8 | valid-job: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Test step 12 | run: echo "This is a valid job" 13 | 14 | invalid-reusable-job: 15 | uses: invalid-format 16 | with: 17 | param: "value" -------------------------------------------------------------------------------- /test-workflows/7-invalid-regular-job.yml: -------------------------------------------------------------------------------- 1 | name: Invalid Regular Job 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | 7 | jobs: 8 | job-missing-runs-on: 9 | # Missing runs-on field 10 | steps: 11 | - name: Test step 12 | run: echo "This job is missing runs-on field" 13 | 14 | job-missing-steps: 15 | runs-on: ubuntu-latest 16 | # Missing steps section 17 | 18 | valid-reusable-job: 19 | uses: octo-org/example-repo/.github/workflows/reusable.yml@main 
-------------------------------------------------------------------------------- /test-workflows/8-cyclic-dependencies.yml: -------------------------------------------------------------------------------- 1 | name: Cyclic Dependencies 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | 7 | jobs: 8 | job-a: 9 | runs-on: ubuntu-latest 10 | needs: [job-c] 11 | steps: 12 | - name: Job A 13 | run: echo "Job A" 14 | 15 | job-b: 16 | runs-on: ubuntu-latest 17 | needs: [job-a] 18 | steps: 19 | - name: Job B 20 | run: echo "Job B" 21 | 22 | job-c: 23 | runs-on: ubuntu-latest 24 | needs: [job-b] 25 | steps: 26 | - name: Job C 27 | run: echo "Job C" 28 | 29 | reusable-job: 30 | uses: octo-org/example-repo/.github/workflows/reusable.yml@main 31 | needs: [job-a] -------------------------------------------------------------------------------- /test-workflows/cpp-test.yml: -------------------------------------------------------------------------------- 1 | name: C++ Test 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | 8 | jobs: 9 | test: 10 | name: Test C++ 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v3 16 | 17 | - name: Setup GCC 18 | uses: egor-tensin/setup-gcc@v1 19 | with: 20 | version: 11 21 | 22 | - name: Check GCC version 23 | run: g++ --version 24 | 25 | - name: Create simple program 26 | run: | 27 | echo '#include ' > hello.cpp 28 | echo 'int main() {' >> hello.cpp 29 | echo ' std::cout << "Hello from C++!" 
<< std::endl;' >> hello.cpp 30 | echo ' std::cout << "Running on GCC" << std::endl;' >> hello.cpp 31 | echo ' return 0;' >> hello.cpp 32 | echo '}' >> hello.cpp 33 | 34 | - name: Build C++ program 35 | run: g++ hello.cpp -o hello 36 | 37 | - name: Run C++ program 38 | run: ./hello -------------------------------------------------------------------------------- /test-workflows/example.yml: -------------------------------------------------------------------------------- 1 | name: Basic Workflow Example 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["main"] 8 | 9 | env: 10 | GLOBAL_VAR: "global value" 11 | 12 | jobs: 13 | test-job: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Echo Hello 17 | run: echo "Hello World" 18 | 19 | - name: Show Environment 20 | run: echo "Using global var: $GLOBAL_VAR" 21 | 22 | - name: Run Multiple Commands 23 | run: | 24 | echo "This is a multi-line command" 25 | echo "Current directory: $PWD" 26 | ls -la -------------------------------------------------------------------------------- /test-workflows/matrix-example.yml: -------------------------------------------------------------------------------- 1 | name: Matrix Example 2 | 3 | triggers: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["main"] 8 | 9 | env: 10 | GLOBAL_VAR: "This applies to all jobs" 11 | 12 | jobs: 13 | test: 14 | name: "Test" 15 | strategy: 16 | matrix: 17 | os: [ubuntu-latest, windows-latest, macos-latest] 18 | node-version: [14, 16, 18] 19 | include: 20 | - os: ubuntu-latest 21 | node-version: 20 22 | experimental: true 23 | exclude: 24 | - os: windows-latest 25 | node-version: 14 26 | fail-fast: false 27 | max-parallel: 2 28 | 29 | steps: 30 | - name: Checkout code 31 | uses: actions/checkout@v3 32 | 33 | - name: Setup Node.js 34 | uses: actions/setup-node@v3 35 | with: 36 | node-version: ${{ matrix.node-version }} 37 | 38 | - name: Show configuration 39 | run: | 40 | echo "Running on: ${{ matrix.os }}" 41 | 
echo "Node version: ${{ matrix.node-version }}" 42 | if [ "${{ matrix.experimental }}" = "true" ]; then 43 | echo "This is an experimental configuration" 44 | fi -------------------------------------------------------------------------------- /test-workflows/node-test.yml: -------------------------------------------------------------------------------- 1 | name: Node.js Test 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | 8 | jobs: 9 | test: 10 | name: Test Node.js 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v3 16 | 17 | - name: Setup Node.js 18 | uses: actions/setup-node@v3 19 | with: 20 | node-version: '16.x' 21 | 22 | - name: Check Node.js version 23 | run: node --version 24 | 25 | - name: Create simple script 26 | run: | 27 | echo 'console.log("Hello from Node.js!");' > test.js 28 | echo 'console.log(`Node.js version: ${process.version}`);' >> test.js 29 | 30 | - name: Run Node.js script 31 | run: node test.js -------------------------------------------------------------------------------- /test-workflows/python-test.yml: -------------------------------------------------------------------------------- 1 | name: Python Test 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | 8 | jobs: 9 | test: 10 | name: Test Python 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v3 16 | 17 | - name: Setup Python 18 | uses: actions/setup-python@v4 19 | with: 20 | python-version: '3.9' 21 | 22 | - name: Check Python version 23 | run: python3 --version 24 | 25 | - name: Create simple script 26 | run: | 27 | echo 'print("Hello from Python!")' > test.py 28 | echo 'import sys; print(f"Python version: {sys.version}")' >> test.py 29 | 30 | - name: Run Python script 31 | run: python3 test.py -------------------------------------------------------------------------------- /test-workflows/rust-test.yml: 
-------------------------------------------------------------------------------- 1 | name: Rust Test 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | 8 | jobs: 9 | test: 10 | name: Test Rust 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v3 16 | 17 | - name: Setup Rust 18 | uses: actions-rs/toolchain@v1 19 | with: 20 | toolchain: stable 21 | profile: minimal 22 | override: true 23 | 24 | - name: Check Rust version 25 | run: rustc --version 26 | 27 | - name: Create simple program 28 | run: | 29 | echo 'fn main() {' > hello.rs 30 | echo ' println!("Hello from Rust!");' >> hello.rs 31 | echo ' println!("Running on Rust");' >> hello.rs 32 | echo '}' >> hello.rs 33 | 34 | - name: Build Rust program 35 | run: rustc hello.rs -o hello 36 | 37 | - name: Run Rust program 38 | run: ./hello -------------------------------------------------------------------------------- /test-workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | test: 8 | runs-on: 'ubuntu-latest' 9 | steps: 10 | - name: Hey 11 | run: | 12 | echo hello && echo world -------------------------------------------------------------------------------- /test-workflows/trigger_gitlab.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Example script to trigger GitLab pipelines using wrkflw 3 | 4 | # Check if GITLAB_TOKEN is set 5 | if [ -z "${GITLAB_TOKEN}" ]; then 6 | echo "Error: GITLAB_TOKEN environment variable is not set." 7 | echo "Please set it with: export GITLAB_TOKEN=your_token_here" 8 | exit 1 9 | fi 10 | 11 | # Ensure we're in a Git repository 12 | if ! git rev-parse --is-inside-work-tree > /dev/null 2>&1; then 13 | echo "Error: Not in a Git repository." 14 | echo "Please run this script from within a Git repository with a GitLab remote." 
15 | exit 1 16 | fi 17 | 18 | # Check for .gitlab-ci.yml file 19 | if [ ! -f .gitlab-ci.yml ]; then 20 | echo "Warning: No .gitlab-ci.yml file found in the current directory." 21 | echo "The pipeline trigger might fail if there is no pipeline configuration." 22 | fi 23 | 24 | # Function to display help 25 | show_help() { 26 | echo "GitLab Pipeline Trigger Examples" 27 | echo "--------------------------------" 28 | echo "Usage: $0 [example-number]" 29 | echo "" 30 | echo "Available examples:" 31 | echo " 1: Trigger default pipeline on the current branch" 32 | echo " 2: Trigger pipeline on main branch" 33 | echo " 3: Trigger release build" 34 | echo " 4: Trigger documentation build" 35 | echo " 5: Trigger pipeline with multiple variables" 36 | echo "" 37 | echo "For custom commands, modify this script or run wrkflw directly:" 38 | echo " wrkflw trigger-gitlab [options]" 39 | } 40 | 41 | # No arguments, show help 42 | if [ $# -eq 0 ]; then 43 | show_help 44 | exit 0 45 | fi 46 | 47 | # Handle examples 48 | case "$1" in 49 | "1") 50 | echo "Triggering default pipeline on the current branch..." 51 | wrkflw trigger-gitlab 52 | ;; 53 | 54 | "2") 55 | echo "Triggering pipeline on main branch..." 56 | wrkflw trigger-gitlab --branch main 57 | ;; 58 | 59 | "3") 60 | echo "Triggering release build..." 61 | wrkflw trigger-gitlab --variable BUILD_RELEASE=true 62 | ;; 63 | 64 | "4") 65 | echo "Triggering documentation build..." 66 | wrkflw trigger-gitlab --variable BUILD_DOCS=true 67 | ;; 68 | 69 | "5") 70 | echo "Triggering pipeline with multiple variables..." 
71 | wrkflw trigger-gitlab --variable BUILD_RELEASE=true --variable BUILD_DOCS=true 72 | ;; 73 | 74 | *) 75 | echo "Unknown example: $1" 76 | show_help 77 | exit 1 78 | ;; 79 | esac -------------------------------------------------------------------------------- /test_gitlab_ci/.gitlab/ci/build.yml: -------------------------------------------------------------------------------- 1 | .build-template: 2 | stage: build 3 | script: 4 | - cargo build --release 5 | artifacts: 6 | paths: 7 | - target/release/ 8 | expire_in: 1 week 9 | cache: 10 | key: 11 | files: 12 | - Cargo.lock 13 | paths: 14 | - ${CARGO_HOME} 15 | - target/ 16 | 17 | # Normal build job 18 | build: 19 | extends: .build-template 20 | rules: 21 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 22 | - if: $CI_COMMIT_BRANCH == "main" 23 | 24 | # Debug build with additional flags 25 | debug-build: 26 | extends: .build-template 27 | script: 28 | - cargo build --features debug 29 | variables: 30 | RUSTFLAGS: "-Z debug-info=2" 31 | rules: 32 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $DEBUG_BUILD == "true" 33 | when: manual -------------------------------------------------------------------------------- /test_gitlab_ci/.gitlab/ci/test.yml: -------------------------------------------------------------------------------- 1 | .test-template: 2 | stage: test 3 | dependencies: 4 | - build 5 | cache: 6 | key: 7 | files: 8 | - Cargo.lock 9 | paths: 10 | - ${CARGO_HOME} 11 | - target/ 12 | 13 | # Unit tests 14 | unit-tests: 15 | extends: .test-template 16 | script: 17 | - cargo test --lib 18 | rules: 19 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 20 | - if: $CI_COMMIT_BRANCH == "main" 21 | 22 | # Integration tests 23 | integration-tests: 24 | extends: .test-template 25 | script: 26 | - cargo test --test '*' 27 | rules: 28 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 29 | - if: $CI_COMMIT_BRANCH == "main" 30 | 31 | # Lint with clippy 32 | lint: 33 | extends: .test-template 34 | 
dependencies: [] # No dependencies needed for linting 35 | script: 36 | - rustup component add clippy 37 | - cargo clippy -- -D warnings 38 | rules: 39 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 40 | - if: $CI_COMMIT_BRANCH == "main" 41 | 42 | # Check formatting 43 | format: 44 | extends: .test-template 45 | dependencies: [] # No dependencies needed for formatting 46 | script: 47 | - rustup component add rustfmt 48 | - cargo fmt -- --check 49 | rules: 50 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 51 | - if: $CI_COMMIT_BRANCH == "main" 52 | 53 | # Deployment template 54 | .deploy-template: 55 | stage: deploy 56 | script: 57 | - echo "Deploying to ${ENVIRONMENT} environment" 58 | - cp target/release/wrkflw /tmp/wrkflw-${ENVIRONMENT} 59 | artifacts: 60 | paths: 61 | - /tmp/wrkflw-${ENVIRONMENT} 62 | dependencies: 63 | - build -------------------------------------------------------------------------------- /test_gitlab_ci/advanced.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - setup 3 | - build 4 | - test 5 | - package 6 | - deploy 7 | 8 | variables: 9 | CARGO_HOME: "${CI_PROJECT_DIR}/.cargo" 10 | RUST_BACKTRACE: "1" 11 | 12 | workflow: 13 | rules: 14 | - if: $CI_COMMIT_BRANCH == "main" 15 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 16 | - if: $CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/ 17 | - if: $CI_COMMIT_BRANCH =~ /^feature\/.*/ 18 | - if: $CI_COMMIT_BRANCH == "staging" 19 | 20 | # Default image and settings for all jobs 21 | default: 22 | image: rust:1.76 23 | interruptible: true 24 | retry: 25 | max: 2 26 | when: 27 | - runner_system_failure 28 | - stuck_or_timeout_failure 29 | 30 | # Cache configuration 31 | .cargo-cache: 32 | cache: 33 | key: 34 | files: 35 | - Cargo.lock 36 | paths: 37 | - ${CARGO_HOME} 38 | - target/ 39 | policy: pull-push 40 | 41 | # Job to initialize the environment 42 | setup: 43 | stage: setup 44 | extends: .cargo-cache 45 | cache: 46 | policy: push 
47 | script: 48 | - cargo --version 49 | - rustc --version 50 | - cargo fetch 51 | artifacts: 52 | paths: 53 | - Cargo.lock 54 | 55 | # Matrix build for multiple platforms 56 | .build-matrix: 57 | stage: build 58 | extends: .cargo-cache 59 | needs: 60 | - setup 61 | parallel: 62 | matrix: 63 | - TARGET: 64 | - x86_64-unknown-linux-gnu 65 | - x86_64-apple-darwin 66 | - aarch64-apple-darwin 67 | - x86_64-pc-windows-msvc 68 | RUST_VERSION: 69 | - "1.75" 70 | - "1.76" 71 | script: 72 | - rustup target add $TARGET 73 | - cargo build --release --target $TARGET 74 | artifacts: 75 | paths: 76 | - target/$TARGET/release/ 77 | expire_in: 1 week 78 | rules: 79 | - if: $CI_COMMIT_BRANCH == "main" || $CI_COMMIT_TAG 80 | when: always 81 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 82 | when: manual 83 | allow_failure: true 84 | 85 | # Regular build job for most cases 86 | build: 87 | stage: build 88 | extends: .cargo-cache 89 | needs: 90 | - setup 91 | script: 92 | - cargo build --release 93 | artifacts: 94 | paths: 95 | - target/release/ 96 | expire_in: 1 week 97 | rules: 98 | - if: $CI_COMMIT_BRANCH != "main" && !$CI_COMMIT_TAG 99 | when: always 100 | 101 | # Test with different feature combinations 102 | .test-template: 103 | stage: test 104 | extends: .cargo-cache 105 | needs: 106 | - build 107 | artifacts: 108 | reports: 109 | junit: test-results.xml 110 | when: always 111 | 112 | test-default: 113 | extends: .test-template 114 | script: 115 | - cargo test -- -Z unstable-options --format json | tee test-output.json 116 | - cat test-output.json | jq -r '.[]' > test-results.xml 117 | 118 | test-all-features: 119 | extends: .test-template 120 | script: 121 | - cargo test --all-features -- -Z unstable-options --format json | tee test-output.json 122 | - cat test-output.json | jq -r '.[]' > test-results.xml 123 | 124 | test-no-features: 125 | extends: .test-template 126 | script: 127 | - cargo test --no-default-features -- -Z unstable-options --format json | tee 
test-output.json 128 | - cat test-output.json | jq -r '.[]' > test-results.xml 129 | 130 | # Security scanning 131 | security: 132 | stage: test 133 | extends: .cargo-cache 134 | needs: 135 | - build 136 | script: 137 | - cargo install cargo-audit || true 138 | - cargo audit 139 | allow_failure: true 140 | 141 | # Linting 142 | lint: 143 | stage: test 144 | extends: .cargo-cache 145 | script: 146 | - rustup component add clippy 147 | - cargo clippy -- -D warnings 148 | rules: 149 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 150 | 151 | # Package for different targets 152 | package: 153 | stage: package 154 | extends: .cargo-cache 155 | needs: 156 | - job: build 157 | artifacts: true 158 | - test-default 159 | - test-all-features 160 | script: 161 | - mkdir -p packages 162 | - tar -czf packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz -C target/release wrkflw 163 | artifacts: 164 | paths: 165 | - packages/ 166 | only: 167 | - main 168 | - tags 169 | 170 | # Deploy to staging 171 | deploy-staging: 172 | stage: deploy 173 | image: alpine 174 | needs: 175 | - package 176 | environment: 177 | name: staging 178 | script: 179 | - apk add --no-cache curl 180 | - curl -X POST -F "file=@packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz" ${STAGING_DEPLOY_URL} 181 | only: 182 | - staging 183 | 184 | # Deploy to production 185 | deploy-production: 186 | stage: deploy 187 | image: alpine 188 | needs: 189 | - package 190 | environment: 191 | name: production 192 | script: 193 | - apk add --no-cache curl 194 | - curl -X POST -F "file=@packages/wrkflw-${CI_COMMIT_REF_SLUG}.tar.gz" ${PROD_DEPLOY_URL} 195 | only: 196 | - tags 197 | when: manual -------------------------------------------------------------------------------- /test_gitlab_ci/basic.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - build 3 | - test 4 | - deploy 5 | 6 | variables: 7 | CARGO_HOME: "${CI_PROJECT_DIR}/.cargo" 8 | 9 | # Default image for all jobs 
10 | image: rust:1.76 11 | 12 | build: 13 | stage: build 14 | script: 15 | - cargo build --release 16 | artifacts: 17 | paths: 18 | - target/release/ 19 | expire_in: 1 week 20 | 21 | test: 22 | stage: test 23 | script: 24 | - cargo test 25 | dependencies: 26 | - build 27 | 28 | lint: 29 | stage: test 30 | script: 31 | - rustup component add clippy 32 | - cargo clippy -- -D warnings 33 | - cargo fmt -- --check 34 | 35 | deploy: 36 | stage: deploy 37 | script: 38 | - echo "Deploying application..." 39 | - cp target/release/wrkflw /usr/local/bin/ 40 | only: 41 | - main 42 | environment: 43 | name: production 44 | dependencies: 45 | - build -------------------------------------------------------------------------------- /test_gitlab_ci/docker.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - build 3 | - test 4 | - deploy 5 | 6 | variables: 7 | DOCKER_DRIVER: overlay2 8 | DOCKER_TLS_CERTDIR: "/certs" 9 | CONTAINER_IMAGE: ${CI_REGISTRY_IMAGE}:${CI_COMMIT_REF_SLUG} 10 | CONTAINER_IMAGE_LATEST: ${CI_REGISTRY_IMAGE}:latest 11 | 12 | # Use Docker-in-Docker for building and testing 13 | .docker: 14 | image: docker:20.10 15 | services: 16 | - docker:20.10-dind 17 | variables: 18 | DOCKER_HOST: tcp://docker:2376 19 | DOCKER_TLS_VERIFY: 1 20 | DOCKER_CERT_PATH: $DOCKER_TLS_CERTDIR/client 21 | before_script: 22 | - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY 23 | 24 | # Build the Docker image 25 | build-docker: 26 | extends: .docker 27 | stage: build 28 | script: 29 | - docker build --pull -t $CONTAINER_IMAGE -t $CONTAINER_IMAGE_LATEST . 
30 | - docker push $CONTAINER_IMAGE 31 | - docker push $CONTAINER_IMAGE_LATEST 32 | only: 33 | - main 34 | - tags 35 | 36 | # Run tests inside Docker 37 | test-docker: 38 | extends: .docker 39 | stage: test 40 | script: 41 | - docker pull $CONTAINER_IMAGE 42 | - docker run --rm $CONTAINER_IMAGE cargo test 43 | dependencies: 44 | - build-docker 45 | 46 | # Security scan the Docker image 47 | security-scan: 48 | extends: .docker 49 | stage: test 50 | image: aquasec/trivy:latest 51 | script: 52 | - trivy image --no-progress --exit-code 1 --severity HIGH,CRITICAL $CONTAINER_IMAGE 53 | allow_failure: true 54 | 55 | # Run a Docker container with our app in the staging environment 56 | deploy-staging: 57 | extends: .docker 58 | stage: deploy 59 | environment: 60 | name: staging 61 | url: https://staging.example.com 62 | script: 63 | - docker pull $CONTAINER_IMAGE 64 | - docker tag $CONTAINER_IMAGE wrkflw-staging 65 | - | 66 | cat > deploy.sh << 'EOF' 67 | docker stop wrkflw-staging || true 68 | docker rm wrkflw-staging || true 69 | docker run -d --name wrkflw-staging -p 8080:8080 wrkflw-staging 70 | EOF 71 | - chmod +x deploy.sh 72 | - ssh $STAGING_USER@$STAGING_HOST 'bash -s' < deploy.sh 73 | only: 74 | - main 75 | when: manual 76 | 77 | # Run a Docker container with our app in the production environment 78 | deploy-production: 79 | extends: .docker 80 | stage: deploy 81 | environment: 82 | name: production 83 | url: https://wrkflw.example.com 84 | script: 85 | - docker pull $CONTAINER_IMAGE 86 | - docker tag $CONTAINER_IMAGE wrkflw-production 87 | - | 88 | cat > deploy.sh << 'EOF' 89 | docker stop wrkflw-production || true 90 | docker rm wrkflw-production || true 91 | docker run -d --name wrkflw-production -p 80:8080 wrkflw-production 92 | EOF 93 | - chmod +x deploy.sh 94 | - ssh $PRODUCTION_USER@$PRODUCTION_HOST 'bash -s' < deploy.sh 95 | only: 96 | - tags 97 | when: manual -------------------------------------------------------------------------------- 
/test_gitlab_ci/includes.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - build 3 | - test 4 | - deploy 5 | 6 | # Including external files 7 | include: 8 | - local: '.gitlab/ci/build.yml' # Will be created in a moment 9 | - local: '.gitlab/ci/test.yml' # Will be created in a moment 10 | - template: 'Workflows/MergeRequest-Pipelines.gitlab-ci.yml' # Built-in template 11 | 12 | variables: 13 | RUST_VERSION: "1.76" 14 | CARGO_HOME: "${CI_PROJECT_DIR}/.cargo" 15 | 16 | # Default settings for all jobs 17 | default: 18 | image: rust:${RUST_VERSION} 19 | before_script: 20 | - rustc --version 21 | - cargo --version 22 | 23 | # Main pipeline jobs that use the included templates 24 | production_deploy: 25 | stage: deploy 26 | extends: .deploy-template # This template is defined in one of the included files 27 | variables: 28 | ENVIRONMENT: production 29 | only: 30 | - main 31 | when: manual 32 | 33 | staging_deploy: 34 | stage: deploy 35 | extends: .deploy-template 36 | variables: 37 | ENVIRONMENT: staging 38 | only: 39 | - staging 40 | when: manual -------------------------------------------------------------------------------- /test_gitlab_ci/invalid.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | # Invalid GitLab CI file with common mistakes 2 | 3 | # Missing stages definition 4 | # stages: 5 | # - build 6 | # - test 7 | 8 | variables: 9 | CARGO_HOME: ${CI_PROJECT_DIR}/.cargo # Missing quotes around value with variables 10 | 11 | # Invalid job definition (missing script) 12 | build: 13 | stage: build # Referring to undefined stage 14 | # Missing required script section 15 | artifacts: 16 | paths: 17 | - target/release/ 18 | expire_in: 1 week 19 | 20 | # Invalid job with incorrect when value 21 | test: 22 | stage: test 23 | script: 24 | - cargo test 25 | when: never # Invalid value for when (should be always, manual, or delayed) 26 | dependencies: 27 | - 
non_existent_job # Dependency on non-existent job 28 | 29 | # Improperly structured job with invalid keys 30 | deploy: 31 | stagee: deploy # Typo in stage key 32 | scriptt: # Typo in script key 33 | - echo "Deploying..." 34 | only: 35 | - main 36 | environment: 37 | production # Incorrect format for environment 38 | retry: hello # Incorrect type for retry (should be integer or object) 39 | 40 | # Invalid rules section 41 | lint: 42 | stage: test 43 | script: 44 | - cargo clippy 45 | rules: 46 | - equals: $CI_COMMIT_BRANCH == "main" # Invalid rule (should be if, changes, exists, etc.) 47 | 48 | # Job with invalid cache configuration 49 | cache-test: 50 | stage: test 51 | script: 52 | - echo "Testing cache" 53 | cache: 54 | paths: 55 | - ${CARGO_HOME} 56 | key: [invalid, key, type] # Invalid type for key (should be string) 57 | policy: invalid-policy # Invalid policy value -------------------------------------------------------------------------------- /test_gitlab_ci/minimal.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | # Minimal GitLab CI configuration 2 | 3 | image: rust:latest 4 | 5 | build: 6 | script: 7 | - cargo build 8 | 9 | test: 10 | script: 11 | - cargo test -------------------------------------------------------------------------------- /test_gitlab_ci/services.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - build 3 | - test 4 | - deploy 5 | 6 | variables: 7 | POSTGRES_DB: test_db 8 | POSTGRES_USER: postgres 9 | POSTGRES_PASSWORD: postgres 10 | POSTGRES_HOST: postgres 11 | REDIS_HOST: redis 12 | MONGO_HOST: mongo 13 | RUST_BACKTRACE: 1 14 | 15 | # Default settings 16 | default: 17 | image: rust:1.76 18 | 19 | # Build the application 20 | build: 21 | stage: build 22 | script: 23 | - cargo build --release 24 | artifacts: 25 | paths: 26 | - target/release/ 27 | cache: 28 | key: 29 | files: 30 | - Cargo.lock 31 | paths: 32 | - 
${CI_PROJECT_DIR}/.cargo 33 | - target/ 34 | 35 | # Run unit tests (no services needed) 36 | unit-tests: 37 | stage: test 38 | needs: 39 | - build 40 | script: 41 | - cargo test --lib 42 | cache: 43 | key: 44 | files: 45 | - Cargo.lock 46 | paths: 47 | - ${CI_PROJECT_DIR}/.cargo 48 | - target/ 49 | policy: pull 50 | 51 | # Run integration tests with a PostgreSQL service 52 | postgres-tests: 53 | stage: test 54 | needs: 55 | - build 56 | services: 57 | - name: postgres:14-alpine 58 | alias: postgres 59 | variables: 60 | # Service-specific variables 61 | POSTGRES_DB: test_db 62 | POSTGRES_USER: postgres 63 | POSTGRES_PASSWORD: postgres 64 | DATABASE_URL: postgres://postgres:postgres@postgres:5432/test_db 65 | script: 66 | - apt-get update && apt-get install -y postgresql-client 67 | - cd target/release && ./wrkflw test-postgres 68 | - psql -h postgres -U postgres -d test_db -c "SELECT 1;" 69 | cache: 70 | key: 71 | files: 72 | - Cargo.lock 73 | paths: 74 | - ${CI_PROJECT_DIR}/.cargo 75 | - target/ 76 | policy: pull 77 | 78 | # Run integration tests with Redis service 79 | redis-tests: 80 | stage: test 81 | needs: 82 | - build 83 | services: 84 | - name: redis:alpine 85 | alias: redis 86 | variables: 87 | REDIS_URL: redis://redis:6379 88 | script: 89 | - apt-get update && apt-get install -y redis-tools 90 | - cd target/release && ./wrkflw test-redis 91 | - redis-cli -h redis PING 92 | cache: 93 | key: 94 | files: 95 | - Cargo.lock 96 | paths: 97 | - ${CI_PROJECT_DIR}/.cargo 98 | - target/ 99 | policy: pull 100 | 101 | # Run integration tests with MongoDB service 102 | mongo-tests: 103 | stage: test 104 | needs: 105 | - build 106 | services: 107 | - name: mongo:5 108 | alias: mongo 109 | variables: 110 | MONGO_URL: mongodb://mongo:27017 111 | script: 112 | - apt-get update && apt-get install -y mongodb-clients 113 | - cd target/release && ./wrkflw test-mongo 114 | - mongosh --host mongo --eval "db.version()" 115 | cache: 116 | key: 117 | files: 118 | - Cargo.lock 119 | 
paths: 120 | - ${CI_PROJECT_DIR}/.cargo 121 | - target/ 122 | policy: pull 123 | 124 | # Run multi-service integration tests 125 | all-services-test: 126 | stage: test 127 | needs: 128 | - build 129 | services: 130 | - name: postgres:14-alpine 131 | alias: postgres 132 | - name: redis:alpine 133 | alias: redis 134 | - name: mongo:5 135 | alias: mongo 136 | - name: rabbitmq:3-management 137 | alias: rabbitmq 138 | variables: 139 | DATABASE_URL: postgres://postgres:postgres@postgres:5432/test_db 140 | REDIS_URL: redis://redis:6379 141 | MONGO_URL: mongodb://mongo:27017 142 | RABBITMQ_URL: amqp://guest:guest@rabbitmq:5672 143 | script: 144 | - apt-get update && apt-get install -y postgresql-client redis-tools mongodb-clients 145 | - cd target/release && ./wrkflw test-all-services 146 | cache: 147 | key: 148 | files: 149 | - Cargo.lock 150 | paths: 151 | - ${CI_PROJECT_DIR}/.cargo 152 | - target/ 153 | policy: pull 154 | 155 | # Deploy to production 156 | deploy: 157 | stage: deploy 158 | needs: 159 | - unit-tests 160 | - postgres-tests 161 | - redis-tests 162 | - mongo-tests 163 | script: 164 | - echo "Deploying application..." 
165 | - cp target/release/wrkflw /tmp/ 166 | only: 167 | - main -------------------------------------------------------------------------------- /test_gitlab_ci/workflow.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - prepare 3 | - build 4 | - test 5 | - deploy 6 | 7 | # Global workflow rules to control when pipelines run 8 | workflow: 9 | rules: 10 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 11 | when: always 12 | - if: $CI_COMMIT_BRANCH == "main" 13 | when: always 14 | - if: $CI_COMMIT_TAG 15 | when: always 16 | - if: $CI_COMMIT_BRANCH == "develop" 17 | when: always 18 | - if: $CI_COMMIT_BRANCH =~ /^release\/.*/ 19 | when: always 20 | - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/ 21 | when: always 22 | - when: never # Skip all other branches 23 | 24 | variables: 25 | RUST_VERSION: "1.76" 26 | CARGO_HOME: "${CI_PROJECT_DIR}/.cargo" 27 | 28 | # Default settings 29 | default: 30 | image: "rust:${RUST_VERSION}" 31 | interruptible: true 32 | 33 | # Cache definition to be used by other jobs 34 | .cargo-cache: 35 | cache: 36 | key: 37 | files: 38 | - Cargo.lock 39 | paths: 40 | - ${CARGO_HOME} 41 | - target/ 42 | 43 | # Prepare the dependencies (runs on all branches) 44 | prepare: 45 | stage: prepare 46 | extends: .cargo-cache 47 | script: 48 | - cargo fetch --locked 49 | artifacts: 50 | paths: 51 | - Cargo.lock 52 | 53 | # Build only on main branch and MRs 54 | build: 55 | stage: build 56 | extends: .cargo-cache 57 | needs: 58 | - prepare 59 | script: 60 | - cargo build --release 61 | artifacts: 62 | paths: 63 | - target/release/ 64 | rules: 65 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 66 | - if: $CI_COMMIT_BRANCH == "main" 67 | - if: $CI_COMMIT_TAG 68 | 69 | # Build with debug symbols on develop branch 70 | debug-build: 71 | stage: build 72 | extends: .cargo-cache 73 | needs: 74 | - prepare 75 | script: 76 | - cargo build 77 | artifacts: 78 | paths: 79 | - target/debug/ 80 | rules: 81 | - 
if: $CI_COMMIT_BRANCH == "develop" 82 | 83 | # Test job - run on all branches except release and hotfix 84 | test: 85 | stage: test 86 | extends: .cargo-cache 87 | needs: 88 | - job: build 89 | optional: true 90 | - job: debug-build 91 | optional: true 92 | script: 93 | - | 94 | if [ -d "target/release" ]; then 95 | cargo test --release 96 | else 97 | cargo test 98 | fi 99 | rules: 100 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 101 | - if: $CI_COMMIT_BRANCH == "main" 102 | - if: $CI_COMMIT_BRANCH == "develop" 103 | - if: $CI_COMMIT_TAG 104 | - if: $CI_COMMIT_BRANCH =~ /^feature\/.*/ 105 | 106 | # Only lint on MRs and develop 107 | lint: 108 | stage: test 109 | extends: .cargo-cache 110 | script: 111 | - rustup component add clippy 112 | - cargo clippy -- -D warnings 113 | rules: 114 | - if: $CI_PIPELINE_SOURCE == "merge_request_event" 115 | - if: $CI_COMMIT_BRANCH == "develop" 116 | 117 | # Run benchmarks only on main branch 118 | benchmark: 119 | stage: test 120 | extends: .cargo-cache 121 | needs: 122 | - build 123 | script: 124 | - cargo bench 125 | rules: 126 | - if: $CI_COMMIT_BRANCH == "main" 127 | - if: $CI_COMMIT_TAG 128 | 129 | # Deploy to staging on develop branch pushes 130 | deploy-staging: 131 | stage: deploy 132 | needs: 133 | - test 134 | environment: 135 | name: staging 136 | url: https://staging.example.com 137 | script: 138 | - echo "Deploying to staging..." 139 | - cp target/release/wrkflw /tmp/wrkflw-staging 140 | rules: 141 | - if: $CI_COMMIT_BRANCH == "develop" 142 | when: on_success 143 | - if: $CI_COMMIT_BRANCH =~ /^release\/.*/ 144 | when: manual 145 | 146 | # Deploy to production on main branch and tags 147 | deploy-prod: 148 | stage: deploy 149 | needs: 150 | - test 151 | - benchmark 152 | environment: 153 | name: production 154 | url: https://example.com 155 | script: 156 | - echo "Deploying to production..." 
157 | - cp target/release/wrkflw /tmp/wrkflw-prod 158 | rules: 159 | - if: $CI_COMMIT_BRANCH == "main" 160 | when: manual 161 | - if: $CI_COMMIT_TAG =~ /^v\d+\.\d+\.\d+$/ 162 | when: manual 163 | - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/ 164 | when: manual 165 | 166 | # Notify slack only when deploy succeeded or failed 167 | notify: 168 | stage: .post 169 | image: curlimages/curl:latest 170 | needs: 171 | - job: deploy-staging 172 | optional: true 173 | - job: deploy-prod 174 | optional: true 175 | script: 176 | - | 177 | if [ "$CI_JOB_STATUS" == "success" ]; then 178 | curl -X POST -H 'Content-type: application/json' --data '{"text":"Deployment succeeded! :tada:"}' $SLACK_WEBHOOK_URL 179 | else 180 | curl -X POST -H 'Content-type: application/json' --data '{"text":"Deployment failed! :boom:"}' $SLACK_WEBHOOK_URL 181 | fi 182 | rules: 183 | - if: $CI_COMMIT_BRANCH == "main" && $CI_PIPELINE_SOURCE != "merge_request_event" 184 | - if: $CI_COMMIT_BRANCH == "develop" && $CI_PIPELINE_SOURCE != "merge_request_event" 185 | - if: $CI_COMMIT_TAG 186 | - if: $CI_COMMIT_BRANCH =~ /^hotfix\/.*/ -------------------------------------------------------------------------------- /tests/README.md: -------------------------------------------------------------------------------- 1 | # Testing Strategy 2 | 3 | This directory contains integration tests for the `wrkflw` project. 
We follow the Rust testing best practices by organizing tests as follows: 4 | 5 | ## Test Organization 6 | 7 | - **Unit Tests**: Located alongside the source files in `src/` using `#[cfg(test)]` modules 8 | - **Integration Tests**: Located directly in this `tests/` directory 9 | - `matrix_test.rs` - Tests for matrix expansion functionality 10 | - `reusable_workflow_test.rs` - Tests for reusable workflow validation 11 | - **End-to-End Tests**: Also located in this `tests/` directory 12 | - `cleanup_test.rs` - Tests for cleanup functionality with Docker resources 13 | 14 | ## Running Tests 15 | 16 | To run all tests: 17 | ```bash 18 | cargo test 19 | ``` 20 | 21 | To run only unit tests: 22 | ```bash 23 | cargo test --lib 24 | ``` 25 | 26 | To run only integration tests: 27 | ```bash 28 | cargo test --test matrix_test --test reusable_workflow_test 29 | ``` 30 | 31 | To run only end-to-end tests: 32 | ```bash 33 | cargo test --test cleanup_test 34 | ``` 35 | 36 | To run a specific test: 37 | ```bash 38 | cargo test test_name 39 | ``` 40 | 41 | ## Writing Tests 42 | 43 | Please follow these guidelines when writing tests: 44 | 45 | 1. Use meaningful test names that describe what is being tested 46 | 2. Group related tests together in modules 47 | 3. Use helper functions to reduce duplication 48 | 4. Test both success and failure cases 49 | 5. Use `#[should_panic]` for tests that expect a panic 50 | 6. 
Avoid test interdependencies -------------------------------------------------------------------------------- /tests/cleanup_test.rs: -------------------------------------------------------------------------------- 1 | use bollard::Docker; 2 | use std::process::Command; 3 | use std::time::Duration; 4 | use uuid::Uuid; 5 | use wrkflw::{ 6 | cleanup_on_exit, 7 | executor::docker, 8 | runtime::emulation::{self, EmulationRuntime}, 9 | }; 10 | 11 | // Skip the tests when running cargo test with --skip docker 12 | fn should_skip_docker_tests() -> bool { 13 | std::env::var("TEST_SKIP_DOCKER").is_ok() || !docker::is_available() 14 | } 15 | 16 | // Skip the tests when running cargo test with --skip processes 17 | fn should_skip_process_tests() -> bool { 18 | std::env::var("TEST_SKIP_PROCESSES").is_ok() || std::env::var("CI").is_ok() 19 | } 20 | 21 | #[tokio::test] 22 | async fn test_docker_container_cleanup() { 23 | // Skip test based on flags or environment 24 | if should_skip_docker_tests() { 25 | println!("Skipping Docker container cleanup test"); 26 | return; 27 | } 28 | 29 | // Skip if running in CI environment for Linux 30 | if cfg!(target_os = "linux") && std::env::var("CI").is_ok() { 31 | println!("Skipping Docker container cleanup test in Linux CI environment"); 32 | return; 33 | } 34 | 35 | // Connect to Docker 36 | let docker = match Docker::connect_with_local_defaults() { 37 | Ok(client) => client, 38 | Err(_) => { 39 | println!("Could not connect to Docker, skipping test"); 40 | return; 41 | } 42 | }; 43 | 44 | // Create a test container by manually tracking it 45 | // In a real test, we would create an actual container, but we're just simulating that here 46 | let container_id = format!("test-container-{}", Uuid::new_v4()); 47 | docker::track_container(&container_id); 48 | 49 | // Run cleanup 50 | let _ = docker::cleanup_containers(&docker).await; 51 | 52 | // Since we can't directly check the tracking status, 53 | // we'll use cleanup_on_exit and check for 
any errors 54 | match cleanup_on_exit().await { 55 | () => println!("Cleanup completed successfully"), 56 | } 57 | } 58 | 59 | #[tokio::test] 60 | async fn test_docker_network_cleanup() { 61 | // Skip test based on flags or environment 62 | if should_skip_docker_tests() { 63 | println!("Skipping Docker network cleanup test"); 64 | return; 65 | } 66 | 67 | // Skip if running in CI environment for Linux 68 | if cfg!(target_os = "linux") && std::env::var("CI").is_ok() { 69 | println!("Skipping Docker network cleanup test in Linux CI environment"); 70 | return; 71 | } 72 | 73 | // Connect to Docker 74 | let docker = match Docker::connect_with_local_defaults() { 75 | Ok(client) => client, 76 | Err(_) => { 77 | println!("Could not connect to Docker, skipping test"); 78 | return; 79 | } 80 | }; 81 | 82 | // Create a test network 83 | let network_id = match docker::create_job_network(&docker).await { 84 | Ok(id) => id, 85 | Err(_) => { 86 | println!("Could not create test network, skipping test"); 87 | return; 88 | } 89 | }; 90 | 91 | // Run cleanup 92 | match docker::cleanup_networks(&docker).await { 93 | Ok(_) => println!("Network cleanup completed successfully"), 94 | Err(e) => println!("Network cleanup error: {}", e), 95 | } 96 | 97 | // Attempt to remove the network again - this should fail if cleanup worked 98 | match docker.remove_network(&network_id).await { 99 | Ok(_) => println!("Network still exists, cleanup may not have worked"), 100 | Err(_) => println!("Network was properly cleaned up"), 101 | } 102 | } 103 | 104 | #[tokio::test] 105 | async fn test_emulation_workspace_cleanup() { 106 | // Create an emulation runtime instance 107 | let _runtime = EmulationRuntime::new(); 108 | 109 | // Run cleanup 110 | emulation::cleanup_resources().await; 111 | 112 | // We can only verify that the cleanup operation doesn't crash 113 | // since we can't access the private tracking collections 114 | println!("Emulation workspace cleanup completed"); 115 | } 116 | 117 | 
#[tokio::test] 118 | #[ignore] // This test uses process manipulation which can be problematic 119 | async fn test_emulation_process_cleanup() { 120 | // Skip tests on CI or environments where spawning processes might be restricted 121 | if should_skip_process_tests() { 122 | println!("Skipping process cleanup test"); 123 | return; 124 | } 125 | 126 | // Create a process for testing 127 | let process_id = if cfg!(unix) { 128 | // Use sleep on Unix but DO NOT use & to background 129 | // Instead run it directly and track the actual process 130 | let child = Command::new("sleep") 131 | .arg("10") // Shorter sleep time 132 | .spawn(); 133 | 134 | match child { 135 | Ok(child) => { 136 | let pid = child.id(); 137 | // Track the process for cleanup 138 | emulation::track_process(pid as u32); 139 | pid as u32 140 | } 141 | Err(_) => { 142 | println!("Could not create test process, skipping test"); 143 | return; 144 | } 145 | } 146 | } else if cfg!(windows) { 147 | // On Windows, use a different long-running command 148 | let child = Command::new("timeout") 149 | .args(&["/t", "10", "/nobreak"]) 150 | .spawn(); 151 | 152 | match child { 153 | Ok(child) => { 154 | let pid = child.id(); 155 | // Track the process for cleanup 156 | emulation::track_process(pid as u32); 157 | pid as u32 158 | } 159 | Err(_) => { 160 | println!("Could not create test process, skipping test"); 161 | return; 162 | } 163 | } 164 | } else { 165 | println!("Unsupported platform, skipping test"); 166 | return; 167 | }; 168 | 169 | // Run cleanup resources which includes process cleanup 170 | emulation::cleanup_resources().await; 171 | 172 | // On Unix, verify process is no longer running 173 | if cfg!(unix) { 174 | // Allow a short delay for process termination 175 | tokio::time::sleep(Duration::from_millis(100)).await; 176 | 177 | // Check if process exists 178 | let process_exists = unsafe { 179 | libc::kill(process_id as i32, 0) == 0 180 | || std::io::Error::last_os_error().raw_os_error() != 
            Some(libc::ESRCH)
        };

        assert!(
            !process_exists,
            "Process should be terminated after cleanup"
        );
    }
}

// Exercises the top-level cleanup_on_exit() path end-to-end: it registers a
// tracked container id, creates a real Docker network, constructs an emulation
// workspace, then runs cleanup_on_exit() under a 15s timeout.
// NOTE(review): there are no post-cleanup assertions here — "success" is that
// cleanup returns (or the manual fallback runs) without panicking.
#[tokio::test]
async fn test_cleanup_on_exit_function() {
    // Skip if Docker is not available
    if should_skip_docker_tests() {
        println!("Docker not available, skipping test");
        return;
    }

    // Connect to Docker
    let docker = match Docker::connect_with_local_defaults() {
        Ok(client) => client,
        Err(_) => {
            println!("Could not connect to Docker, skipping test");
            return;
        }
    };

    // Create some resources for cleanup

    // Track a container (a random id is enough; only the tracking entry matters,
    // no container is actually started)
    let container_id = format!("test-container-{}", Uuid::new_v4());
    docker::track_container(&container_id);

    // Create a network
    let _ = match docker::create_job_network(&docker).await {
        Ok(id) => id,
        Err(_) => {
            println!("Could not create test network, skipping test");
            return;
        }
    };

    // Create an emulation workspace
    let _runtime = EmulationRuntime::new();

    // Run cleanup function, bounded so a hung cleanup cannot wedge the suite
    match tokio::time::timeout(Duration::from_secs(15), cleanup_on_exit()).await {
        Ok(_) => println!("Cleanup completed successfully"),
        Err(_) => {
            println!("Cleanup timed out after 15 seconds");
            // Attempt manual cleanup
            let _ = docker::cleanup_containers(&docker).await;
            let _ = docker::cleanup_networks(&docker).await;
            emulation::cleanup_resources().await;
        }
    }
}

--------------------------------------------------------------------------------
/tests/matrix_test.rs:
--------------------------------------------------------------------------------
use indexmap::IndexMap;
use serde_yaml::Value;
use std::collections::HashMap;
use wrkflw::matrix::{self, MatrixCombination, MatrixConfig};

/// Builds the shared matrix fixture used by the tests below:
/// parameters os = [ubuntu, windows, macos] and node = [14, 16],
/// one exclude (windows + node 14), one include (ubuntu + node 18 +
/// experimental: true), max-parallel = 2, fail-fast = true.
fn create_test_matrix() -> MatrixConfig {
    let mut matrix = MatrixConfig::default();

    // Add basic parameters
    let mut params = IndexMap::new();

    // Add 'os' parameter with array values
    let os_array = vec![
        Value::String("ubuntu".to_string()),
        Value::String("windows".to_string()),
        Value::String("macos".to_string()),
    ];
    params.insert("os".to_string(), Value::Sequence(os_array));

    // Add 'node' parameter with array values
    let node_array = vec![
        Value::Number(serde_yaml::Number::from(14)),
        Value::Number(serde_yaml::Number::from(16)),
    ];
    params.insert("node".to_string(), Value::Sequence(node_array));

    matrix.parameters = params;

    // Add exclude pattern
    let mut exclude_item = HashMap::new();
    exclude_item.insert("os".to_string(), Value::String("windows".to_string()));
    exclude_item.insert(
        "node".to_string(),
        Value::Number(serde_yaml::Number::from(14)),
    );
    matrix.exclude = vec![exclude_item];

    // Add include pattern
    let mut include_item = HashMap::new();
    include_item.insert("os".to_string(), Value::String("ubuntu".to_string()));
    include_item.insert(
        "node".to_string(),
        Value::Number(serde_yaml::Number::from(18)),
    );
    include_item.insert("experimental".to_string(), Value::Bool(true));
    matrix.include = vec![include_item];

    // Set max-parallel
    matrix.max_parallel = Some(2);

    // Set fail-fast
    matrix.fail_fast = Some(true);

    matrix
}

/// Verifies matrix::expand_matrix(): the cartesian product of the parameters,
/// minus excluded combinations, plus explicitly included ones — and that the
/// injected include is flagged via `is_included`.
#[test]
fn test_matrix_expansion() {
    let matrix = create_test_matrix();

    // Expand the matrix
    let combinations = matrix::expand_matrix(&matrix).unwrap();

    // We should have 6 combinations:
    // 3 OS x 2 Node versions = 6 base combinations
    // - 1 excluded (windows + node 14)
    // + 1 included (ubuntu + node 18 + experimental)
    // = 6 total combinations
    assert_eq!(combinations.len(), 6);

    // Check that the excluded combination is not present
    let excluded = combinations
        .iter()
        .find(|c| match (c.values.get("os"), c.values.get("node")) {
            (Some(Value::String(os)), Some(Value::Number(node))) => {
                os == "windows" && node.as_u64() == Some(14)
            }
            _ => false,
        });
    assert!(
        excluded.is_none(),
        "Excluded combination should not be present"
    );

    // Check that the included combination is present
    let included = combinations.iter().find(|c| {
        match (
            c.values.get("os"),
            c.values.get("node"),
            c.values.get("experimental"),
        ) {
            (Some(Value::String(os)), Some(Value::Number(node)), Some(Value::Bool(exp))) => {
                os == "ubuntu" && node.as_u64() == Some(18) && *exp
            }
            _ => false,
        }
    });
    assert!(included.is_some(), "Included combination should be present");
    assert!(
        included.unwrap().is_included,
        "Combination should be marked as included"
    );
}

/// Checks that matrix::format_combination_name() embeds the job name and every
/// key/value pair of the combination in the generated display name.
#[test]
fn test_format_combination_name() {
    let mut values = HashMap::new();
    values.insert("os".to_string(), Value::String("ubuntu".to_string()));
    values.insert(
        "node".to_string(),
        Value::Number(serde_yaml::Number::from(14)),
    );

    let combination = MatrixCombination {
        values,
        is_included: false,
    };

    let formatted = matrix::format_combination_name("test-job", &combination);

    // Should format as "test-job (os: ubuntu, node: 14)" or similar
    assert!(formatted.contains("test-job"));
    assert!(formatted.contains("os: ubuntu"));
    assert!(formatted.contains("node: 14"));
}

--------------------------------------------------------------------------------
/tests/reusable_workflow_test.rs:
--------------------------------------------------------------------------------
use std::fs;
use tempfile::tempdir;
use wrkflw::evaluator::evaluate_workflow_file;

/// Validates reusable-workflow (`uses:`) handling in evaluate_workflow_file:
/// a workflow with a well-formed remote reference (owner/repo/path@sha) and a
/// well-formed local reference (./path) must pass, while a malformed `uses:`
/// value must be reported as an invalid reference format.
#[test]
fn test_reusable_workflow_validation() {
    let temp_dir = tempdir().unwrap();
    let workflow_path = temp_dir.path().join("test-workflow.yml");

    // Create a workflow file that uses reusable workflows
    // NOTE(review): YAML indentation reconstructed as 2-space — confirm against
    // the original source; raw-string whitespace is significant.
    let content = r#"
on:
  pull_request:
    branches:
      - main

jobs:
  call-workflow-1-in-local-repo:
    uses: octo-org/this-repo/.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89
  call-workflow-2-in-local-repo:
    uses: ./path/to/workflow.yml
    with:
      username: mona
    secrets:
      token: ${{ secrets.TOKEN }}
"#;

    fs::write(&workflow_path, content).unwrap();

    // Validate the workflow
    let result = evaluate_workflow_file(&workflow_path, false).unwrap();

    // Should be valid since we've fixed the validation to handle reusable workflows
    assert!(
        result.is_valid,
        "Workflow should be valid, but got issues: {:?}",
        result.issues
    );
    assert!(result.issues.is_empty());

    // Create an invalid reusable workflow (bad format for 'uses')
    let invalid_content = r#"
on:
  pull_request:
    branches:
      - main

jobs:
  call-workflow-invalid:
    uses: invalid-format
"#;

    fs::write(&workflow_path, invalid_content).unwrap();

    // Validate the workflow
    let result = evaluate_workflow_file(&workflow_path, false).unwrap();

    // Should be invalid due to the bad format
    assert!(!result.is_valid);
    assert!(result
        .issues
        .iter()
        .any(|issue| issue.contains("Invalid reusable workflow reference format")));
}

--------------------------------------------------------------------------------
/trigger.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bahdotsh/wrkflw/d5d1904d0a45477fe6cb6818e1e79a1971d8e789/trigger.gif
--------------------------------------------------------------------------------