├── .envrc ├── .github ├── dependabot.yml └── workflows │ ├── auto-approve.yml │ ├── main.yml │ ├── sarif.yml │ └── scorecard-analysis.yml ├── .gitignore ├── .gitmodules ├── .pre-commit-config.yaml ├── .rustfmt.toml ├── .vscode └── settings.json ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── SECURITY.md ├── clang-tidy-sarif ├── Cargo.toml ├── LICENSE ├── README.md ├── cliff.toml ├── src │ └── bin.rs └── tests │ └── version-numbers.rs ├── clippy-sarif ├── Cargo.toml ├── LICENSE ├── README.md ├── cliff.toml ├── src │ └── bin.rs └── tests │ └── version-numbers.rs ├── docs ├── config.toml ├── content │ ├── _index.md │ └── docs │ │ ├── _index.md │ │ ├── getting-started │ │ ├── _index.md │ │ └── introduction.md │ │ └── help │ │ ├── _index.md │ │ └── rust_docs.md └── static │ ├── images │ ├── carbon.png │ └── ghas.png │ └── pages │ └── google801b3c72f2cdc749.html ├── flake.lock ├── flake.nix ├── hadolint-sarif ├── Cargo.toml ├── LICENSE ├── README.md ├── cliff.toml ├── src │ └── bin.rs └── tests │ └── version-numbers.rs ├── miri-sarif ├── Cargo.toml ├── LICENSE ├── README.md ├── cliff.toml ├── src │ └── bin.rs └── tests │ └── version-numbers.rs ├── nix ├── pre-commit.nix └── rust.nix ├── rust-toolchain.toml ├── sarif-fmt ├── Cargo.toml ├── LICENSE ├── README.md ├── cliff.toml ├── src │ ├── bin.rs │ └── github │ │ ├── mod.rs │ │ └── problem-matchers │ │ └── sarif-plain-matcher.json └── tests │ ├── clang-tidy-test.rs │ ├── clippy-test.rs │ ├── data │ ├── Cargo.lock │ ├── Cargo.toml │ ├── Dockerfile │ ├── clang-tidy.out │ ├── clippy.out │ ├── cpp.cpp │ ├── hadolint.out │ ├── shell.sh │ ├── shellcheck.out │ └── src │ │ └── main.rs │ ├── hadolint-test.rs │ ├── shellcheck-test.rs │ └── version-numbers.rs ├── serde-sarif ├── .gitignore ├── Cargo.toml ├── LICENSE ├── README.md ├── build.rs ├── cliff.toml ├── src │ ├── converters │ │ ├── cargo.rs │ │ ├── clang_tidy.rs │ │ ├── clippy.rs │ │ ├── hadolint.rs │ │ ├── miri.rs │ │ ├── mod.rs │ │ └── 
shellcheck.rs │ ├── lib.rs │ ├── sarif.rs │ └── schema.json └── tests │ └── version-numbers.rs └── shellcheck-sarif ├── Cargo.toml ├── LICENSE ├── README.md ├── cliff.toml ├── src └── bin.rs └── tests └── version-numbers.rs /.envrc: -------------------------------------------------------------------------------- 1 | use flake 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | commit-message: 8 | prefix: ci 9 | - package-ecosystem: "cargo" 10 | directory: "/" 11 | schedule: 12 | interval: "daily" 13 | commit-message: 14 | prefix: ci 15 | - package-ecosystem: "gitsubmodule" 16 | directory: / 17 | schedule: 18 | interval: "daily" 19 | commit-message: 20 | prefix: ci 21 | -------------------------------------------------------------------------------- /.github/workflows/auto-approve.yml: -------------------------------------------------------------------------------- 1 | name: auto approve 2 | 3 | on: pull_request_target 4 | permissions: write-all 5 | 6 | jobs: 7 | auto-approve: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: hmarr/auto-approve-action@v4 11 | if: github.actor == 'dependabot[bot]' || github.actor == 'psastras' 12 | with: 13 | github-token: "${{ secrets.GITHUB_TOKEN }}" 14 | enable-auto-merge: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: alexwilson/enable-github-automerge-action@main 18 | if: github.actor == 'dependabot[bot]' 19 | with: 20 | github-token: "${{ secrets.GITHUB_TOKEN }}" 21 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | tags-ignore: 4 | - "**" 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | # 
ignore gh-pages 10 | name: main 11 | permissions: write-all 12 | 13 | jobs: 14 | build-test: 15 | strategy: 16 | matrix: 17 | runs-on: [ubuntu-latest, macos-latest] 18 | runs-on: ${{ matrix.runs-on }} 19 | steps: 20 | - uses: actions/checkout@v4 21 | - uses: DeterminateSystems/nix-installer-action@v17 22 | - uses: DeterminateSystems/magic-nix-cache-action@v9 23 | - name: Run `nixci build` 24 | run: | 25 | nix run nixpkgs#nixci -- \ 26 | --extra-access-tokens ${{ secrets.GITHUB_TOKEN }} \ 27 | -v \ 28 | build 29 | - name: Collect artifacts 30 | id: collect-artifacts 31 | run: | 32 | ALL=$(nix eval --raw '.#all.outPath')/bin/* 33 | echo ::set-output name=all::$ALL 34 | - uses: actions/upload-artifact@v4 35 | with: 36 | name: ${{ matrix.runs-on }} 37 | path: ${{steps.collect-artifacts.outputs.all}} 38 | 39 | docs: 40 | needs: [build-test] 41 | if: ${{ github.ref == 'refs/heads/main' }} 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@v4 45 | with: 46 | submodules: recursive 47 | - uses: DeterminateSystems/nix-installer-action@v17 48 | - uses: DeterminateSystems/magic-nix-cache-action@v9 49 | - name: Run Zola 50 | run: | 51 | nix develop --command zola build 52 | cp ./static/pages/* public 53 | working-directory: docs 54 | - uses: peaceiris/actions-gh-pages@v4 55 | with: 56 | github_token: ${{ secrets.GITHUB_TOKEN }} 57 | publish_dir: ./docs/public 58 | force_orphan: true 59 | 60 | crates-publish: 61 | name: Publish to crates.io 62 | needs: [build-test] 63 | if: github.ref == 'refs/heads/main' && contains(github.event.head_commit.message, 'release:') 64 | runs-on: ubuntu-latest 65 | steps: 66 | - uses: actions/checkout@v4 67 | - uses: Swatinem/rust-cache@v2 68 | - uses: katyo/publish-crates@v2 69 | continue-on-error: true 70 | with: 71 | registry-token: ${{ secrets.CRATES_IO_TOKEN }} 72 | args: --no-verify 73 | 74 | fh-publish: 75 | name: Publish to FlakeHub 76 | needs: [build-test] 77 | if: github.ref == 'refs/heads/main' && 
contains(github.event.head_commit.message, 'release:') 78 | runs-on: ubuntu-latest 79 | steps: 80 | - uses: actions/checkout@v4 81 | - uses: DeterminateSystems/nix-installer-action@main 82 | - uses: DeterminateSystems/flakehub-push@main 83 | with: 84 | visibility: public 85 | 86 | gh-publish: 87 | name: Publish to Github 88 | needs: [build-test] 89 | if: github.ref == 'refs/heads/main' && contains(github.event.head_commit.message, 'release:') 90 | strategy: 91 | matrix: 92 | runs-on: [ubuntu-latest, macos-latest] 93 | target: 94 | [ 95 | clang-tidy-sarif, 96 | clippy-sarif, 97 | hadolint-sarif, 98 | miri-sarif, 99 | shellcheck-sarif, 100 | sarif-fmt, 101 | ] 102 | runs-on: ${{ matrix.runs-on }} 103 | steps: 104 | - uses: actions/checkout@v4 105 | with: 106 | fetch-depth: 0 107 | - uses: Swatinem/rust-cache@v2 108 | - uses: DeterminateSystems/nix-installer-action@v17 109 | - uses: DeterminateSystems/magic-nix-cache-action@v9 110 | - uses: actions-rs/cargo@v1 111 | with: 112 | command: build 113 | args: --release --bin ${{ matrix.target }} 114 | - name: targets 115 | id: targets 116 | run: | 117 | TARGET_TRIPLE=$(rustc --version --verbose | grep host | awk '{print $2}') 118 | echo ::set-output name=target_triple::$TARGET_TRIPLE 119 | TARGET_VERSION=$(./target/release/${{ matrix.target }} --version | awk '{print $2}') 120 | echo ::set-output name=target_version::$TARGET_VERSION 121 | TARGET_BINARY=./target/release/${{ matrix.target }}-${TARGET_TRIPLE} 122 | mv ./target/release/${{ matrix.target }} ${TARGET_BINARY} 123 | echo ::set-output name=target_binary::$TARGET_BINARY 124 | - name: changelog 125 | id: changelog 126 | run: | 127 | nix develop --command git-cliff -c ${{ matrix.target }}/cliff.toml --unreleased --tag ${{ matrix.target }}-v${{steps.targets.outputs.target_version}} --output CHANGELOG.md 128 | - name: latest 129 | continue-on-error: true 130 | run: | 131 | git push --delete origin ${{ matrix.target }}-latest 132 | - uses: ncipollo/release-action@v1 133 
| with: 134 | artifacts: ${{steps.targets.outputs.target_binary}} 135 | bodyFile: CHANGELOG.md 136 | token: ${{ secrets.GITHUB_TOKEN }} 137 | allowUpdates: true 138 | tag: ${{ matrix.target }}-v${{steps.targets.outputs.target_version}} 139 | name: ${{ matrix.target }} ${{steps.targets.outputs.target_version}} 140 | - uses: ncipollo/release-action@v1 141 | with: 142 | artifacts: ${{steps.targets.outputs.target_binary}} 143 | bodyFile: CHANGELOG.md 144 | token: ${{ secrets.GITHUB_TOKEN }} 145 | allowUpdates: true 146 | tag: ${{ matrix.target }}-latest 147 | name: ${{ matrix.target }} latest 148 | replacesArtifacts: true 149 | makeLatest: true 150 | -------------------------------------------------------------------------------- /.github/workflows/sarif.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_run: 3 | workflows: ["main"] 4 | branches: [main] 5 | types: [completed] 6 | 7 | name: sarif 8 | permissions: write-all 9 | 10 | jobs: 11 | upload-sarif: 12 | runs-on: ubuntu-latest 13 | if: ${{ github.ref == 'refs/heads/main' }} 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: actions-rs/toolchain@v1 17 | with: 18 | profile: minimal 19 | components: clippy,rustfmt 20 | - uses: Swatinem/rust-cache@v2 21 | - run: cargo install clippy-sarif sarif-fmt 22 | - run: cargo clippy --all-targets --all-features --message-format=json | 23 | clippy-sarif | tee results.sarif | sarif-fmt 24 | - name: Upload SARIF file 25 | uses: github/codeql-action/upload-sarif@v3 26 | with: 27 | sarif_file: results.sarif 28 | -------------------------------------------------------------------------------- /.github/workflows/scorecard-analysis.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | # Only the default branch is supported. 4 | branches: 5 | - main 6 | schedule: 7 | # Weekly on Saturdays. 
8 | - cron: '30 1 * * 6' 9 | 10 | name: scorecard-analysis 11 | permissions: read-all 12 | 13 | jobs: 14 | analysis: 15 | name: Scorecard analysis 16 | runs-on: ubuntu-latest 17 | permissions: 18 | security-events: write 19 | id-token: write 20 | 21 | steps: 22 | - name: "Checkout code" 23 | uses: actions/checkout@v4 24 | with: 25 | persist-credentials: false 26 | 27 | - name: "Run analysis" 28 | uses: ossf/scorecard-action@v2.4.1 29 | with: 30 | results_file: results.sarif 31 | results_format: sarif 32 | publish_results: true 33 | 34 | - name: "Upload artifact" 35 | uses: actions/upload-artifact@v4 36 | with: 37 | name: SARIF file 38 | path: results.sarif 39 | 40 | - name: "Upload to code-scanning" 41 | uses: github/codeql-action/upload-sarif@v3 42 | with: 43 | sarif_file: results.sarif -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .cache/ 3 | 4 | *.profraw 5 | public 6 | result 7 | .direnv 8 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "docs/themes/adidoks"] 2 | path = docs/themes/adidoks 3 | url = https://github.com/aaranxu/adidoks.git 4 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | /nix/store/860nhag5b16l86ylywx7n0jc56qqyib5-pre-commit-config.json -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | max_width = 80 2 | tab_spaces = 2 3 | -------------------------------------------------------------------------------- /.vscode/settings.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "nixEnvSelector.nixFile": "${workspaceFolder}/flake.nix" 3 | } -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Anybody is welcome to propose or create a change. It is appreciated for larger changes that a 4 | notification and some discussion is made before moving forwards with such changes. Small changes 5 | and improvements are always welcome via pull request. 6 | 7 | It is expected that all contributions follow the relevant processes and conduct specified below. 8 | 9 | ## Pull Request Process 10 | 11 | 1. Please make sure to update relevant documentation, when applicable to your pull request 12 | 2. Pull requests commits should follow [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) guidelines. Merges are not allowed to keep a linear history (rebase only), therefore its important to keep the history clean 13 | 3. Please mention a codeowner to get eyes on your pull request 14 | 4. Once approved, you or a codeowner may merge the change 15 | 16 | ## Code of Conduct 17 | 18 | Be empathetic - everyone is coming from their own background, priorities, challenges and mindset. 
19 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "sarif-fmt", 4 | "hadolint-sarif", 5 | "shellcheck-sarif", 6 | "clippy-sarif", 7 | "clang-tidy-sarif", 8 | "serde-sarif", 9 | "miri-sarif", 10 | ] 11 | 12 | [profile.release] 13 | strip = true 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2021 Paul Sastrasinh 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 
20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | [![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/psastras/sarif-rs/badge)](https://scorecard.dev/viewer/?uri=github.com/psastras/sarif-rs) 3 | [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/9282/badge)](https://www.bestpractices.dev/projects/9282) 4 | 5 | # sarif-rs 6 | 7 | A group of Rust projects for interacting with the 8 | [SARIF](https://sarifweb.azurewebsites.net/) format. 9 | 10 | ## Example 11 | 12 | Parse `cargo clippy` output, convert to SARIF (`clippy-sarif`), then pretty 13 | print the SARIF to terminal (`sarif-fmt`). 14 | 15 | ```shell 16 | $ cargo clippy --message-format=json | clippy-sarif | sarif-fmt 17 | $ warning: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)` 18 | ┌─ sarif-fmt/src/bin.rs:423:13 19 | │ 20 | 423 │ ╭ the_rule 21 | 424 │ │ .full_description 22 | 425 │ │ .as_ref() 23 | 426 │ │ .and_then(|mfms| Some(mfms.text.clone())) 24 | │ ╰───────────────────────────────────────────────────────^ 25 | │ 26 | = `#[warn(clippy::bind_instead_of_map)]` on by default 27 | for further information visit https://rust-lang.github.io/rust-clippy/master#bind_instead_of_map 28 | ``` 29 | 30 | ## Install 31 | 32 | Each CLI may be installed via `cargo`, [cargo-binstall](https://github.com/cargo-bins/cargo-binstall) or directly downloaded from the 33 | corresponding Github release. 34 | 35 | ### Cargo 36 | 37 | ```shell 38 | cargo install # ex. cargo install sarif-fmt 39 | ``` 40 | 41 | ### Cargo-binstall 42 | 43 | ```shell 44 | cargo binstall # ex. 
cargo binstall sarif-fmt 45 | ``` 46 | 47 | ### Github Releases 48 | 49 | The latest version is 50 | [continuously published and tagged](https://github.com/psastras/sarif-rs/releases). 51 | 52 | Using `curl`, 53 | 54 | ```shell 55 | # make sure to adjust the target and version (you may also want to pin to a specific version) 56 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/shellcheck-sarif-v0.8.0/shellcheck-sarif-x86_64-unknown-linux-gnu -o shellcheck-sarif 57 | ``` 58 | 59 | ### Fedora Linux 60 | 61 | ```shell 62 | sudo dnf install # ex. cargo binstall sarif-fmt 63 | ``` 64 | 65 | ### Nix 66 | 67 | Through the `nix` cli, 68 | 69 | ```shell 70 | nix --accept-flake-config profile install github:psastras/sarif-rs 71 | ``` 72 | 73 | Or [from FlakeHub](https://flakehub.com/flake/psastras/sarif-rs). 74 | 75 | ## Documentation 76 | 77 | See each subproject for more detailed information: 78 | 79 | - `clang-tidy-sarif`: CLI tool to convert `clang-tidy` diagnostics into SARIF. 80 | See the [Rust documentation](https://docs.rs/clang_tidy_sarif/). 81 | - `clippy-sarif`: CLI tool to convert `clippy` diagnostics into SARIF. See the 82 | [Rust documentation](https://docs.rs/clippy_sarif/). 83 | - `hadolint-sarif`: CLI tool to convert `hadolint` diagnostics into SARIF. See 84 | the [Rust documentation](https://docs.rs/hadolint_sarif/). 85 | - `miri-sarif`: CLI tool to convert `miri` diagnostics into SARIF. See the 86 | [Rust documentation](https://docs.rs/miri_sarif/). 87 | - `shellcheck-sarif`: CLI tool to convert `shellcheck` diagnostics into SARIF. 88 | See the [Rust documentation](https://docs.rs/shellcheck_sarif/). 89 | - `sarif-fmt`: CLI tool to pretty print SARIF diagnostics. See the 90 | [Rust documentation](https://docs.rs/sarif_fmt/). 91 | - `serde-sarif`: Typesafe SARIF structures for serializing and deserializing 92 | SARIF information using [serde](https://serde.rs/). See the 93 | [Rust documentation](https://docs.rs/serde_sarif/). 
94 | 95 | ## Development 96 | 97 | Before you begin, ensure the following programs are available on your machine: 98 | 99 | - [`nix`](https://nixos.org/download.html#nix-quick-install) 100 | 101 | ### Using Cargo 102 | 103 | Enter the development shell provisioned by `nix` and build / test the project: 104 | 105 | ```shell 106 | nix develop 107 | cargo build 108 | cargo test 109 | ``` 110 | 111 | For more information on specific configurations, refer to the 112 | [`cargo` documentation](https://doc.rust-lang.org/cargo). 113 | 114 | ### Using Nix 115 | 116 | Enter the development shell provisioned by `nix` and build / test the project: 117 | 118 | ```shell 119 | # build all crates 120 | nix build 121 | 122 | # optionally, you may build a single crate 123 | nix build ".#sarif-fmt" 124 | ``` 125 | 126 | ### Releasing 127 | 128 | To release a new version (publish to crates.io), prefix the head commit with `release:` and update the relevant rust crate versions. Once merged into main the pipeline should pick up the change and publish a new version. 129 | 130 | License: MIT 131 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | Please report (suspected) security vulnerabilities by opening an issue on the issue tracker. Do NOT put the details of the security vulnerability inside the issue. 6 | 7 | You will receive a response on the issue with further information on how to disclose the security vulnerability. 
8 | -------------------------------------------------------------------------------- /clang-tidy-sarif/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "clang-tidy-sarif" 3 | version = "0.8.0" 4 | authors = ["Paul Sastrasinh "] 5 | edition = "2018" 6 | description = "Convert clang-tidy output to SARIF" 7 | license = "MIT" 8 | readme = "README.md" 9 | keywords = ["sarif", "clang-tidy", "clang", "lint", "cli"] 10 | categories = ["command-line-utilities"] 11 | homepage = "https://psastras.github.io/sarif-rs/" 12 | documentation = "https://docs.rs/clang_tidy_sarif" 13 | repository = "https://github.com/psastras/sarif-rs" 14 | 15 | [badges] 16 | github = { repository = "psastras/sarif-rs" } 17 | 18 | [[bin]] 19 | name = "clang-tidy-sarif" 20 | path = "src/bin.rs" 21 | 22 | [dependencies] 23 | anyhow = "1.0.98" 24 | serde-sarif = { path = "../serde-sarif", version = "0.8.0", features = [ 25 | "clang-tidy-converters", 26 | ] } 27 | clap = { version = "4.5.40", features = ["derive"] } 28 | duct = "1.0.0" 29 | 30 | [dev-dependencies] 31 | version-sync = "0.9" 32 | 33 | [package.metadata.binstall] 34 | pkg-url = "{ repo }/releases/download/{ name }-v{ version }/{ name }-{ target }" 35 | pkg-fmt = "bin" 36 | -------------------------------------------------------------------------------- /clang-tidy-sarif/LICENSE: -------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- /clang-tidy-sarif/README.md: -------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | 3 | # clang-tidy-sarif 4 | 5 | This crate provides a command line tool to convert `clang-tidy` diagnostic 6 | output into SARIF. 
7 | 8 | The latest [documentation can be found here](https://docs.rs/clang_tidy_sarif). 9 | 10 | clang-tidy is a popular linter / static analysis tool for C++. More information 11 | can be found on the official page: 12 | [https://clang.llvm.org/extra/clang-tidy/](https://clang.llvm.org/extra/clang-tidy/) 13 | 14 | SARIF or the Static Analysis Results Interchange Format is an industry standard 15 | format for the output of static analysis tools. More information can be found on 16 | the official website: 17 | [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 18 | 19 | ## Installation 20 | 21 | `clang-tidy-sarif` may be installed via `cargo` 22 | 23 | ```shell 24 | cargo install clang-tidy-sarif 25 | ``` 26 | 27 | via [cargo-binstall](https://github.com/cargo-bins/cargo-binstall) 28 | 29 | ```shell 30 | cargo binstall clang-tidy-sarif 31 | ``` 32 | 33 | or downloaded directly from Github Releases 34 | 35 | ```shell 36 | # make sure to adjust the target and version (you may also want to pin to a specific version) 37 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/clang-tidy-sarif-v0.8.0/clang-tidy-sarif-x86_64-unknown-linux-gnu -o clang-tidy-sarif 38 | ``` 39 | 40 | ### Fedora Linux 41 | 42 | ```shell 43 | sudo dnf install # ex. cargo binstall clang-tidy-sarif 44 | ``` 45 | 46 | ### Nix 47 | 48 | Through the `nix` cli, 49 | 50 | ```shell 51 | nix --accept-flake-config profile install github:psastras/sarif-rs#clang-tidy-sarif 52 | ``` 53 | 54 | ## Usage 55 | 56 | For most cases, simply run `clang-tidy` and pipe the results into 57 | `clang-tidy-sarif`. 58 | 59 | ## Example 60 | 61 | ```shell 62 | clang-tidy -checks=cert-* -warnings-as-errors=* main.cpp -- | clang-tidy-sarif 63 | ``` 64 | 65 | If you are using Github Actions, SARIF is useful for integrating with Github 66 | Advanced Security (GHAS), which can show code alerts in the "Security" tab of 67 | your repository. 
68 | 69 | After uploading `clang-tidy-sarif` output to Github, `clang-tidy` diagnostics 70 | are available in GHAS. 71 | 72 | ## Example 73 | 74 | ```yaml 75 | on: 76 | workflow_run: 77 | workflows: ["main"] 78 | branches: [main] 79 | types: [completed] 80 | 81 | name: sarif 82 | 83 | jobs: 84 | upload-sarif: 85 | runs-on: ubuntu-latest 86 | if: ${{ github.ref == 'refs/heads/main' }} 87 | steps: 88 | - uses: actions/checkout@v2 89 | - uses: actions-rs/toolchain@v1 90 | with: 91 | profile: minimal 92 | toolchain: stable 93 | override: true 94 | - uses: Swatinem/rust-cache@v1 95 | - run: cargo install clang-tidy-sarif sarif-fmt 96 | - run: clang-tidy -checks=cert-* -warnings-as-errors=* main.cpp -- | clang-tidy-sarif | tee 97 | results.sarif | sarif-fmt 98 | - name: Upload SARIF file 99 | uses: github/codeql-action/upload-sarif@v1 100 | with: 101 | sarif_file: results.sarif 102 | ``` 103 | 104 | License: MIT 105 | -------------------------------------------------------------------------------- /clang-tidy-sarif/cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 6 | # See documentation for more information on available options. 
7 | 8 | [changelog] 9 | # changelog header 10 | header = """ 11 | # Changelog\n 12 | """ 13 | # template for the changelog body 14 | # https://keats.github.io/tera/docs/#introduction 15 | body = """ 16 | {% if version %}\ 17 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 18 | {% else %}\ 19 | ## [unreleased] 20 | {% endif %}\ 21 | {% for group, commits in commits | group_by(attribute="group") %} 22 | ### {{ group | striptags | trim | upper_first }} 23 | {% for commit in commits %} 24 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 25 | {% if commit.breaking %}[**breaking**] {% endif %}\ 26 | {{ commit.message | upper_first }}\ 27 | {% endfor %} 28 | {% endfor %}\n 29 | """ 30 | # template for the changelog footer 31 | footer = """""" 32 | # remove the leading and trailing s 33 | trim = true 34 | # postprocessors 35 | postprocessors = [ 36 | # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL 37 | ] 38 | 39 | [git] 40 | # parse the commits based on https://www.conventionalcommits.org 41 | conventional_commits = true 42 | # filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | # Replace issue numbers 49 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 50 | # Check spelling of the commit with https://github.com/crate-ci/typos 51 | # If the spelling is incorrect, it will be automatically fixed. 
52 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 53 | ] 54 | # regex for parsing and grouping commits 55 | commit_parsers = [ 56 | { message = "^feat", group = "🚀 Features" }, 57 | { message = "^fix", group = "🐛 Bug Fixes" }, 58 | { message = "^doc", group = "📚 Documentation" }, 59 | { message = "^perf", group = "⚡ Performance" }, 60 | { message = "^refactor", group = "🚜 Refactor" }, 61 | { message = "^style", group = "🎨 Styling" }, 62 | { message = "^test", group = "🧪 Testing" }, 63 | { message = "^chore\\(release\\): prepare for", skip = true }, 64 | { message = "^chore\\(deps.*\\)", skip = true }, 65 | { message = "^chore\\(pr\\)", skip = true }, 66 | { message = "^chore\\(pull\\)", skip = true }, 67 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 68 | { body = ".*security", group = "🛡️ Security" }, 69 | { message = "^revert", group = "◀️ Revert" }, 70 | ] 71 | # protect breaking changes from being skipped due to matching a skipping commit_parser 72 | protect_breaking_commits = false 73 | # filter out the commits that are not matched by commit parsers 74 | filter_commits = true 75 | # regex for matching git tags 76 | tag_pattern = "clang-tidy-sarif-v[0-9].*" 77 | # regex for skipping tags 78 | # skip_tags = "" 79 | # regex for ignoring tags 80 | # ignore_tags = "" 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | # limit the number of commits included in the changelog. 86 | # limit_commits = 42 87 | -------------------------------------------------------------------------------- /clang-tidy-sarif/src/bin.rs: -------------------------------------------------------------------------------- 1 | #![doc(html_root_url = "https://docs.rs/clang-tidy-sarif/0.8.0")] 2 | 3 | //! This crate provides a command line tool to convert `clang-tidy` diagnostic 4 | //! output into SARIF. 5 | //! 6 | //! 
The latest [documentation can be found here](https://docs.rs/clang_tidy_sarif). 7 | //! 8 | //! clang-tidy is a popular linter / static analysis tool for C++. More information 9 | //! can be found on the official page: [https://clang.llvm.org/extra/clang-tidy/](https://clang.llvm.org/extra/clang-tidy/) 10 | //! 11 | //! SARIF or the Static Analysis Results Interchange Format is an industry 12 | //! standard format for the output of static analysis tools. More information 13 | //! can be found on the official website: [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 14 | //! 15 | //! ## Installation 16 | //! 17 | //! `clang-tidy-sarif` may be installed via `cargo` 18 | //! 19 | //! ```shell 20 | //! cargo install clang-tidy-sarif 21 | //! ``` 22 | //! 23 | //! or downloaded directly from Github Releases 24 | //! 25 | //!```shell 26 | //! # make sure to adjust the target and version (you may also want to pin to a specific version) 27 | //! curl -sSL https://github.com/psastras/sarif-rs/releases/download/clang-tidy-sarif-latest/clang-tidy-sarif-x86_64-unknown-linux-gnu -o clang-tidy-sarif 28 | //! ``` 29 | //! 30 | //! ## Usage 31 | //! 32 | //! For most cases, simply run `clang-tidy` and pipe the 33 | //! results into `clang-tidy-sarif`. 34 | //! 35 | //! ## Example 36 | //! 37 | //!```shell 38 | //! clang-tidy -checks=cert-* -warnings-as-errors=* main.cpp -- | clang-tidy-sarif 39 | //! ``` 40 | //! 41 | //! If you are using Github Actions, SARIF is useful for integrating with 42 | //! Github Advanced Security (GHAS), which can show code alerts in the 43 | //! "Security" tab of your repository. 44 | //! 45 | //! After uploading `clang-tidy-sarif` output to Github, `clang-tidy` diagnostics 46 | //! are available in GHAS. 47 | //! 48 | //! ## Example 49 | //! 50 | //! ```yaml 51 | //! on: 52 | //! workflow_run: 53 | //! workflows: ["main"] 54 | //! branches: [main] 55 | //! types: [completed] 56 | //! 57 | //! name: sarif 58 | //! 59 | //! 
jobs: 60 | //! upload-sarif: 61 | //! runs-on: ubuntu-latest 62 | //! if: ${{ github.ref == 'refs/heads/main' }} 63 | //! steps: 64 | //! - uses: actions/checkout@v2 65 | //! - uses: actions-rs/toolchain@v1 66 | //! with: 67 | //! profile: minimal 68 | //! toolchain: stable 69 | //! override: true 70 | //! - uses: Swatinem/rust-cache@v1 71 | //! - run: cargo install clang-tidy-sarif sarif-fmt 72 | //! - run: 73 | //! clang-tidy -checks=cert-* -warnings-as-errors=* main.cpp -- | tee 74 | //! results.sarif | sarif-fmt 75 | //! - name: Upload SARIF file 76 | //! uses: github/codeql-action/upload-sarif@v1 77 | //! with: 78 | //! sarif_file: results.sarif 79 | //! ``` 80 | //! 81 | 82 | use anyhow::Result; 83 | use clap::Parser; 84 | use std::fs::File; 85 | use std::io::{BufReader, BufWriter, Read, Write}; 86 | 87 | #[derive(Parser, Debug)] 88 | #[command( 89 | version, 90 | about = "Convert clang-tidy output into SARIF", 91 | after_help = "The expected input is generated by running 'clang-tidy'.", 92 | long_about = None, 93 | )] 94 | struct Args { 95 | /// input file; reads from stdin if none is given 96 | #[arg(short, long)] 97 | input: Option, 98 | /// output file; writes to stdout if none is given 99 | #[arg(short, long)] 100 | output: Option, 101 | } 102 | 103 | fn main() -> Result<()> { 104 | let args = Args::parse(); 105 | 106 | let read = match args.input { 107 | Some(path) => Box::new(File::open(path)?) as Box, 108 | None => Box::new(std::io::stdin()) as Box, 109 | }; 110 | let reader = BufReader::new(read); 111 | 112 | let write = match args.output { 113 | Some(path) => Box::new(File::create(path)?) 
as Box<dyn Write>, 114 | None => Box::new(std::io::stdout()) as Box<dyn Write>, 115 |
-------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- /clippy-sarif/README.md: -------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | 3 | # clippy-sarif 4 | 5 | This crate provides a command line tool to convert `cargo clippy` diagnostic 6 | output into SARIF. 7 | 8 | The latest [documentation can be found here](https://docs.rs/clippy_sarif). 9 | 10 | clippy is a popular linter / static analysis tool for rust. More information can 11 | be found on the official repository: 12 | [https://github.com/rust-lang/rust-clippy](https://github.com/rust-lang/rust-clippy) 13 | 14 | SARIF or the Static Analysis Results Interchange Format is an industry standard 15 | format for the output of static analysis tools. More information can be found on 16 | the official website: 17 | [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 18 | 19 | ## Installation 20 | 21 | `clippy-sarif` may be installed via `cargo` 22 | 23 | ```shell 24 | cargo install clippy-sarif 25 | ``` 26 | 27 | via [cargo-binstall](https://github.com/cargo-bins/cargo-binstall) 28 | 29 | ```shell 30 | cargo binstall clippy-sarif 31 | ``` 32 | 33 | or downloaded directly from Github Releases 34 | 35 | ```shell 36 | # make sure to adjust the target and version (you may also want to pin to a specific version) 37 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/clippy-sarif-v0.8.0/clippy-sarif-x86_64-unknown-linux-gnu -o clippy-sarif 38 | ``` 39 | 40 | ### Fedora Linux 41 | 42 | ```shell 43 | sudo dnf install # ex. 
sudo dnf install clippy-sarif 44
In this case consider using a tool like `jq` to amend the path: 106 | 107 | ## Example 108 | 109 | ```bash 110 | cat results.sarif \ 111 | | jq --arg pwd "some_folder/my_crate" '.runs[].results[].locations[].physicalLocation.artifactLocation.uri |= $pwd + "/" + .' \ 112 | > results.sarif.tmp 113 | ``` 114 | 115 | Note that this may be fixed in a future release.
filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | # Replace issue numbers 49 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 50 | # Check spelling of the commit with https://github.com/crate-ci/typos 51 | # If the spelling is incorrect, it will be automatically fixed. 52 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 53 | ] 54 | # regex for parsing and grouping commits 55 | commit_parsers = [ 56 | { message = "^feat", group = "🚀 Features" }, 57 | { message = "^fix", group = "🐛 Bug Fixes" }, 58 | { message = "^doc", group = "📚 Documentation" }, 59 | { message = "^perf", group = "⚡ Performance" }, 60 | { message = "^refactor", group = "🚜 Refactor" }, 61 | { message = "^style", group = "🎨 Styling" }, 62 | { message = "^test", group = "🧪 Testing" }, 63 | { message = "^chore\\(release\\): prepare for", skip = true }, 64 | { message = "^chore\\(deps.*\\)", skip = true }, 65 | { message = "^chore\\(pr\\)", skip = true }, 66 | { message = "^chore\\(pull\\)", skip = true }, 67 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 68 | { body = ".*security", group = "🛡️ Security" }, 69 | { message = "^revert", group = "◀️ Revert" }, 70 | ] 71 | # protect breaking changes from being skipped due to matching a skipping commit_parser 72 | protect_breaking_commits = false 73 | # filter out the commits that are not matched by commit parsers 74 | filter_commits = true 75 | # regex for matching git tags 76 | tag_pattern = "clippy-sarif-v[0-9].*" 77 | # regex for skipping tags 78 | # skip_tags = "" 79 | # regex for ignoring tags 80 | # ignore_tags = "" 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | # limit the 
number of commits included in the changelog. 86 | # limit_commits = 42 87 | -------------------------------------------------------------------------------- /clippy-sarif/src/bin.rs: -------------------------------------------------------------------------------- 1 | #![doc(html_root_url = "https://docs.rs/clippy-sarif/0.8.0")] 2 | 3 | //! This crate provides a command line tool to convert `cargo clippy` diagnostic 4 | //! output into SARIF. 5 | //! 6 | //! The latest [documentation can be found here](https://docs.rs/clippy_sarif). 7 | //! 8 | //! clippy is a popular linter / static analysis tool for rust. More information 9 | //! can be found on the official repository: [https://github.com/rust-lang/rust-clippy](https://github.com/rust-lang/rust-clippy) 10 | //! 11 | //! SARIF or the Static Analysis Results Interchange Format is an industry 12 | //! standard format for the output of static analysis tools. More information 13 | //! can be found on the official website: [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 14 | //! 15 | //! ## Installation 16 | //! 17 | //! `clippy-sarif` may be installed via `cargo` 18 | //! 19 | //! ```shell 20 | //! cargo install clippy-sarif 21 | //! ``` 22 | //! 23 | //! or downloaded directly from Github Releases 24 | //! 25 | //!```shell 26 | //! # make sure to adjust the target and version (you may also want to pin to a specific version) 27 | //! curl -sSL https://github.com/psastras/sarif-rs/releases/download/clippy-sarif-latest/clippy-sarif-x86_64-unknown-linux-gnu -o clippy-sarif 28 | //! ``` 29 | //! 30 | //! ## Usage 31 | //! 32 | //! For most cases, simply run `cargo clippy` with `json` output and pipe the 33 | //! results into `clippy-sarif`. 34 | //! 35 | //! ## Example 36 | //! 37 | //!```shell 38 | //! cargo clippy --message-format=json | clippy-sarif 39 | //! ``` 40 | //! 41 | //! If you are using Github Actions, SARIF is useful for integrating with 42 | //! 
Github Advanced Security (GHAS), which can show code alerts in the 43 | //! "Security" tab of your repository. 44 | //! 45 | //! After uploading `clippy-sarif` output to Github, `clippy` diagnostics 46 | //! are available in GHAS. 47 | //! 48 | //! ## Example 49 | //! 50 | //! ```yaml 51 | //! on: 52 | //! workflow_run: 53 | //! workflows: ["main"] 54 | //! branches: [main] 55 | //! types: [completed] 56 | //! 57 | //! name: sarif 58 | //! 59 | //! jobs: 60 | //! upload-sarif: 61 | //! runs-on: ubuntu-latest 62 | //! if: ${{ github.ref == 'refs/heads/main' }} 63 | //! steps: 64 | //! - uses: actions/checkout@v2 65 | //! - uses: actions-rs/toolchain@v1 66 | //! with: 67 | //! profile: minimal 68 | //! toolchain: stable 69 | //! components: clippy,rustfmt 70 | //! override: true 71 | //! - uses: Swatinem/rust-cache@v1 72 | //! - run: cargo install clippy-sarif sarif-fmt 73 | //! - run: 74 | //! cargo clippy --all-targets --all-features --message-format=json | 75 | //! clippy-sarif | tee results.sarif | sarif-fmt 76 | //! - name: Upload SARIF file 77 | //! uses: github/codeql-action/upload-sarif@v1 78 | //! with: 79 | //! sarif_file: results.sarif 80 | //! ``` 81 | //! 82 | 83 | use anyhow::Result; 84 | use clap::Parser; 85 | use std::fs::File; 86 | use std::io::{BufReader, BufWriter, Read, Write}; 87 | 88 | #[derive(Parser, Debug)] 89 | #[command( 90 | version, 91 | about = "Convert clippy output into SARIF", 92 | after_help = "The expected input is generated by running 'cargo clippy --message-format=json'.", 93 | long_about = None, 94 | )] 95 | struct Args { 96 | /// input file; reads from stdin if none is given 97 | #[arg(short, long)] 98 | input: Option, 99 | /// output file; writes to stdout if none is given 100 | #[arg(short, long)] 101 | output: Option, 102 | } 103 | 104 | fn main() -> Result<()> { 105 | let args = Args::parse(); 106 | 107 | let read = match args.input { 108 | Some(path) => Box::new(File::open(path)?) 
as Box<dyn Read>, 109 | None => Box::new(std::io::stdin()) as Box<dyn Read>, 110 | }; 111 | let reader = BufReader::new(read); 112 | 113 | let write = match args.output { 114 | Some(path) => Box::new(File::create(path)?) as Box<dyn Write>, 115 | None => Box::new(std::io::stdout()) as Box<dyn Write>, 116 | };
23 | taxonomies = [] 24 | 25 | # Whether to build a search index to be used later on by a JavaScript library 26 | # When set to "true", a search index is built from the pages and section 27 | # content for `default_language`. 28 | build_search_index = true 29 | 30 | [search] 31 | # Whether to include the title of the page/section in the index 32 | include_title = true 33 | # Whether to include the description of the page/section in the index 34 | include_description = false 35 | # Whether to include the rendered content of the page/section in the index 36 | include_content = true 37 | 38 | [markdown] 39 | # Whether to do syntax highlighting. 40 | # Theme can be customised by setting the `highlight_theme` 41 | # variable to a theme supported by Zola 42 | highlight_code = true 43 | highlight_theme = "ayu-light" 44 | 45 | [extra] 46 | # Put all your custom variables here 47 | author = "Paul Sastrasinh" 48 | github = "https://github.com/psastras" 49 | twitter = "" 50 | email = "psastras@gmail.com" 51 | 52 | # If running on netlify.app site, set to true 53 | is_netlify = false 54 | 55 | # Set HTML file language 56 | language_code = "en-US" 57 | 58 | # Set theme-color meta tag for Chrome browser 59 | theme_color = "#fff" 60 | 61 | # More about site's title 62 | title_separator = "|" # set as |, -, _, etc 63 | title_addition = "SARIF CLI tools" 64 | 65 | 66 | # Set date format in blog publish metadata 67 | timeformat = "%B %e, %Y" # e.g. June 14, 2021 68 | timezone = "America/New_York" 69 | 70 | # Edit page on reposity or not 71 | edit_page = true 72 | docs_repo = "https://github.com/psastras/sarif-rs" 73 | repo_branch = "main" 74 | 75 | ## Math settings 76 | # options: true, false. Enable math support globally, 77 | # default: false. You can always enable math on a per page. 78 | math = false 79 | library = "katex" # options: "katex", "mathjax". default is "katex". 
80 | 81 | ## Open Graph + Twitter Cards 82 | [extra.open] 83 | enable = true 84 | # this image will be used as fallback if a page has no image of its own 85 | image = "doks.png" 86 | og_locale = "en_US" 87 | twitter_site = "" 88 | twitter_creator = "" 89 | facebook_publisher = "" 90 | facebook_author = "" 91 | 92 | ## JSON-LD 93 | [extra.schema] 94 | type = "Organization" 95 | logo = "logo-doks.png" 96 | linked_in = "" 97 | github = "https://github.com/psastras" 98 | # section = "blog" # see config.extra.main~url 99 | ## Sitelinks Search Box 100 | site_links_search_box = true 101 | 102 | 103 | # Menu items 104 | [[extra.menu.main]] 105 | name = "Docs" 106 | section = "docs" 107 | url = "/docs/getting-started/introduction/" 108 | weight = 10 109 | 110 | [[extra.menu.social]] 111 | name = "GitHub" 112 | pre = '' 113 | url = "https://github.com/psastras/sarif-rs" 114 | post = "v0.1.0" 115 | weight = 20 116 | 117 | # Footer contents 118 | [extra.footer] 119 | info = '2021' 120 | 121 | -------------------------------------------------------------------------------- /docs/content/_index.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "sarif-rs" 3 | 4 | 5 | # The homepage contents 6 | [extra] 7 | lead = 'sarif-rs is a group of Rust libraries and CLI tools for interacting with SARIF files.' 8 | url = "/docs/getting-started/introduction/" 9 | url_button = "Get started" 10 | repo_version = "GitHub" 11 | repo_license = "Open-source MIT License." 12 | repo_url = "https://github.com/psastras/sarif-rs" 13 | 14 | [[extra.list]] 15 | title = "SARIF" 16 | content = 'The Static Analysis Results Interchange Format (SARIF) is an open specification for reporting software defects and enables communication between different backends and frontends.' 17 | 18 | [[extra.list]] 19 | title = "Interoperability" 20 | content = 'sarif-rs provides converters from various tools (clippy, hadolint, shellcheck) and the SARIF format. 
This enables tool compatibility with other SARIF based tools (ex. Github Advanced Security).' 21 | 22 | [[extra.list]] 23 | title = "POSIX Tools" 24 | content = 'All CLI tools are POSIX compliant, making them easy to use and chain with other CLI tools.' 25 | 26 | 27 | +++ 28 | -------------------------------------------------------------------------------- /docs/content/docs/_index.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Docs" 3 | description = "The documents of the AdiDoks theme." 4 | date = 2025-05-01T08:00:00+00:00 5 | updated = 2021-05-01T08:00:00+00:00 6 | sort_by = "weight" 7 | weight = 1 8 | template = "docs/section.html" 9 | +++ 10 | -------------------------------------------------------------------------------- /docs/content/docs/getting-started/_index.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Getting Started" 3 | description = "Quick start and guides for installing and using sarif-rs" 4 | date = 2025-05-01T08:00:00+00:00 5 | updated = 2021-05-01T08:00:00+00:00 6 | template = "docs/section.html" 7 | sort_by = "weight" 8 | weight = 1 9 | draft = false 10 | +++ 11 | -------------------------------------------------------------------------------- /docs/content/docs/getting-started/introduction.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Introduction" 3 | description = "sarif-rs is group of Rust projects (CLI and libraries) for interacting with the SARIF format." 4 | date = 2021-05-01T08:00:00+00:00 5 | updated = 2021-05-01T08:00:00+00:00 6 | draft = false 7 | weight = 10 8 | sort_by = "weight" 9 | template = "docs/page.html" 10 | 11 | [extra] 12 | lead = 'sarif-rs is group of Rust projects (CLI and libraries) for interacting with the SARIF format.' 
13 | toc = true 14 | top = false 15 | +++ 16 | 17 | ## Examples 18 | 19 | Parse `cargo clippy` output, convert to SARIF (`clippy-sarif`), then pretty 20 | print the SARIF to terminal (`sarif-fmt`). 21 | 22 | ```shell 23 | $ cargo clippy --message-format=json | clippy-sarif | sarif-fmt 24 | $ warning: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)` 25 | ┌─ sarif-fmt/src/bin.rs:423:13 26 | │ 27 | 423 │ ╭ the_rule 28 | 424 │ │ .full_description 29 | 425 │ │ .as_ref() 30 | 426 │ │ .and_then(|mfms| Some(mfms.text.clone())) 31 | │ ╰───────────────────────────────────────────────────────^ 32 | │ 33 | = `#[warn(clippy::bind_instead_of_map)]` on by default 34 | for further information visit https://rust-lang.github.io/rust-clippy/master#bind_instead_of_map 35 | ``` 36 | 37 | Parse `clang-tidy` output, convert to SARIF (`clang-tidy-sarif`), then print the 38 | SARIF results to terminal in plain text format (`sarif-fmt -f plain`) 39 | 40 | ```shell 41 | $ clang-tidy -checks=cert-* cpp.cpp -- | clang-tidy-sarif | clang-tidy.sarif | sarif-fmt -f plain 42 | 2 warnings generated. 43 | warning [cpp.cpp:4:10] 'atoi' used to convert a string to an integer value, but function will not report conversion errors; consider using 'strtol' instead [cert-err34-c] 44 | warning [cpp.cpp:8:3] calling 'system' uses a command processor [cert-env33-c] 45 | ``` 46 | 47 | ![alt text](/sarif-rs/images/ghas.png "Example of SARIF GHAS integration") 48 | 49 | ## Install 50 | 51 | Each CLI may be installed via `cargo` or directly downloaded from the 52 | corresponding Github release. 53 | 54 | ### Cargo 55 | 56 | ```shell 57 | cargo install # ex. cargo install sarif-fmt 58 | ``` 59 | 60 | ### Github Releases 61 | 62 | The latest version is 63 | [continuously published and tagged](https://github.com/psastras/sarif-rs/releases). 
64 | 65 | Using `curl`, 66 | 67 | ```shell 68 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/latest-x86_64-unknown-linux-gnu/sarif-fmt # make sure to adjust the target triplet (latest-) to the correct target 69 | ``` 70 | 71 | ## Provided Tools 72 | 73 | Below is a list of libraries and tools which are part of the `sarif-rs` project: 74 | 75 | - `clang-tidy-sarif`: CLI tool to convert `clang-tidy` diagnostics into SARIF. 76 | - `clippy-sarif`: CLI tool to convert `clippy` diagnostics into SARIF. 77 | - `hadolint-sarif`: CLI tool to convert `hadolint` diagnostics into SARIF. 78 | - `miri-sarif`: CLI tool to convert `cargo miri` diagnostics into SARIF. 79 | - `shellcheck-sarif`: CLI tool to convert `shellcheck` diagnostics into SARIF. 80 | - `sarif-fmt`: CLI tool to pretty print SARIF diagnostics. 81 | - `serde-sarif`: Typesafe SARIF structures for serializing and deserializing 82 | SARIF information using [serde](https://serde.rs/). 83 | -------------------------------------------------------------------------------- /docs/content/docs/help/_index.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Help" 3 | description = "Get help on AdiDoks." 4 | date = 2025-05-01T19:00:00+00:00 5 | updated = 2021-05-01T19:00:00+00:00 6 | template = "docs/section.html" 7 | sort_by = "weight" 8 | weight = 5 9 | draft = false 10 | +++ 11 | -------------------------------------------------------------------------------- /docs/content/docs/help/rust_docs.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Rust Docs" 3 | description = "Links to Rust documentation." 4 | date = 2021-05-01T19:30:00+00:00 5 | updated = 2021-05-01T19:30:00+00:00 6 | draft = false 7 | weight = 30 8 | sort_by = "weight" 9 | template = "docs/page.html" 10 | 11 | [extra] 12 | lead = "Links to Rust documentation." 
13 | toc = true 14 | top = false 15 | +++ 16 | 17 | Rust documentation for each tool and library is available on docs.rs. 18 | 19 | | Tool / Library | docs.rs Link | 20 | | ------------------ | ---------------------------------------------------------------------- | 21 | | `clang-tidy-sarif` | [https://docs.rs/clang_tidy_sarif/](https://docs.rs/clang_tidy_sarif/) | 22 | | `clippy-sarif` | [https://docs.rs/clippy_sarif/](https://docs.rs/clippy_sarif/) | 23 | | `hadlint-sarif` | [https://docs.rs/hadolint_sarif/](https://docs.rs/hadolint_sarif/) | 24 | | `miri-sarif` | [https://docs.rs/miri_sarif/](https://docs.rs/miri_sarif/) | 25 | | `shellcheck-sarif` | [https://docs.rs/shellcheck_sarif/](https://docs.rs/shellcheck_sarif/) | 26 | | `sarif-fmt` | [https://docs.rs/sarif_fmt/](https://docs.rs/sarif_fmt/) | 27 | | `serde-sarif` | [https://docs.rs/serde_sarif/](https://docs.rs/serde_sarif/) | 28 | -------------------------------------------------------------------------------- /docs/static/images/carbon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psastras/sarif-rs/da2b0fe12cb394f11c32a5b800290b23dd9fb724/docs/static/images/carbon.png -------------------------------------------------------------------------------- /docs/static/images/ghas.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/psastras/sarif-rs/da2b0fe12cb394f11c32a5b800290b23dd9fb724/docs/static/images/ghas.png -------------------------------------------------------------------------------- /docs/static/pages/google801b3c72f2cdc749.html: -------------------------------------------------------------------------------- 1 | google-site-verification: google801b3c72f2cdc749.html -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 
3 | "crane": { 4 | "inputs": { 5 | "nixpkgs": [ 6 | "rust-flake", 7 | "nixpkgs" 8 | ] 9 | }, 10 | "locked": { 11 | "lastModified": 1718474113, 12 | "narHash": "sha256-UKrfy/46YF2TRnxTtKCYzqf2f5ZPRRWwKCCJb7O5X8U=", 13 | "owner": "ipetkov", 14 | "repo": "crane", 15 | "rev": "0095fd8ea00ae0a9e6014f39c375e40c2fbd3386", 16 | "type": "github" 17 | }, 18 | "original": { 19 | "owner": "ipetkov", 20 | "repo": "crane", 21 | "type": "github" 22 | } 23 | }, 24 | "devour-flake": { 25 | "flake": false, 26 | "locked": { 27 | "lastModified": 1709858306, 28 | "narHash": "sha256-Vey9n9hIlWiSAZ6CCTpkrL6jt4r2JvT2ik9wa2bjeC0=", 29 | "owner": "srid", 30 | "repo": "devour-flake", 31 | "rev": "17b711b9deadbbc5629cb7d2b64cf86ae72af3fa", 32 | "type": "github" 33 | }, 34 | "original": { 35 | "owner": "srid", 36 | "repo": "devour-flake", 37 | "type": "github" 38 | } 39 | }, 40 | "flake-compat": { 41 | "flake": false, 42 | "locked": { 43 | "lastModified": 1696426674, 44 | "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=", 45 | "owner": "edolstra", 46 | "repo": "flake-compat", 47 | "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33", 48 | "type": "github" 49 | }, 50 | "original": { 51 | "owner": "edolstra", 52 | "repo": "flake-compat", 53 | "type": "github" 54 | } 55 | }, 56 | "flake-parts": { 57 | "inputs": { 58 | "nixpkgs-lib": "nixpkgs-lib" 59 | }, 60 | "locked": { 61 | "lastModified": 1719994518, 62 | "narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=", 63 | "owner": "hercules-ci", 64 | "repo": "flake-parts", 65 | "rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7", 66 | "type": "github" 67 | }, 68 | "original": { 69 | "owner": "hercules-ci", 70 | "repo": "flake-parts", 71 | "type": "github" 72 | } 73 | }, 74 | "flake-utils": { 75 | "inputs": { 76 | "systems": "systems" 77 | }, 78 | "locked": { 79 | "lastModified": 1705309234, 80 | "narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=", 81 | "owner": "numtide", 82 | "repo": "flake-utils", 83 | "rev": 
"1ef2e671c3b0c19053962c07dbda38332dcebf26", 84 | "type": "github" 85 | }, 86 | "original": { 87 | "owner": "numtide", 88 | "repo": "flake-utils", 89 | "type": "github" 90 | } 91 | }, 92 | "gitignore": { 93 | "inputs": { 94 | "nixpkgs": [ 95 | "pre-commit-hooks", 96 | "nixpkgs" 97 | ] 98 | }, 99 | "locked": { 100 | "lastModified": 1709087332, 101 | "narHash": "sha256-HG2cCnktfHsKV0s4XW83gU3F57gaTljL9KNSuG6bnQs=", 102 | "owner": "hercules-ci", 103 | "repo": "gitignore.nix", 104 | "rev": "637db329424fd7e46cf4185293b9cc8c88c95394", 105 | "type": "github" 106 | }, 107 | "original": { 108 | "owner": "hercules-ci", 109 | "repo": "gitignore.nix", 110 | "type": "github" 111 | } 112 | }, 113 | "nixpkgs": { 114 | "locked": { 115 | "lastModified": 1717179513, 116 | "narHash": "sha256-vboIEwIQojofItm2xGCdZCzW96U85l9nDW3ifMuAIdM=", 117 | "owner": "nixos", 118 | "repo": "nixpkgs", 119 | "rev": "63dacb46bf939521bdc93981b4cbb7ecb58427a0", 120 | "type": "github" 121 | }, 122 | "original": { 123 | "owner": "nixos", 124 | "ref": "24.05", 125 | "repo": "nixpkgs", 126 | "type": "github" 127 | } 128 | }, 129 | "nixpkgs-lib": { 130 | "locked": { 131 | "lastModified": 1719876945, 132 | "narHash": "sha256-Fm2rDDs86sHy0/1jxTOKB1118Q0O3Uc7EC0iXvXKpbI=", 133 | "type": "tarball", 134 | "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" 135 | }, 136 | "original": { 137 | "type": "tarball", 138 | "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" 139 | } 140 | }, 141 | "nixpkgs-stable": { 142 | "locked": { 143 | "lastModified": 1720386169, 144 | "narHash": "sha256-NGKVY4PjzwAa4upkGtAMz1npHGoRzWotlSnVlqI40mo=", 145 | "owner": "NixOS", 146 | "repo": "nixpkgs", 147 | "rev": "194846768975b7ad2c4988bdb82572c00222c0d7", 148 | "type": "github" 149 | }, 150 | "original": { 151 | "owner": "NixOS", 152 | "ref": "nixos-24.05", 153 | "repo": "nixpkgs", 154 | "type": "github" 155 | } 156 | }, 157 | "nixpkgs_2": { 158 
| "locked": { 159 | "lastModified": 1719082008, 160 | "narHash": "sha256-jHJSUH619zBQ6WdC21fFAlDxHErKVDJ5fpN0Hgx4sjs=", 161 | "owner": "NixOS", 162 | "repo": "nixpkgs", 163 | "rev": "9693852a2070b398ee123a329e68f0dab5526681", 164 | "type": "github" 165 | }, 166 | "original": { 167 | "owner": "NixOS", 168 | "ref": "nixpkgs-unstable", 169 | "repo": "nixpkgs", 170 | "type": "github" 171 | } 172 | }, 173 | "nixpkgs_3": { 174 | "locked": { 175 | "lastModified": 1715447595, 176 | "narHash": "sha256-VsVAUQOj/cS1LCOmMjAGeRksXIAdPnFIjCQ0XLkCsT0=", 177 | "owner": "nixos", 178 | "repo": "nixpkgs", 179 | "rev": "062ca2a9370a27a35c524dc82d540e6e9824b652", 180 | "type": "github" 181 | }, 182 | "original": { 183 | "owner": "nixos", 184 | "ref": "nixos-unstable", 185 | "repo": "nixpkgs", 186 | "type": "github" 187 | } 188 | }, 189 | "pre-commit-hooks": { 190 | "inputs": { 191 | "flake-compat": "flake-compat", 192 | "gitignore": "gitignore", 193 | "nixpkgs": "nixpkgs_2", 194 | "nixpkgs-stable": "nixpkgs-stable" 195 | }, 196 | "locked": { 197 | "lastModified": 1721042469, 198 | "narHash": "sha256-6FPUl7HVtvRHCCBQne7Ylp4p+dpP3P/OYuzjztZ4s70=", 199 | "owner": "cachix", 200 | "repo": "pre-commit-hooks.nix", 201 | "rev": "f451c19376071a90d8c58ab1a953c6e9840527fd", 202 | "type": "github" 203 | }, 204 | "original": { 205 | "owner": "cachix", 206 | "repo": "pre-commit-hooks.nix", 207 | "type": "github" 208 | } 209 | }, 210 | "root": { 211 | "inputs": { 212 | "devour-flake": "devour-flake", 213 | "flake-parts": "flake-parts", 214 | "nixpkgs": "nixpkgs", 215 | "pre-commit-hooks": "pre-commit-hooks", 216 | "rust-flake": "rust-flake", 217 | "systems": "systems_2" 218 | } 219 | }, 220 | "rust-flake": { 221 | "inputs": { 222 | "crane": "crane", 223 | "nixpkgs": "nixpkgs_3", 224 | "rust-overlay": "rust-overlay" 225 | }, 226 | "locked": { 227 | "lastModified": 1722280942, 228 | "narHash": "sha256-xbL+LnUrWQoW6yukTSI1KN0vfnHNus+hwXP1h6NLvhU=", 229 | "owner": "juspay", 230 | "repo": "rust-flake", 
231 | "rev": "a51532d54713d0b4a0f999da4753026af9068df2", 232 | "type": "github" 233 | }, 234 | "original": { 235 | "owner": "juspay", 236 | "repo": "rust-flake", 237 | "type": "github" 238 | } 239 | }, 240 | "rust-overlay": { 241 | "inputs": { 242 | "flake-utils": "flake-utils", 243 | "nixpkgs": [ 244 | "rust-flake", 245 | "nixpkgs" 246 | ] 247 | }, 248 | "locked": { 249 | "lastModified": 1715480255, 250 | "narHash": "sha256-gEZl8nYidQwqJhOigJ91JDjoBFoPEWVsd82AKnaE7Go=", 251 | "owner": "oxalica", 252 | "repo": "rust-overlay", 253 | "rev": "d690205a4f01ec0930303c4204e5063958e51255", 254 | "type": "github" 255 | }, 256 | "original": { 257 | "owner": "oxalica", 258 | "repo": "rust-overlay", 259 | "type": "github" 260 | } 261 | }, 262 | "systems": { 263 | "locked": { 264 | "lastModified": 1681028828, 265 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 266 | "owner": "nix-systems", 267 | "repo": "default", 268 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 269 | "type": "github" 270 | }, 271 | "original": { 272 | "owner": "nix-systems", 273 | "repo": "default", 274 | "type": "github" 275 | } 276 | }, 277 | "systems_2": { 278 | "locked": { 279 | "lastModified": 1681028828, 280 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 281 | "owner": "nix-systems", 282 | "repo": "default", 283 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 284 | "type": "github" 285 | }, 286 | "original": { 287 | "owner": "nix-systems", 288 | "repo": "default", 289 | "type": "github" 290 | } 291 | } 292 | }, 293 | "root": "root", 294 | "version": 7 295 | } 296 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | inputs = { 3 | nixpkgs.url = "github:nixos/nixpkgs/24.05"; 4 | flake-parts.url = "github:hercules-ci/flake-parts"; 5 | systems.url = "github:nix-systems/default"; 6 | pre-commit-hooks.url = 
"github:cachix/pre-commit-hooks.nix"; 7 | rust-flake.url = "github:juspay/rust-flake"; 8 | devour-flake.url = "github:srid/devour-flake"; 9 | devour-flake.flake = false; 10 | }; 11 | 12 | outputs = inputs: 13 | 14 | inputs.flake-parts.lib.mkFlake { inherit inputs; } { 15 | systems = import inputs.systems; 16 | 17 | imports = [ 18 | inputs.rust-flake.flakeModules.default 19 | inputs.rust-flake.flakeModules.nixpkgs 20 | inputs.pre-commit-hooks.flakeModule 21 | ./nix/pre-commit.nix 22 | ./nix/rust.nix 23 | ]; 24 | 25 | perSystem = { pkgs, self', config, ... }: { 26 | formatter = pkgs.nixpkgs-fmt; 27 | devShells.default = pkgs.mkShell { 28 | inputsFrom = [ 29 | self'.devShells.rust 30 | config.pre-commit.devShell 31 | ]; 32 | # Add your devShell tools here clang-tidy ./sarif-fmt/tests/data/cpp.cpp 33 | packages = with pkgs; [ 34 | git-cliff 35 | zola 36 | ]; 37 | 38 | shellHook = 39 | '' 40 | # For nixci 41 | export DEVOUR_FLAKE=${inputs.devour-flake} 42 | ''; 43 | }; 44 | }; 45 | }; 46 | } 47 | -------------------------------------------------------------------------------- /hadolint-sarif/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hadolint-sarif" 3 | version = "0.8.0" 4 | authors = ["Paul Sastrasinh "] 5 | edition = "2018" 6 | description = "Convert hadolint output to SARIF" 7 | license = "MIT" 8 | readme = "README.md" 9 | keywords = ["sarif", "hadolint", "docker", "cli"] 10 | categories = ["command-line-utilities"] 11 | homepage = "https://psastras.github.io/sarif-rs/" 12 | documentation = "https://docs.rs/hadolint_sarif" 13 | repository = "https://github.com/psastras/sarif-rs" 14 | 15 | [badges] 16 | github = { repository = "psastras/sarif-rs" } 17 | 18 | [[bin]] 19 | name = "hadolint-sarif" 20 | path = "src/bin.rs" 21 | 22 | [dependencies] 23 | anyhow = "1.0.98" 24 | serde-sarif = { path = "../serde-sarif", version = "0.8.0", features = [ 25 | "hadolint-converters", 26 | ] } 27 | clap = { 
version = "4.5.40", features = ["derive"] } 28 | 29 | [dev-dependencies] 30 | version-sync = "0.9" 31 | 32 | [package.metadata.binstall] 33 | pkg-url = "{ repo }/releases/download/{ name }-v{ version }/{ name }-{ target }" 34 | pkg-fmt = "bin" 35 | -------------------------------------------------------------------------------- /hadolint-sarif/LICENSE: -------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- /hadolint-sarif/README.md: -------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | 3 | # hadolint-sarif 4 | 5 | This crate provides a command line tool to convert `hadolint` diagnostic output 6 | into SARIF. 7 | 8 | The latest [documentation can be found here](https://docs.rs/hadolint_sarif). 9 | 10 | hadolint is a popular linter / static analysis tool for Dockerfiles. More 11 | information can be found on the official repository: 12 | [https://github.com/hadolint/hadolint](https://github.com/hadolint/hadolint) 13 | 14 | SARIF or the Static Analysis Results Interchange Format is an industry standard 15 | format for the output of static analysis tools. More information can be found on 16 | the official website: 17 | [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 
18 | 19 | ## Installation 20 | 21 | `hadolint-sarif` may be installed via `cargo` 22 | 23 | ```shell 24 | cargo install hadolint-sarif 25 | ``` 26 | 27 | via [cargo-binstall](https://github.com/cargo-bins/cargo-binstall) 28 | 29 | ```shell 30 | cargo binstall hadolint-sarif 31 | ``` 32 | 33 | or downloaded directly from Github Releases 34 | 35 | ```shell 36 | # make sure to adjust the target and version (you may also want to pin to a specific version) 37 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/hadolint-sarif-v0.8.0/hadolint-sarif-x86_64-unknown-linux-gnu -o hadolint-sarif 38 | ``` 39 | 40 | ### Fedora Linux 41 | 42 | ```shell 43 | sudo dnf install # ex. cargo binstall hadolint-sarif 44 | ``` 45 | 46 | ### Nix 47 | 48 | Through the `nix` cli, 49 | 50 | ```shell 51 | nix --accept-flake-config profile install github:psastras/sarif-rs#hadolint-sarif 52 | ``` 53 | 54 | ## Usage 55 | 56 | For most cases, simply run `hadolint` with `json` output and pipe the results 57 | into `hadolint-sarif`. 58 | 59 | ## Example 60 | 61 | ```shell 62 | hadolint -f json Dockerfile | hadolint-sarif 63 | ``` 64 | 65 | If you are using Github Actions, SARIF is useful for integrating with Github 66 | Advanced Security (GHAS), which can show code alerts in the "Security" tab of 67 | your repository. 68 | 69 | After uploading `hadolint-sarif` output to Github, `hadolint` diagnostics are 70 | available in GHAS. 
71 | 72 | ## Example 73 | 74 | ```yaml 75 | on: 76 | workflow_run: 77 | workflows: ["main"] 78 | branches: [main] 79 | types: [completed] 80 | 81 | name: sarif 82 | 83 | jobs: 84 | upload-sarif: 85 | runs-on: ubuntu-latest 86 | if: ${{ github.ref == 'refs/heads/main' }} 87 | steps: 88 | - uses: actions/checkout@v2 89 | - uses: actions-rs/toolchain@v1 90 | with: 91 | profile: minimal 92 | toolchain: stable 93 | override: true 94 | - uses: Swatinem/rust-cache@v1 95 | - run: cargo install hadolint-sarif sarif-fmt 96 | - run: hadolint -f json Dockerfile | hadolint-sarif | tee results.sarif | 97 | sarif-fmt 98 | - name: Upload SARIF file 99 | uses: github/codeql-action/upload-sarif@v1 100 | with: 101 | sarif_file: results.sarif 102 | ``` 103 | 104 | License: MIT 105 | -------------------------------------------------------------------------------- /hadolint-sarif/cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 6 | # See documentation for more information on available options. 
7 | 8 | [changelog] 9 | # changelog header 10 | header = """ 11 | # Changelog\n 12 | """ 13 | # template for the changelog body 14 | # https://keats.github.io/tera/docs/#introduction 15 | body = """ 16 | {% if version %}\ 17 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 18 | {% else %}\ 19 | ## [unreleased] 20 | {% endif %}\ 21 | {% for group, commits in commits | group_by(attribute="group") %} 22 | ### {{ group | striptags | trim | upper_first }} 23 | {% for commit in commits %} 24 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 25 | {% if commit.breaking %}[**breaking**] {% endif %}\ 26 | {{ commit.message | upper_first }}\ 27 | {% endfor %} 28 | {% endfor %}\n 29 | """ 30 | # template for the changelog footer 31 | footer = """""" 32 | # remove the leading and trailing s 33 | trim = true 34 | # postprocessors 35 | postprocessors = [ 36 | # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL 37 | ] 38 | 39 | [git] 40 | # parse the commits based on https://www.conventionalcommits.org 41 | conventional_commits = true 42 | # filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | # Replace issue numbers 49 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 50 | # Check spelling of the commit with https://github.com/crate-ci/typos 51 | # If the spelling is incorrect, it will be automatically fixed. 
52 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 53 | ] 54 | # regex for parsing and grouping commits 55 | commit_parsers = [ 56 | { message = "^feat", group = "🚀 Features" }, 57 | { message = "^fix", group = "🐛 Bug Fixes" }, 58 | { message = "^doc", group = "📚 Documentation" }, 59 | { message = "^perf", group = "⚡ Performance" }, 60 | { message = "^refactor", group = "🚜 Refactor" }, 61 | { message = "^style", group = "🎨 Styling" }, 62 | { message = "^test", group = "🧪 Testing" }, 63 | { message = "^chore\\(release\\): prepare for", skip = true }, 64 | { message = "^chore\\(deps.*\\)", skip = true }, 65 | { message = "^chore\\(pr\\)", skip = true }, 66 | { message = "^chore\\(pull\\)", skip = true }, 67 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 68 | { body = ".*security", group = "🛡️ Security" }, 69 | { message = "^revert", group = "◀️ Revert" }, 70 | ] 71 | # protect breaking changes from being skipped due to matching a skipping commit_parser 72 | protect_breaking_commits = false 73 | # filter out the commits that are not matched by commit parsers 74 | filter_commits = true 75 | # regex for matching git tags 76 | tag_pattern = "hadolint-sarif-v[0-9].*" 77 | # regex for skipping tags 78 | # skip_tags = "" 79 | # regex for ignoring tags 80 | # ignore_tags = "" 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | # limit the number of commits included in the changelog. 86 | # limit_commits = 42 87 | -------------------------------------------------------------------------------- /hadolint-sarif/src/bin.rs: -------------------------------------------------------------------------------- 1 | #![doc(html_root_url = "https://docs.rs/hadolint-sarif/0.8.0")] 2 | 3 | //! This crate provides a command line tool to convert `hadolint` diagnostic 4 | //! output into SARIF. 5 | //! 6 | //! 
The latest [documentation can be found here](https://docs.rs/hadolint_sarif). 7 | //! 8 | //! hadolint is a popular linter / static analysis tool for Dockerfiles. More information 9 | //! can be found on the official repository: [https://github.com/hadolint/hadolint](https://github.com/hadolint/hadolint) 10 | //! 11 | //! SARIF or the Static Analysis Results Interchange Format is an industry 12 | //! standard format for the output of static analysis tools. More information 13 | //! can be found on the official website: [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 14 | //! 15 | //! ## Installation 16 | //! 17 | //! `hadolint-sarif` may be installed via `cargo` 18 | //! 19 | //! ```shell 20 | //! cargo install hadolint-sarif 21 | //! ``` 22 | //! 23 | //! or downloaded directly from Github Releases 24 | //! 25 | //!```shell 26 | //! # make sure to adjust the target and version (you may also want to pin to a specific version) 27 | //! curl -sSL https://github.com/psastras/sarif-rs/releases/download/hadolint-sarif-latest/hadolint-sarif-x86_64-unknown-linux-gnu -o hadolint-sarif 28 | //! ``` 29 | //! 30 | //! ## Usage 31 | //! 32 | //! For most cases, simply run `hadolint` with `json` output and pipe the 33 | //! results into `hadolint-sarif`. 34 | //! 35 | //! ## Example 36 | //! 37 | //!```shell 38 | //! hadolint -f json Dockerfile | hadolint-sarif 39 | //! ``` 40 | //! 41 | //! If you are using Github Actions, SARIF is useful for integrating with 42 | //! Github Advanced Security (GHAS), which can show code alerts in the 43 | //! "Security" tab of your repository. 44 | //! 45 | //! After uploading `hadolint-sarif` output to Github, `hadolint` diagnostics 46 | //! are available in GHAS. 47 | //! 48 | //! ## Example 49 | //! 50 | //! ```yaml 51 | //! on: 52 | //! workflow_run: 53 | //! workflows: ["main"] 54 | //! branches: [main] 55 | //! types: [completed] 56 | //! 57 | //! name: sarif 58 | //! 59 | //! jobs: 60 | //! 
upload-sarif: 61 | //! runs-on: ubuntu-latest 62 | //! if: ${{ github.ref == 'refs/heads/main' }} 63 | //! steps: 64 | //! - uses: actions/checkout@v2 65 | //! - uses: actions-rs/toolchain@v1 66 | //! with: 67 | //! profile: minimal 68 | //! toolchain: stable 69 | //! override: true 70 | //! - uses: Swatinem/rust-cache@v1 71 | //! - run: cargo install hadolint-sarif sarif-fmt 72 | //! - run: 73 | //! hadolint -f json Dockerfile | 74 | //! hadolint-sarif | tee results.sarif | sarif-fmt 75 | //! - name: Upload SARIF file 76 | //! uses: github/codeql-action/upload-sarif@v1 77 | //! with: 78 | //! sarif_file: results.sarif 79 | //! ``` 80 | //! 81 | 82 | use anyhow::Result; 83 | use clap::Parser; 84 | use std::fs::File; 85 | use std::io::{BufReader, BufWriter, Read, Write}; 86 | 87 | #[derive(Parser, Debug)] 88 | #[command( 89 | version, 90 | about = "Convert hadolint warnings into SARIF", 91 | after_help = "The expected input is generated by running 'hadoling -f json'.", 92 | long_about = None, 93 | )] 94 | struct Args { 95 | /// input file; reads from stdin if none is given 96 | #[arg(short, long)] 97 | input: Option, 98 | /// output file; writes to stdout if none is given 99 | #[arg(short, long)] 100 | output: Option, 101 | } 102 | 103 | fn main() -> Result<()> { 104 | let args = Args::parse(); 105 | 106 | let read = match args.input { 107 | Some(path) => Box::new(File::open(path)?) as Box, 108 | None => Box::new(std::io::stdin()) as Box, 109 | }; 110 | let reader = BufReader::new(read); 111 | 112 | let write = match args.output { 113 | Some(path) => Box::new(File::create(path)?) 
as Box, 114 | None => Box::new(std::io::stdout()) as Box, 115 | }; 116 | let writer = BufWriter::new(write); 117 | 118 | serde_sarif::converters::hadolint::parse_to_writer(reader, writer) 119 | } 120 | -------------------------------------------------------------------------------- /hadolint-sarif/tests/version-numbers.rs: -------------------------------------------------------------------------------- 1 | #[test] 2 | fn test_readme_deps() { 3 | version_sync::assert_markdown_deps_updated!("README.md"); 4 | } 5 | 6 | #[test] 7 | fn test_html_root_url() { 8 | version_sync::assert_html_root_url_updated!("src/bin.rs"); 9 | } 10 | -------------------------------------------------------------------------------- /miri-sarif/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "miri-sarif" 3 | version = "0.8.0" 4 | authors = ["Florian Gebhardt "] 5 | edition = "2018" 6 | description = "Convert miri output to SARIF" 7 | license = "MIT" 8 | readme = "README.md" 9 | keywords = ["sarif", "miri", "cli"] 10 | categories = ["command-line-utilities"] 11 | homepage = "https://psastras.github.io/sarif-rs/" 12 | documentation = "https://docs.rs/miri_sarif" 13 | repository = "https://github.com/psastras/sarif-rs" 14 | 15 | [badges] 16 | github = { repository = "psastras/sarif-rs" } 17 | 18 | [[bin]] 19 | name = "miri-sarif" 20 | path = "src/bin.rs" 21 | 22 | [dependencies] 23 | anyhow = "1.0.98" 24 | serde-sarif = { path = "../serde-sarif", version = "0.8.0", features = [ 25 | "miri-converters", 26 | ] } 27 | clap = { version = "4.5.40", features = ["derive"] } 28 | 29 | [dev-dependencies] 30 | version-sync = "0.9" 31 | 32 | [package.metadata.binstall] 33 | pkg-url = "{ repo }/releases/download/{ name }-v{ version }/{ name }-{ target }" 34 | pkg-fmt = "bin" 35 | -------------------------------------------------------------------------------- /miri-sarif/LICENSE: 
-------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- /miri-sarif/README.md: -------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | 3 | # miri-sarif 4 | 5 | This crate provides a command line tool to convert `cargo miri` diagnostic 6 | output into SARIF. 7 | 8 | The latest [documentation can be found here](https://docs.rs/miri_sarif). 9 | 10 | Miri is an undefined behavior detection tool for rust. More information can 11 | be found on the official repository: 12 | [https://github.com/rust-lang/miri](https://github.com/rust-lang/miri) 13 | 14 | SARIF or the Static Analysis Results Interchange Format is an industry standard 15 | format for the output of static analysis tools. More information can be found on 16 | the official website: 17 | [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 18 | 19 | ## Installation 20 | 21 | `miri-sarif` may be installed via `cargo` 22 | 23 | ```shell 24 | cargo install miri-sarif 25 | ``` 26 | 27 | via [cargo-binstall](https://github.com/cargo-bins/cargo-binstall) 28 | 29 | ```shell 30 | cargo binstall miri-sarif 31 | ``` 32 | 33 | or downloaded directly from Github Releases 34 | 35 | ```shell 36 | # make sure to adjust the target and version (you may also want to pin to a specific version) 37 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/miri-sarif-v0.8.0/miri-sarif-x86_64-unknown-linux-gnu -o miri-sarif 38 | ``` 39 | 40 | ### Fedora Linux 41 | 42 | ```shell 43 | sudo dnf install # ex. 
cargo binstall miri-sarif 44 | ``` 45 | 46 | ### Nix 47 | 48 | Through the `nix` cli, 49 | 50 | ```shell 51 | nix --accept-flake-config profile install github:psastras/sarif-rs#miri-sarif 52 | ``` 53 | 54 | ## Usage 55 | 56 | For miri to output machine readable data you need to pass `--error-format=json` in the `MIRIFLAGS` environment variable. 57 | 58 | ### `cargo miri test` & `cargo miri run` 59 | 60 | Because the relevant miri output is printed to stderr you will need to redirect 61 | stderr to stdout and stdout to `/dev/null`. 62 | 63 | #### Example 64 | 65 | ```shell 66 | MIRIFLAGS="--error-format=json" cargo miri test 2>&1 1>/dev/null | miri-sarif 67 | ``` 68 | 69 | ### `cargo miri nextest` 70 | 71 | Since `nextest` only outputs to stderr, you don't need to redirect stdout to `/dev/null`. \ 72 | But you should use `--success-output immediate` to also capture warnings produced by miri. \ 73 | Additionally you can use `--no-fail-fast` for miri to run all tests and not stop on the first failure. 74 | 75 | #### Example 76 | 77 | ```shell 78 | MIRIFLAGS="--error-format=json" cargo miri nextest --no-fail-fast --success-output immediate 2>&1 | miri-sarif 79 | ``` 80 | 81 | ## Github Actions 82 | 83 | If you are using Github Actions, SARIF is useful for integrating with Github 84 | Advanced Security (GHAS), which can show code alerts in the "Security" tab of 85 | your repository. 86 | 87 | After uploading `miri-sarif` output to Github, `miri` diagnostics are 88 | available in GHAS. 
89 | 90 | ### Example 91 | 92 | ```yaml 93 | on: 94 | workflow_run: 95 | workflows: ["main"] 96 | branches: [main] 97 | types: [completed] 98 | 99 | name: sarif 100 | 101 | jobs: 102 | upload-sarif: 103 | runs-on: ubuntu-latest 104 | if: ${{ github.ref == 'refs/heads/main' }} 105 | steps: 106 | - uses: actions/checkout@v2 107 | - uses: actions-rs/toolchain@v1 108 | with: 109 | profile: minimal 110 | toolchain: nightly 111 | components: miri 112 | override: true 113 | - uses: Swatinem/rust-cache@v1 114 | - run: cargo install miri-sarif sarif-fmt cargo-nextest 115 | - run: MIRIFLAGS="--error-format=json" cargo miri nextest run --no-fail-fast --success-output immediate 2>&1 | 116 | miri-sarif | tee results.sarif | sarif-fmt 117 | - name: Upload SARIF file 118 | uses: github/codeql-action/upload-sarif@v3 119 | with: 120 | sarif_file: results.sarif 121 | ``` 122 | 123 | In some cases, the path to the file contained in the SARIF report [may be different than what is expected](https://github.com/psastras/sarif-rs/issues/370). This can happen for example if running `miri-sarif` from a different folder than the crate folder. In this case consider using a tool like `jq` to amend to path: 124 | 125 | ### Example 126 | 127 | ```bash 128 | cat results.sarif \ 129 | | jq --arg pwd "some_folder/my_crate" '.runs[].results[].locations[].physicalLocation.artifactLocation.uri |= $pwd + "/" + .' \ 130 | > results.sarif.tmp 131 | ``` 132 | 133 | Note that this maybe be fixed in a future release. 134 | 135 | License: MIT 136 | -------------------------------------------------------------------------------- /miri-sarif/cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 6 | # See documentation for more information on available options. 
7 | 8 | [changelog] 9 | # changelog header 10 | header = """ 11 | # Changelog\n 12 | """ 13 | # template for the changelog body 14 | # https://keats.github.io/tera/docs/#introduction 15 | body = """ 16 | {% if version %}\ 17 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 18 | {% else %}\ 19 | ## [unreleased] 20 | {% endif %}\ 21 | {% for group, commits in commits | group_by(attribute="group") %} 22 | ### {{ group | striptags | trim | upper_first }} 23 | {% for commit in commits %} 24 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 25 | {% if commit.breaking %}[**breaking**] {% endif %}\ 26 | {{ commit.message | upper_first }}\ 27 | {% endfor %} 28 | {% endfor %}\n 29 | """ 30 | # template for the changelog footer 31 | footer = """""" 32 | # remove the leading and trailing s 33 | trim = true 34 | # postprocessors 35 | postprocessors = [ 36 | # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL 37 | ] 38 | 39 | [git] 40 | # parse the commits based on https://www.conventionalcommits.org 41 | conventional_commits = true 42 | # filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | # Replace issue numbers 49 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 50 | # Check spelling of the commit with https://github.com/crate-ci/typos 51 | # If the spelling is incorrect, it will be automatically fixed. 
52 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 53 | ] 54 | # regex for parsing and grouping commits 55 | commit_parsers = [ 56 | { message = "^feat", group = "🚀 Features" }, 57 | { message = "^fix", group = "🐛 Bug Fixes" }, 58 | { message = "^doc", group = "📚 Documentation" }, 59 | { message = "^perf", group = "⚡ Performance" }, 60 | { message = "^refactor", group = "🚜 Refactor" }, 61 | { message = "^style", group = "🎨 Styling" }, 62 | { message = "^test", group = "🧪 Testing" }, 63 | { message = "^chore\\(release\\): prepare for", skip = true }, 64 | { message = "^chore\\(deps.*\\)", skip = true }, 65 | { message = "^chore\\(pr\\)", skip = true }, 66 | { message = "^chore\\(pull\\)", skip = true }, 67 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 68 | { body = ".*security", group = "🛡️ Security" }, 69 | { message = "^revert", group = "◀️ Revert" }, 70 | ] 71 | # protect breaking changes from being skipped due to matching a skipping commit_parser 72 | protect_breaking_commits = false 73 | # filter out the commits that are not matched by commit parsers 74 | filter_commits = true 75 | # regex for matching git tags 76 | tag_pattern = "miri-sarif-v[0-9].*" 77 | # regex for skipping tags 78 | # skip_tags = "" 79 | # regex for ignoring tags 80 | # ignore_tags = "" 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | # limit the number of commits included in the changelog. 86 | # limit_commits = 42 87 | -------------------------------------------------------------------------------- /miri-sarif/src/bin.rs: -------------------------------------------------------------------------------- 1 | #![doc(html_root_url = "https://docs.rs/miri-sarif/0.8.0")] 2 | 3 | //! This crate provides a command line tool to convert `cargo miri` diagnostic 4 | //! output into SARIF. 5 | //! 6 | //! 
The latest [documentation can be found here](https://docs.rs/miri_sarif). 7 | //! 8 | //! Miri is an undefined behavior detection tool for rust. More information 9 | //! can be found on the official repository: [https://github.com/rust-lang/miri](https://github.com/rust-lang/miri) 10 | //! 11 | //! SARIF or the Static Analysis Results Interchange Format is an industry 12 | //! standard format for the output of static analysis tools. More information 13 | //! can be found on the official website: [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 14 | //! 15 | //! ## Installation 16 | //! 17 | //! `miri-sarif` may be installed via `cargo` 18 | //! 19 | //! ```shell 20 | //! cargo install miri-sarif 21 | //! ``` 22 | //! 23 | //! or downloaded directly from Github Releases 24 | //! 25 | //! ```shell 26 | //! # make sure to adjust the target and version (you may also want to pin to a specific version) 27 | //! curl -sSL https://github.com/psastras/sarif-rs/releases/download/miri-sarif-v0.8.0/miri-sarif-x86_64-unknown-linux-gnu -o miri-sarif 28 | //! ``` 29 | //! 30 | //! ## Usage 31 | //! 32 | //! For miri to output machine readable data you need to pass `--error-format=json` in the `MIRIFLAGS` environment variable. 33 | //! 34 | //! ### `cargo miri test` & `cargo miri run` 35 | //! 36 | //! Because the relevant miri output is printed to stderr you will need to redirect 37 | //! stderr to stdout and stdout to `/dev/null`. 38 | //! 39 | //! #### Example 40 | //! 41 | //! ```shell 42 | //! MIRIFLAGS="--error-format=json" cargo miri test 2>&1 1>/dev/null | miri-sarif 43 | //! ``` 44 | //! 45 | //! ### `cargo miri nextest` 46 | //! 47 | //! Since `nextest` only outputs to stderr, you don't need to redirect stdout to `/dev/null`. 48 | //! But you should use `--success-output immediate` to also capture warnings produced by miri. 49 | //! Additionally you can use `--no-fail-fast` for miri to run all tests and not stop on the first failure. 50 | //! 
51 | //! #### Example 52 | //! 53 | //! ```shell 54 | //! MIRIFLAGS="--error-format=json" cargo miri nextest --no-fail-fast --success-output immediate 2>&1 | miri-sarif 55 | //! ``` 56 | //! 57 | //! ## Github Actions 58 | //! 59 | //! If you are using Github Actions, SARIF is useful for integrating with 60 | //! Github Advanced Security (GHAS), which can show code alerts in the 61 | //! "Security" tab of your repository. 62 | //! 63 | //! After uploading `clippy-sarif` output to Github, `clippy` diagnostics 64 | //! are available in GHAS. 65 | //! 66 | //! ### Example 67 | //! 68 | //! ```yaml 69 | //! on: 70 | //! workflow_run: 71 | //! workflows: ["main"] 72 | //! branches: [main] 73 | //! types: [completed] 74 | //! 75 | //! name: sarif 76 | //! 77 | //! jobs: 78 | //! upload-sarif: 79 | //! runs-on: ubuntu-latest 80 | //! if: ${{ github.ref == 'refs/heads/main' }} 81 | //! steps: 82 | //! - uses: actions/checkout@v2 83 | //! - uses: actions-rs/toolchain@v1 84 | //! with: 85 | //! profile: minimal 86 | //! toolchain: nightly 87 | //! components: miri 88 | //! override: true 89 | //! - uses: Swatinem/rust-cache@v1 90 | //! - run: cargo install miri-sarif sarif-fmt cargo-nextest 91 | //! - run: MIRIFLAGS="--error-format=json" cargo miri nextest run --no-fail-fast --success-output immediate 2>&1 | 92 | //! miri-sarif | tee results.sarif | sarif-fmt 93 | //! - name: Upload SARIF file 94 | //! uses: github/codeql-action/upload-sarif@v3 95 | //! with: 96 | //! sarif_file: results.sarif 97 | //! 
``` 98 | 99 | use anyhow::Result; 100 | use clap::Parser; 101 | use std::fs::File; 102 | use std::io::{BufReader, BufWriter, Read, Write}; 103 | 104 | #[derive(Parser, Debug)] 105 | #[command( 106 | version, 107 | about = "Convert miri output into SARIF", 108 | after_help = "The expected input is generated by running 'MIRIFLAGS=\"--error-format=json\" cargo miri test'.", 109 | long_about = None, 110 | )] 111 | struct Args { 112 | /// input file; reads from stdin if none is given 113 | #[arg(short, long)] 114 | input: Option, 115 | /// output file; writes to stdout if none is given 116 | #[arg(short, long)] 117 | output: Option, 118 | } 119 | 120 | fn main() -> Result<()> { 121 | let args = Args::parse(); 122 | 123 | let read = match args.input { 124 | Some(path) => Box::new(File::open(path)?) as Box, 125 | None => Box::new(std::io::stdin()) as Box, 126 | }; 127 | let reader = BufReader::new(read); 128 | 129 | let write = match args.output { 130 | Some(path) => Box::new(File::create(path)?) as Box, 131 | None => Box::new(std::io::stdout()) as Box, 132 | }; 133 | let writer = BufWriter::new(write); 134 | 135 | serde_sarif::converters::miri::parse_to_writer(reader, writer) 136 | } 137 | -------------------------------------------------------------------------------- /miri-sarif/tests/version-numbers.rs: -------------------------------------------------------------------------------- 1 | #[test] 2 | fn test_readme_deps() { 3 | version_sync::assert_markdown_deps_updated!("README.md"); 4 | } 5 | 6 | #[test] 7 | fn test_html_root_url() { 8 | version_sync::assert_html_root_url_updated!("src/bin.rs"); 9 | } 10 | -------------------------------------------------------------------------------- /nix/pre-commit.nix: -------------------------------------------------------------------------------- 1 | { ... }: 2 | { 3 | perSystem = { pkgs, lib, ... 
}: { 4 | pre-commit = { 5 | check.enable = true; 6 | settings = { 7 | hooks = { 8 | nixpkgs-fmt.enable = true; 9 | }; 10 | }; 11 | }; 12 | }; 13 | } 14 | -------------------------------------------------------------------------------- /nix/rust.nix: -------------------------------------------------------------------------------- 1 | { inputs, ... }: 2 | { 3 | debug = true; 4 | perSystem = { config, self', pkgs, lib, system, ... }: 5 | let 6 | inherit (pkgs.stdenv) isDarwin; 7 | inherit (pkgs.darwin) apple_sdk; 8 | globalCrateConfig = { 9 | crane = { 10 | args = { 11 | buildInputs = lib.optionals isDarwin 12 | ([ 13 | pkgs.fixDarwinDylibNames 14 | ]) ++ [ 15 | pkgs.libiconv 16 | ]; 17 | cargoTestExtraArgs = "-- --nocapture"; 18 | DEVOUR_FLAKE = inputs.devour-flake; 19 | } // lib.optionalAttrs pkgs.stdenv.isLinux { 20 | CARGO_BUILD_TARGET = "x86_64-unknown-linux-musl"; 21 | CARGO_BUILD_RUSTFLAGS = "-C target-feature=+crt-static"; 22 | }; 23 | extraBuildArgs = { 24 | postInstall = '' 25 | ${if isDarwin then "fixDarwinDylibNames" else ""} 26 | ''; 27 | }; 28 | }; 29 | }; 30 | in 31 | { 32 | rust-project = { 33 | src = lib.cleanSourceWith { 34 | name = "sarif-rs"; 35 | src = inputs.self; # The original, unfiltered source 36 | filter = path: type: 37 | # Needed for documentation checks 38 | (lib.hasSuffix "\.md" path) || 39 | # Needed for .json schema 40 | (lib.hasSuffix "\.json" path) || 41 | # Needed for tests 42 | (lib.hasInfix "/data/" path) || 43 | # Default filter from crane (allow .rs files) 44 | (config.rust-project.crane-lib.filterCargoSources path type) 45 | ; 46 | }; 47 | }; 48 | 49 | packages = 50 | let 51 | inherit (config.rust-project) crates; 52 | in 53 | rec { 54 | all = pkgs.symlinkJoin { 55 | name = "all"; 56 | paths = with crates; [ 57 | sarif-fmt.crane.outputs.drv.crate 58 | clippy-sarif.crane.outputs.drv.crate 59 | hadolint-sarif.crane.outputs.drv.crate 60 | miri-sarif.crane.outputs.drv.crate 61 | shellcheck-sarif.crane.outputs.drv.crate 62 | 
clang-tidy-sarif.crane.outputs.drv.crate 63 | ]; 64 | }; 65 | default = all; 66 | }; 67 | }; 68 | } 69 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | profile = "default" 3 | channel = "stable" 4 | targets = ["aarch64-apple-darwin", "x86_64-unknown-linux-musl"] -------------------------------------------------------------------------------- /sarif-fmt/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sarif-fmt" 3 | version = "0.8.0" 4 | authors = ["Paul Sastrasinh "] 5 | edition = "2018" 6 | description = "View (pretty print) SARIF files in terminal" 7 | license = "MIT" 8 | readme = "README.md" 9 | keywords = ["sarif", "fmt", "cli", "terminal"] 10 | categories = ["command-line-utilities"] 11 | homepage = "https://psastras.github.io/sarif-rs/" 12 | documentation = "https://docs.rs/clippy_sarif" 13 | repository = "https://github.com/psastras/sarif-rs" 14 | 15 | [badges] 16 | github = { repository = "psastras/sarif-rs" } 17 | 18 | [[bin]] 19 | name = "sarif-fmt" 20 | path = "src/bin.rs" 21 | 22 | [dependencies] 23 | anyhow = "1.0.98" 24 | codespan-reporting = "0.12.0" 25 | serde-sarif = { path = "../serde-sarif", version = "0.8.0" } 26 | serde_json = "1.0.140" 27 | clap = { version = "4.5.40", features = ["derive"] } 28 | 29 | [dev-dependencies] 30 | duct = "1.0.0" 31 | duct_sh = "1.0.0" 32 | version-sync = "0.9" 33 | 34 | [package.metadata.binstall] 35 | pkg-url = "{ repo }/releases/download/{ name }-v{ version }/{ name }-{ target }" 36 | pkg-fmt = "bin" 37 | -------------------------------------------------------------------------------- /sarif-fmt/LICENSE: -------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- 
/sarif-fmt/README.md: -------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | 3 | # sarif-fmt 4 | 5 | This crate provides a command line tool to pretty print SARIF files to easy 6 | human readable output. 7 | 8 | The latest [documentation can be found here](https://docs.rs/sarif_fmt). 9 | 10 | SARIF or the Static Analysis Results Interchange Format is an industry standard 11 | format for the output of static analysis tools. More information can be found on 12 | the official website: 13 | [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 14 | 15 | ## Installation 16 | 17 | `sarif-fmt` may be installed via `cargo` 18 | 19 | ```shell 20 | cargo install sarif-fmt 21 | ``` 22 | 23 | via [cargo-binstall](https://github.com/cargo-bins/cargo-binstall) 24 | 25 | ```shell 26 | cargo binstall sarif-fmt 27 | ``` 28 | 29 | or downloaded directly from Github Releases 30 | 31 | ```shell 32 | # make sure to adjust the target and version (you may also want to pin to a specific version) 33 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/sarif-fmt-v0.8.0/sarif-fmt-x86_64-unknown-linux-gnu -o sarif-fmt 34 | ``` 35 | 36 | ### Fedora Linux 37 | 38 | ```shell 39 | sudo dnf install # ex. 
cargo binstall sarif-fmt 40 | ``` 41 | 42 | ### Nix 43 | 44 | Through the `nix` cli, 45 | 46 | ```shell 47 | nix --accept-flake-config profile install github:psastras/sarif-rs#sarif-fmt 48 | ``` 49 | 50 | ## Usage 51 | 52 | For most cases, simply pipe a SARIF file into `sarif-fmt` 53 | (`cat ./foo.sarif | sarif-fmt`) 54 | 55 | ## Example 56 | 57 | ```shell 58 | $ cargo clippy --message-format=json | clippy-sarif | sarif-fmt 59 | $ warning: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed as `map(|x| y)` 60 | ┌─ sarif-fmt/src/bin.rs:423:13 61 | │ 62 | 423 │ ╭ the_rule 63 | 424 │ │ .full_description 64 | 425 │ │ .as_ref() 65 | 426 │ │ .and_then(|mfms| Some(mfms.text.clone())) 66 | │ ╰───────────────────────────────────────────────────────^ 67 | │ 68 | = `#[warn(clippy::bind_instead_of_map)]` on by default 69 | for further information visit https://rust-lang.github.io/rust-clippy/master#bind_instead_of_map 70 | ``` 71 | 72 | Often it is useful to record the SARIF file for machine processing but also 73 | print the nicely formatted results to stdout at the same time. This can be done 74 | using the `tee` command: 75 | 76 | ```shell 77 | $ clang-tidy -checks=cert-* cpp.cpp -- | clang-tidy-sarif | tee clang-tidy.sarif | sarif-fmt 78 | $ 2 warnings generated. 
79 | warning: 'atoi' used to convert a string to an integer value, but function will not report conversion errors; consider using 'strtol' instead [cert-err34-c] 80 | ┌─ /home/psastras/repos/sarif-rs/sarif-fmt/tests/data/cpp.cpp:4:10 81 | │ 82 | 4 │ return atoi(num); 83 | │ ^^^^^^^^^^ 84 | 85 | warning: calling 'system' uses a command processor [cert-env33-c] 86 | ┌─ /home/psastras/repos/sarif-rs/sarif-fmt/tests/data/cpp.cpp:8:3 87 | │ 88 | 8 │ system("ls"); 89 | │ ^^^^^^^^^^^^^ 90 | 91 | $ cat clang-tidy.sarif 92 | { 93 | "runs": [ 94 | { 95 | "results": [ 96 | { 97 | "level": "warning", 98 | "locations": [ 99 | { 100 | "physicalLocation": { 101 | "artifactLocation": { 102 | "uri": "cpp.cpp" 103 | }, 104 | "region": { 105 | "startColumn": 10, 106 | "startLine": 4 107 | } 108 | } 109 | } 110 | ], 111 | "message": { 112 | "text": "'atoi' used to convert a string to an integer value, but function will not report conversion errors; consider using 'strtol' instead [cert-err34-c]" 113 | } 114 | }, 115 | { 116 | "level": "warning", 117 | "locations": [ 118 | { 119 | "physicalLocation": { 120 | "artifactLocation": { 121 | "uri": "cpp.cpp" 122 | }, 123 | "region": { 124 | "startColumn": 3, 125 | "startLine": 8 126 | } 127 | } 128 | } 129 | ], 130 | "message": { 131 | "text": "calling 'system' uses a command processor [cert-env33-c]" 132 | } 133 | } 134 | ], 135 | "tool": { 136 | "driver": { 137 | "name": "clang-tidy" 138 | } 139 | } 140 | } 141 | ], 142 | "version": "2.1.0" 143 | } 144 | ``` 145 | 146 | License: MIT 147 | -------------------------------------------------------------------------------- /sarif-fmt/cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 6 | # See documentation for more information on available options. 
7 | 8 | [changelog] 9 | # changelog header 10 | header = """ 11 | # Changelog\n 12 | """ 13 | # template for the changelog body 14 | # https://keats.github.io/tera/docs/#introduction 15 | body = """ 16 | {% if version %}\ 17 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 18 | {% else %}\ 19 | ## [unreleased] 20 | {% endif %}\ 21 | {% for group, commits in commits | group_by(attribute="group") %} 22 | ### {{ group | striptags | trim | upper_first }} 23 | {% for commit in commits %} 24 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 25 | {% if commit.breaking %}[**breaking**] {% endif %}\ 26 | {{ commit.message | upper_first }}\ 27 | {% endfor %} 28 | {% endfor %}\n 29 | """ 30 | # template for the changelog footer 31 | footer = """""" 32 | # remove the leading and trailing s 33 | trim = true 34 | # postprocessors 35 | postprocessors = [ 36 | # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL 37 | ] 38 | 39 | [git] 40 | # parse the commits based on https://www.conventionalcommits.org 41 | conventional_commits = true 42 | # filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | # Replace issue numbers 49 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 50 | # Check spelling of the commit with https://github.com/crate-ci/typos 51 | # If the spelling is incorrect, it will be automatically fixed. 
52 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 53 | ] 54 | # regex for parsing and grouping commits 55 | commit_parsers = [ 56 | { message = "^feat", group = "🚀 Features" }, 57 | { message = "^fix", group = "🐛 Bug Fixes" }, 58 | { message = "^doc", group = "📚 Documentation" }, 59 | { message = "^perf", group = "⚡ Performance" }, 60 | { message = "^refactor", group = "🚜 Refactor" }, 61 | { message = "^style", group = "🎨 Styling" }, 62 | { message = "^test", group = "🧪 Testing" }, 63 | { message = "^chore\\(release\\): prepare for", skip = true }, 64 | { message = "^chore\\(deps.*\\)", skip = true }, 65 | { message = "^chore\\(pr\\)", skip = true }, 66 | { message = "^chore\\(pull\\)", skip = true }, 67 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 68 | { body = ".*security", group = "🛡️ Security" }, 69 | { message = "^revert", group = "◀️ Revert" }, 70 | ] 71 | # protect breaking changes from being skipped due to matching a skipping commit_parser 72 | protect_breaking_commits = false 73 | # filter out the commits that are not matched by commit parsers 74 | filter_commits = true 75 | # regex for matching git tags 76 | tag_pattern = "sarif-fmt-v[0-9].*" 77 | # regex for skipping tags 78 | # skip_tags = "" 79 | # regex for ignoring tags 80 | # ignore_tags = "" 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | # limit the number of commits included in the changelog. 
86 | # limit_commits = 42 87 | -------------------------------------------------------------------------------- /sarif-fmt/src/github/mod.rs: -------------------------------------------------------------------------------- 1 | struct ProblemMatchers { 2 | problem_matcher: Vec, 3 | } 4 | 5 | struct ProblemMatcher { 6 | owner: String, 7 | pattern: Vec, 8 | } 9 | 10 | struct ProblemMatcherPattern { 11 | regexp: String, 12 | severity: i32, 13 | file: i32, 14 | line: i32, 15 | column: i32, 16 | message: i32, 17 | } 18 | 19 | pub fn enable_problem_matcher() { 20 | if is_running_in_gha() && atty::is(atty::Stream::Stdout) { 21 | let matcher_str = include_str!("problem-matchers/sarif-plain-matcher.json"); 22 | 23 | println!("::add-matcher::sarif-plain-matcher.json") 24 | } 25 | } 26 | 27 | pub fn disable_problem_matcher() { 28 | if is_running_in_gha() && atty::is(atty::Stream::Stdout) { 29 | println!("::remove-matcher owner=sarif-plain::") 30 | } 31 | } 32 | 33 | fn is_running_in_gha() -> bool { 34 | match std::env::var("GITHUB_ACTIONS") 35 | .unwrap_or("false".to_string()) 36 | .as_str() 37 | { 38 | "true" => true, 39 | _ => false, 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /sarif-fmt/src/github/problem-matchers/sarif-plain-matcher.json: -------------------------------------------------------------------------------- 1 | { 2 | "problemMatcher": [ 3 | { 4 | "owner": "sarif-plain", 5 | "pattern": [ 6 | { 7 | "regexp": "^(.+)\\s\\[(.+):(.+):(.+)\\]\\s(.+)$", 8 | "severity": 1, 9 | "file": 2, 10 | "line": 3, 11 | "column": 4, 12 | "message": 5 13 | } 14 | ] 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /sarif-fmt/tests/clang-tidy-test.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use std::collections::HashMap; 3 | use std::fs; 4 | use std::iter::FromIterator; 5 | use std::path::PathBuf; 6 | 7 | 
#[test] 8 | // Test that the happy path linting works 9 | fn test_clang_tidy() -> Result<()> { 10 | let cargo_manifest_directory = 11 | fs::canonicalize(PathBuf::from(env!("CARGO_MANIFEST_DIR")))?; 12 | let cargo_workspace_directory = fs::canonicalize(PathBuf::from_iter( 13 | [cargo_manifest_directory.clone(), PathBuf::from("..")].iter(), 14 | ))?; 15 | 16 | duct_sh::sh( 17 | "cargo build --bin clang-tidy-sarif", 18 | ) 19 | .dir(cargo_workspace_directory.clone()) 20 | .run()?; 21 | 22 | duct_sh::sh("cargo build --bin sarif-fmt") 23 | .dir(cargo_workspace_directory.clone()) 24 | .run()?; 25 | 26 | let sarif_fmt_bin = fs::canonicalize(PathBuf::from_iter( 27 | [ 28 | cargo_workspace_directory.clone(), 29 | PathBuf::from("./target/debug/sarif-fmt"), 30 | ] 31 | .iter(), 32 | ))?; 33 | 34 | let clang_tidy_sarif_bin = fs::canonicalize(PathBuf::from_iter( 35 | [ 36 | cargo_workspace_directory.clone(), 37 | PathBuf::from("./target/debug/clang-tidy-sarif"), 38 | ] 39 | .iter(), 40 | ))?; 41 | 42 | let clang_tidy_output = fs::canonicalize(PathBuf::from_iter( 43 | [ 44 | cargo_workspace_directory.clone(), 45 | PathBuf::from("./sarif-fmt/tests/data/clang-tidy.out"), 46 | ] 47 | .iter(), 48 | ))?; 49 | 50 | let cmd = format!( 51 | "{} -i {} | {}", 52 | clang_tidy_sarif_bin.to_str().unwrap(), 53 | clang_tidy_output.to_str().unwrap(), 54 | sarif_fmt_bin.to_str().unwrap(), 55 | ); 56 | 57 | let mut env_map: HashMap<_, _> = std::env::vars().collect(); 58 | env_map.insert("NO_COLOR".into(), "1".into()); 59 | 60 | let output = duct_sh::sh_dangerous(cmd.as_str()) 61 | .dir(cargo_workspace_directory) 62 | .unchecked() 63 | .full_env(&env_map) 64 | .read()?; 65 | 66 | assert!(output.contains("warning: Array access (from variable 'str') results in a null pointer dereference")); 67 | assert!(output.contains("cpp.cpp:8:10")); 68 | assert!(output.contains("return str[0];")); 69 | // 1st note for the above error 70 | assert!(output.contains("cpp.cpp:12:25")); 71 | 
assert!(output.contains("return get_first_char(nullptr);")); 72 | assert!(output.contains("Passing null pointer value via 1st parameter 'str'")); 73 | // 2nd note, same line of code 74 | assert!(output.contains("cpp.cpp:12:10")); 75 | assert!(output.contains("Calling 'get_first_char'")); 76 | // 3rd note, same line of code as the original error 77 | assert!(output.contains("cpp.cpp:8:10")); 78 | assert!(output.contains("------- Array access (from variable 'str') results in a null pointer dereference")); 79 | 80 | Ok(()) 81 | } 82 | -------------------------------------------------------------------------------- /sarif-fmt/tests/clippy-test.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use std::fs; 3 | use std::iter::FromIterator; 4 | use std::path::PathBuf; 5 | 6 | #[test] 7 | // Test that the happy path linting works 8 | fn test_clippy() -> Result<()> { 9 | let cargo_manifest_directory = 10 | fs::canonicalize(PathBuf::from(env!("CARGO_MANIFEST_DIR")))?; 11 | let cargo_workspace_directory = fs::canonicalize(PathBuf::from_iter( 12 | [cargo_manifest_directory.clone(), PathBuf::from("..")].iter(), 13 | ))?; 14 | 15 | duct_sh::sh( 16 | "cargo build --bin clippy-sarif", 17 | ) 18 | .dir(cargo_workspace_directory.clone()) 19 | .run()?; 20 | 21 | duct_sh::sh("cargo build --bin sarif-fmt") 22 | .dir(cargo_workspace_directory.clone()) 23 | .run()?; 24 | 25 | let sarif_fmt_bin = fs::canonicalize(PathBuf::from_iter( 26 | [ 27 | cargo_workspace_directory.clone(), 28 | PathBuf::from("./target/debug/sarif-fmt"), 29 | ] 30 | .iter(), 31 | ))?; 32 | 33 | let clippy_sarif_bin = fs::canonicalize(PathBuf::from_iter( 34 | [ 35 | cargo_workspace_directory.clone(), 36 | PathBuf::from("./target/debug/clippy-sarif"), 37 | ] 38 | .iter(), 39 | ))?; 40 | 41 | let clippy_output = fs::canonicalize(PathBuf::from_iter( 42 | [ 43 | cargo_workspace_directory.clone(), 44 | PathBuf::from("./sarif-fmt/tests/data/clippy.out"), 45 | ] 
46 | .iter(), 47 | ))?; 48 | 49 | let data_dir = fs::canonicalize(PathBuf::from_iter( 50 | [ 51 | cargo_workspace_directory.clone(), 52 | PathBuf::from("./sarif-fmt/tests/data"), 53 | ] 54 | .iter(), 55 | ))?; 56 | 57 | let cmd = format!( 58 | "{} -i {} | {}", 59 | clippy_sarif_bin.to_str().unwrap(), 60 | clippy_output.to_str().unwrap(), 61 | sarif_fmt_bin.to_str().unwrap(), 62 | ); 63 | 64 | let output = duct_sh::sh_dangerous(cmd.as_str()) 65 | .dir(data_dir) 66 | .unchecked() 67 | .env("NO_COLOR", "1") 68 | .read()?; 69 | 70 | assert!(output.contains( 71 | "error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false" 72 | )); 73 | assert!(output.contains("src/main.rs:3:6")); 74 | assert!(output.contains("if vec.len() <= 0 {}")); 75 | assert!(output 76 | .contains("#[deny(clippy::absurd_extreme_comparisons)]` on by default")); 77 | assert!(output 78 | .contains("for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons")); 79 | 80 | Ok(()) 81 | } 82 | -------------------------------------------------------------------------------- /sarif-fmt/tests/data/Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "data" 7 | version = "0.1.0" 8 | -------------------------------------------------------------------------------- /sarif-fmt/tests/data/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "data" 3 | version = "0.1.0" 4 | edition = "2018" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | 10 | [workspace] 11 | -------------------------------------------------------------------------------- /sarif-fmt/tests/data/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian 2 | RUN export node_version="0.10" \ 3 | && apt-get update && apt-get -y install nodejs="$node_verion" 4 | COPY package.json usr/src/app 5 | RUN cd /usr/src/app \ 6 | && npm install node-static 7 | 8 | EXPOSE 80000 9 | CMD ["npm", "start"] -------------------------------------------------------------------------------- /sarif-fmt/tests/data/clang-tidy.out: -------------------------------------------------------------------------------- 1 | Error while trying to load a compilation database: 2 | Could not auto-detect compilation database for file "./sarif-fmt/tests/data/cpp.cpp" 3 | No compilation database found in /Users/paul.sastrasinh/oss-repos/sarif-rs/sarif-fmt/tests/data or any parent directory 4 | fixed-compilation-database: Error while opening fixed database: No such file or directory 5 | json-compilation-database: Error while opening JSON database: No such file or directory 6 | Running without flags. 7 | 1 warning generated. 
8 | sarif-fmt/tests/data/cpp.cpp:8:10: warning: Array access (from variable 'str') results in a null pointer dereference [clang-analyzer-core.NullDereference] 9 | return str[0]; 10 | ^ 11 | sarif-fmt/tests/data/cpp.cpp:12:25: note: Passing null pointer value via 1st parameter 'str' 12 | return get_first_char(nullptr); 13 | ^~~~~~~ 14 | sarif-fmt/tests/data/cpp.cpp:12:10: note: Calling 'get_first_char' 15 | return get_first_char(nullptr); 16 | ^~~~~~~~~~~~~~~~~~~~~~~ 17 | sarif-fmt/tests/data/cpp.cpp:8:10: note: Array access (from variable 'str') results in a null pointer dereference 18 | return str[0]; 19 | -------------------------------------------------------------------------------- /sarif-fmt/tests/data/clippy.out: -------------------------------------------------------------------------------- 1 | {"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"warning: this `if` branch is empty\n --> src/main.rs:3:3\n |\n3 | if vec.len() <= 0 {}\n | ^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `vec.len() <= 0;`\n |\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#needless_if\n = note: `#[warn(clippy::needless_if)]` on by default\n\n","$message_type":"diagnostic","children":[{"children":[],"code":null,"level":"help","message":"for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#needless_if","rendered":null,"spans":[]},{"children":[],"code":null,"level":"note","message":"`#[warn(clippy::needless_if)]` on by default","rendered":null,"spans":[]},{"children":[],"code":null,"level":"help","message":"you can remove 
it","rendered":null,"spans":[{"byte_end":70,"byte_start":50,"column_end":23,"column_start":3,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":3,"line_start":3,"suggested_replacement":"vec.len() <= 0;","suggestion_applicability":"MachineApplicable","text":[{"highlight_end":23,"highlight_start":3,"text":" if vec.len() <= 0 {}"}]}]}],"code":{"code":"clippy::needless_if","explanation":null},"level":"warning","message":"this `if` branch is empty","spans":[{"byte_end":70,"byte_start":50,"column_end":23,"column_start":3,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":3,"line_start":3,"suggested_replacement":null,"suggestion_applicability":null,"text":[{"highlight_end":23,"highlight_start":3,"text":" if vec.len() <= 0 {}"}]}]}} 2 | {"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false\n --> src/main.rs:3:6\n |\n3 | if vec.len() <= 0 {}\n | ^^^^^^^^^^^^^^\n |\n = help: because `0` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `vec.len() == 0` instead\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons\n = note: `#[deny(clippy::absurd_extreme_comparisons)]` on by default\n\n","$message_type":"diagnostic","children":[{"children":[],"code":null,"level":"help","message":"because `0` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `vec.len() == 0` 
instead","rendered":null,"spans":[]},{"children":[],"code":null,"level":"help","message":"for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons","rendered":null,"spans":[]},{"children":[],"code":null,"level":"note","message":"`#[deny(clippy::absurd_extreme_comparisons)]` on by default","rendered":null,"spans":[]}],"code":{"code":"clippy::absurd_extreme_comparisons","explanation":null},"level":"error","message":"this comparison involving the minimum or maximum element for this type contains a case that is always true or always false","spans":[{"byte_end":67,"byte_start":53,"column_end":20,"column_start":6,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":3,"line_start":3,"suggested_replacement":null,"suggestion_applicability":null,"text":[{"highlight_end":20,"highlight_start":6,"text":" if vec.len() <= 0 {}"}]}]}} 3 | {"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false\n --> src/main.rs:4:6\n |\n4 | if 100 > i32::MAX {}\n | ^^^^^^^^^^^^^^\n |\n = help: because `i32::MAX` is the maximum value for this type, this comparison is always false\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons\n\n","$message_type":"diagnostic","children":[{"children":[],"code":null,"level":"help","message":"because `i32::MAX` is the maximum value for this type, this comparison is always false","rendered":null,"spans":[]},{"children":[],"code":null,"level":"help","message":"for further information visit 
https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons","rendered":null,"spans":[]}],"code":{"code":"clippy::absurd_extreme_comparisons","explanation":null},"level":"error","message":"this comparison involving the minimum or maximum element for this type contains a case that is always true or always false","spans":[{"byte_end":90,"byte_start":76,"column_end":20,"column_start":6,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":4,"line_start":4,"suggested_replacement":null,"suggestion_applicability":null,"text":[{"highlight_end":20,"highlight_start":6,"text":" if 100 > i32::MAX {}"}]}]}} 4 | {"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"error: aborting due to 2 previous errors; 1 warning emitted\n\n","$message_type":"diagnostic","children":[],"code":null,"level":"error","message":"aborting due to 2 previous errors; 1 warning emitted","spans":[]}} 5 | error: could not compile `data` (bin "data") due to 3 previous errors; 1 warning emitted 6 | warning: build failed, waiting for other jobs to finish... 
7 | {"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"warning: this `if` branch is empty\n --> src/main.rs:3:3\n |\n3 | if vec.len() <= 0 {}\n | ^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `vec.len() <= 0;`\n |\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#needless_if\n = note: `#[warn(clippy::needless_if)]` on by default\n\n","$message_type":"diagnostic","children":[{"children":[],"code":null,"level":"help","message":"for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#needless_if","rendered":null,"spans":[]},{"children":[],"code":null,"level":"note","message":"`#[warn(clippy::needless_if)]` on by default","rendered":null,"spans":[]},{"children":[],"code":null,"level":"help","message":"you can remove it","rendered":null,"spans":[{"byte_end":70,"byte_start":50,"column_end":23,"column_start":3,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":3,"line_start":3,"suggested_replacement":"vec.len() <= 0;","suggestion_applicability":"MachineApplicable","text":[{"highlight_end":23,"highlight_start":3,"text":" if vec.len() <= 0 {}"}]}]}],"code":{"code":"clippy::needless_if","explanation":null},"level":"warning","message":"this `if` branch is empty","spans":[{"byte_end":70,"byte_start":50,"column_end":23,"column_start":3,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":3,"line_start":3,"suggested_replacement":null,"suggestion_applicability":null,"text":[{"highlight_end":23,"highlight_start":3,"text":" if vec.len() <= 0 {}"}]}]}} 8 | 
{"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false\n --> src/main.rs:3:6\n |\n3 | if vec.len() <= 0 {}\n | ^^^^^^^^^^^^^^\n |\n = help: because `0` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `vec.len() == 0` instead\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons\n = note: `#[deny(clippy::absurd_extreme_comparisons)]` on by default\n\n","$message_type":"diagnostic","children":[{"children":[],"code":null,"level":"help","message":"because `0` is the minimum value for this type, the case where the two sides are not equal never occurs, consider using `vec.len() == 0` instead","rendered":null,"spans":[]},{"children":[],"code":null,"level":"help","message":"for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons","rendered":null,"spans":[]},{"children":[],"code":null,"level":"note","message":"`#[deny(clippy::absurd_extreme_comparisons)]` on by default","rendered":null,"spans":[]}],"code":{"code":"clippy::absurd_extreme_comparisons","explanation":null},"level":"error","message":"this comparison involving the minimum or maximum element for this type contains a case that is always true or always 
false","spans":[{"byte_end":67,"byte_start":53,"column_end":20,"column_start":6,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":3,"line_start":3,"suggested_replacement":null,"suggestion_applicability":null,"text":[{"highlight_end":20,"highlight_start":6,"text":" if vec.len() <= 0 {}"}]}]}} 9 | {"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"error: this comparison involving the minimum or maximum element for this type contains a case that is always true or always false\n --> src/main.rs:4:6\n |\n4 | if 100 > i32::MAX {}\n | ^^^^^^^^^^^^^^\n |\n = help: because `i32::MAX` is the maximum value for this type, this comparison is always false\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons\n\n","$message_type":"diagnostic","children":[{"children":[],"code":null,"level":"help","message":"because `i32::MAX` is the maximum value for this type, this comparison is always false","rendered":null,"spans":[]},{"children":[],"code":null,"level":"help","message":"for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#absurd_extreme_comparisons","rendered":null,"spans":[]}],"code":{"code":"clippy::absurd_extreme_comparisons","explanation":null},"level":"error","message":"this comparison involving the minimum or maximum element for this type contains a case that is always true or always 
false","spans":[{"byte_end":90,"byte_start":76,"column_end":20,"column_start":6,"expansion":null,"file_name":"src/main.rs","is_primary":true,"label":null,"line_end":4,"line_start":4,"suggested_replacement":null,"suggestion_applicability":null,"text":[{"highlight_end":20,"highlight_start":6,"text":" if 100 > i32::MAX {}"}]}]}} 10 | {"reason":"compiler-message","package_id":"path+file://sarif-rs/sarif-fmt/tests/data#0.1.0","manifest_path":"sarif-rs/sarif-fmt/tests/data/Cargo.toml","target":{"kind":["bin"],"crate_types":["bin"],"name":"data","src_path":"sarif-rs/sarif-fmt/tests/data/src/main.rs","edition":"2018","doc":true,"doctest":false,"test":true},"message":{"rendered":"error: aborting due to 2 previous errors; 1 warning emitted\n\n","$message_type":"diagnostic","children":[],"code":null,"level":"error","message":"aborting due to 2 previous errors; 1 warning emitted","spans":[]}} 11 | error: could not compile `data` (bin "data" test) due to 3 previous errors; 1 warning emitted 12 | {"reason":"build-finished","success":false} -------------------------------------------------------------------------------- /sarif-fmt/tests/data/cpp.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | int string_to_int(const char *num) { 4 | return atoi(num); 5 | } 6 | 7 | static int get_first_char(const char* str) { 8 | return str[0]; 9 | } 10 | 11 | int test_note() { 12 | return get_first_char(nullptr); 13 | } 14 | 15 | void ls() { 16 | system("ls"); 17 | } 18 | -------------------------------------------------------------------------------- /sarif-fmt/tests/data/hadolint.out: -------------------------------------------------------------------------------- 1 | [{"code":"DL3006","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"warning","line":1,"message":"Always tag the version of an image explicitly"},{"code":"DL3015","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"info","line":2,"message":"Avoid 
additional packages by specifying `--no-install-recommends`"},{"code":"DL3009","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"info","line":2,"message":"Delete the apt-get lists after installing something"},{"code":"SC2154","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"warning","line":2,"message":"node_verion is referenced but not assigned (did you mean 'node_version'?)."},{"code":"DL3045","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"warning","line":4,"message":"`COPY` to a relative destination without `WORKDIR` set."},{"code":"DL3003","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"warning","line":5,"message":"Use WORKDIR to switch to a directory"},{"code":"DL3016","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"warning","line":5,"message":"Pin versions in npm. Instead of `npm install ` use `npm install @`"},{"code":"DL3011","column":1,"file":"./sarif-fmt/tests/data/Dockerfile","level":"error","line":8,"message":"Valid UNIX ports range from 0 to 65535"}] -------------------------------------------------------------------------------- /sarif-fmt/tests/data/shell.sh: -------------------------------------------------------------------------------- 1 | echo $1 # Unquoted variables 2 | find . -name *.ogg # Unquoted find/grep patterns 3 | rm "~/my file.txt" # Quoted tilde expansion 4 | v='--verbose="true"'; cmd $v # Literal quotes in variables 5 | for f in "*.ogg" # Incorrectly quoted 'for' loops 6 | touch $@ # Unquoted $@ 7 | 8 | -------------------------------------------------------------------------------- /sarif-fmt/tests/data/shellcheck.out: -------------------------------------------------------------------------------- 1 | [{"file":"./sarif-fmt/tests/data/shell.sh","line":5,"endLine":5,"column":1,"endColumn":1,"level":"error","code":1073,"message":"Couldn't parse this for loop. 
Fix to allow more checks.","fix":null},{"file":"./sarif-fmt/tests/data/shell.sh","line":6,"endLine":6,"column":1,"endColumn":1,"level":"error","code":1058,"message":"Expected 'do'.","fix":null},{"file":"./sarif-fmt/tests/data/shell.sh","line":6,"endLine":6,"column":1,"endColumn":1,"level":"error","code":1072,"message":"Expected 'do'. Fix any mentioned problems and try again.","fix":null}] -------------------------------------------------------------------------------- /sarif-fmt/tests/data/src/main.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | let vec: Vec = Vec::new(); 3 | if vec.len() <= 0 {} 4 | if 100 > i32::MAX {} 5 | } 6 | -------------------------------------------------------------------------------- /sarif-fmt/tests/hadolint-test.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use std::collections::HashMap; 3 | use std::fs; 4 | use std::iter::FromIterator; 5 | use std::path::PathBuf; 6 | 7 | #[test] 8 | // Test that the happy path linting works 9 | fn test_hadolint() -> Result<()> { 10 | let cargo_manifest_directory = 11 | fs::canonicalize(PathBuf::from(env!("CARGO_MANIFEST_DIR")))?; 12 | let cargo_workspace_directory = fs::canonicalize(PathBuf::from_iter( 13 | [cargo_manifest_directory.clone(), PathBuf::from("..")].iter(), 14 | ))?; 15 | 16 | duct_sh::sh( 17 | "cargo build --bin hadolint-sarif", 18 | ) 19 | .dir(cargo_workspace_directory.clone()) 20 | .run()?; 21 | 22 | duct_sh::sh("cargo build --bin sarif-fmt") 23 | .dir(cargo_workspace_directory.clone()) 24 | .run()?; 25 | 26 | let sarif_fmt_bin = fs::canonicalize(PathBuf::from_iter( 27 | [ 28 | cargo_workspace_directory.clone(), 29 | PathBuf::from("./target/debug/sarif-fmt"), 30 | ] 31 | .iter(), 32 | ))?; 33 | 34 | let hadolint_sarif_bin = fs::canonicalize(PathBuf::from_iter( 35 | [ 36 | cargo_workspace_directory.clone(), 37 | PathBuf::from("./target/debug/hadolint-sarif"), 38 | 
] 39 | .iter(), 40 | ))?; 41 | 42 | let hadolint_output = fs::canonicalize(PathBuf::from_iter( 43 | [ 44 | cargo_workspace_directory.clone(), 45 | PathBuf::from("./sarif-fmt/tests/data/hadolint.out"), 46 | ] 47 | .iter(), 48 | ))?; 49 | 50 | let cmd = format!( 51 | "{} -i {} | {}", 52 | hadolint_sarif_bin.to_str().unwrap(), 53 | hadolint_output.to_str().unwrap(), 54 | sarif_fmt_bin.to_str().unwrap(), 55 | ); 56 | 57 | let mut env_map: HashMap<_, _> = std::env::vars().collect(); 58 | env_map.insert("NO_COLOR".into(), "1".into()); 59 | let output = duct_sh::sh_dangerous(cmd.as_str()) 60 | .dir(cargo_workspace_directory) 61 | .unchecked() 62 | .full_env(&env_map) 63 | .read()?; 64 | print!("{}", output); 65 | assert!( 66 | output.contains("warning: Always tag the version of an image explicitly") 67 | ); 68 | assert!(output.contains("Dockerfile:1:1")); 69 | assert!(output.contains("FROM debian")); 70 | assert!(output.contains("DL3006")); 71 | assert!(output.contains( 72 | "For more information: https://github.com/hadolint/hadolint/wiki/DL3006" 73 | )); 74 | 75 | Ok(()) 76 | } 77 | -------------------------------------------------------------------------------- /sarif-fmt/tests/shellcheck-test.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use std::collections::HashMap; 3 | use std::fs; 4 | use std::iter::FromIterator; 5 | use std::path::PathBuf; 6 | 7 | #[test] 8 | // Test that the happy path linting works 9 | fn test_shellcheck() -> Result<()> { 10 | let cargo_manifest_directory = 11 | fs::canonicalize(PathBuf::from(env!("CARGO_MANIFEST_DIR")))?; 12 | let cargo_workspace_directory = fs::canonicalize(PathBuf::from_iter( 13 | [cargo_manifest_directory.clone(), PathBuf::from("..")].iter(), 14 | ))?; 15 | 16 | duct_sh::sh( 17 | "cargo build --bin shellcheck-sarif", 18 | ) 19 | .dir(cargo_workspace_directory.clone()) 20 | .run()?; 21 | 22 | duct_sh::sh("cargo build --bin sarif-fmt") 23 | 
.dir(cargo_workspace_directory.clone()) 24 | .run()?; 25 | 26 | let sarif_fmt_bin = fs::canonicalize(PathBuf::from_iter( 27 | [ 28 | cargo_workspace_directory.clone(), 29 | PathBuf::from("./target/debug/sarif-fmt"), 30 | ] 31 | .iter(), 32 | ))?; 33 | 34 | let shellcheck_sarif_bin = fs::canonicalize(PathBuf::from_iter( 35 | [ 36 | cargo_workspace_directory.clone(), 37 | PathBuf::from("./target/debug/shellcheck-sarif"), 38 | ] 39 | .iter(), 40 | ))?; 41 | 42 | let shellcheck_output = fs::canonicalize(PathBuf::from_iter( 43 | [ 44 | cargo_workspace_directory.clone(), 45 | PathBuf::from("./sarif-fmt/tests/data/shellcheck.out"), 46 | ] 47 | .iter(), 48 | ))?; 49 | 50 | let cmd = format!( 51 | "{} -i {} | {}", 52 | shellcheck_sarif_bin.to_str().unwrap(), 53 | shellcheck_output.to_str().unwrap(), 54 | sarif_fmt_bin.to_str().unwrap(), 55 | ); 56 | 57 | let mut env_map: HashMap<_, _> = std::env::vars().collect(); 58 | env_map.insert("NO_COLOR".into(), "1".into()); 59 | let output = duct_sh::sh_dangerous(cmd.as_str()) 60 | .dir(cargo_workspace_directory) 61 | .unchecked() 62 | .full_env(&env_map) 63 | .read()?; 64 | 65 | assert!(output.contains( 66 | "error: Couldn't parse this for loop. Fix to allow more checks." 
67 | )); 68 | assert!(output.contains("shell.sh:5:1")); 69 | assert!(output.contains("for f in \"*.ogg\"")); 70 | assert!(output.contains("SC1073")); 71 | assert!(output 72 | .contains("For more information: https://www.shellcheck.net/wiki/SC1073")); 73 | 74 | Ok(()) 75 | } 76 | -------------------------------------------------------------------------------- /sarif-fmt/tests/version-numbers.rs: -------------------------------------------------------------------------------- 1 | #[test] 2 | fn test_readme_deps() { 3 | version_sync::assert_markdown_deps_updated!("README.md"); 4 | } 5 | 6 | #[test] 7 | fn test_html_root_url() { 8 | version_sync::assert_html_root_url_updated!("src/bin.rs"); 9 | } 10 | -------------------------------------------------------------------------------- /serde-sarif/.gitignore: -------------------------------------------------------------------------------- 1 | # Added by cargo 2 | # 3 | # already existing elements were commented out 4 | 5 | /target 6 | Cargo.lock 7 | -------------------------------------------------------------------------------- /serde-sarif/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "serde-sarif" 3 | version = "0.8.0" 4 | authors = ["Paul Sastrasinh "] 5 | edition = "2018" 6 | description = "Serde serialization for SARIF files" 7 | license = "MIT" 8 | readme = "README.md" 9 | keywords = ["sarif", "serde", "serialization"] 10 | categories = ["encoding"] 11 | build = "build.rs" 12 | homepage = "https://psastras.github.io/sarif-rs/" 13 | documentation = "https://docs.rs/serde_sarif" 14 | repository = "https://github.com/psastras/sarif-rs" 15 | 16 | [badges] 17 | github = { repository = "psastras/sarif-rs" } 18 | 19 | [lints.rust] 20 | unexpected_cfgs = { level = "warn", check-cfg = ['cfg(doc_cfg)'] } 21 | 22 | [features] 23 | default = [] 24 | clippy-converters = ["cargo_metadata", "regex", "anyhow"] 25 | miri-converters = ["cargo_metadata", "regex", 
"anyhow"] 26 | hadolint-converters = ["anyhow"] 27 | shellcheck-converters = ["anyhow"] 28 | clang-tidy-converters = ["regex", "anyhow", "once_cell"] 29 | opt-builder = [] 30 | 31 | [dependencies] 32 | anyhow = { version = "1.0.98", optional = true } 33 | cargo_metadata = { version = "0.19.2", optional = true } 34 | regex = { version = "1.11.1", optional = true } 35 | serde = "1.0.219" 36 | serde_json = "1.0.140" 37 | strum = "0.27" 38 | strum_macros = "0.27" 39 | thiserror = "2.0.12" 40 | typed-builder = "0.21.0" 41 | once_cell = { version = "1.21.3", optional = true } 42 | 43 | [dev-dependencies] 44 | version-sync = "0.9" 45 | 46 | [build-dependencies] 47 | anyhow = "1.0.98" 48 | prettyplease = "0.2.33" 49 | proc-macro2 = "1.0.95" 50 | quote = "1.0.40" 51 | schemafy_lib = "0.6.0" 52 | serde_json = "1.0.140" 53 | syn = "2.0.102" 54 | -------------------------------------------------------------------------------- /serde-sarif/LICENSE: -------------------------------------------------------------------------------- 1 | ../LICENSE -------------------------------------------------------------------------------- /serde-sarif/README.md: -------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | 3 | # serde-sarif 4 | 5 | This crate provides a type safe [serde](https://serde.rs/) compatible 6 | [SARIF](https://sarifweb.azurewebsites.net/) structure. It is intended for use 7 | in Rust code which may need to read or write SARIF files. 8 | 9 | The latest [documentation can be found here](https://docs.rs/serde_sarif). 10 | 11 | serde is a popular serialization framework for Rust. 
More information can be 12 | found on the official repository: 13 | [https://github.com/serde-rs/serde](https://github.com/serde-rs/serde) 14 | 15 | SARIF or the Static Analysis Results Interchange Format is an industry standard 16 | format for the output of static analysis tools. More information can be found on 17 | the official website: 18 | [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 19 | 20 | ## Usage 21 | 22 | For most cases, simply use the root [sarif::Sarif] struct with [serde] to read 23 | and write to and from the struct. 24 | 25 | ## Example 26 | 27 | ```rust 28 | use serde_sarif::sarif::Sarif; 29 | 30 | let sarif: Sarif = serde_json::from_str( 31 | r#"{ "version": "2.1.0", "runs": [] }"# 32 | ).unwrap(); 33 | 34 | assert_eq!( 35 | sarif.version.to_string(), 36 | "\"2.1.0\"".to_string() 37 | ); 38 | ``` 39 | 40 | Because many of the [sarif::Sarif] structures contain a lot of optional fields, 41 | it is often convenient to use the builder pattern to construct these structs. 42 | Each structure has a builder method to accomplish this. 43 | 44 | ## Example 45 | 46 | ```rust 47 | use serde_sarif::sarif::Message; 48 | 49 | let message = Message::builder() 50 | .id("id") 51 | .build(); 52 | ``` 53 | 54 | This uses [`TypedBuilder`](https://docs.rs/typed-builder/latest/typed_builder/derive.TypedBuilder.html) 55 | for compile time type checking. 56 | 57 | ## Internal Implementation Details 58 | 59 | The root [sarif::Sarif] struct is automatically generated from the latest Sarif 60 | JSON schema; this is done at build time (via the buildscript). 61 | 62 | ## Crate Features 63 | 64 | This crate contains different features which may be enabled depending on your 65 | use case.
66 | 67 | ### Example 68 | 69 | ```toml 70 | [dependencies] 71 | serde-sarif = { version = "*", features = ["clippy-converters"] } 72 | ``` 73 | 74 | ### Converters 75 | 76 | - **clang-tidy-converters** Provides conversions between clang tidy and SARIF 77 | types 78 | - **clippy-converters** Provides conversions between Clippy and SARIF types 79 | - **hadolint-converters** Provides conversions between hadolint and SARIF types 80 | - **miri-converters** Provides conversions between miri and SARIF types 81 | - **shellcheck-converters** Provides conversions between shellcheck and SARIF 82 | types 83 | 84 | ### Other 85 | 86 | - **opt-builder** Enables 87 | [`TypedBuilder`](https://docs.rs/typed-builder/latest/typed_builder/derive.TypedBuilder.html#customization-with-attributes)s 88 | fallback setters for easier conditional building 89 | 90 | License: MIT 91 | -------------------------------------------------------------------------------- /serde-sarif/build.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::fs::File; 3 | use std::io::Write; 4 | use std::path::Path; 5 | use std::path::PathBuf; 6 | 7 | use anyhow::Result; 8 | use schemafy_lib::Expander; 9 | use schemafy_lib::Schema; 10 | use syn::parse::Parser; 11 | 12 | // Add additional items to the generated sarif.rs file 13 | // Currently adds: derive(TypedBuilder) to each struct 14 | // and appropriate use statements at the top of the file 15 | // todo: this (and other parts) need a refactor and tests 16 | fn process_token_stream(input: proc_macro2::TokenStream) -> syn::File { 17 | let mut ast: syn::File = syn::parse2(input).unwrap(); 18 | 19 | // add use directives to top of the file 20 | ast.items.insert( 21 | 0, 22 | syn::parse_quote! { 23 | use serde::{Serialize, Deserialize}; 24 | }, 25 | ); 26 | ast.items.insert( 27 | 0, 28 | syn::parse_quote! 
{ 29 | use typed_builder::TypedBuilder; 30 | }, 31 | ); 32 | ast.items.insert( 33 | 0, 34 | syn::parse_quote! { 35 | use std::collections::BTreeMap; 36 | }, 37 | ); 38 | 39 | // Checks if the type is an Option type (returns true if yes, false otherwise) 40 | fn path_is_option(path: &syn::Path) -> bool { 41 | let idents_of_path = 42 | path.segments.iter().fold(String::new(), |mut acc, v| { 43 | acc.push_str(&v.ident.to_string()); 44 | acc.push('|'); 45 | acc 46 | }); 47 | 48 | vec!["Option|", "std|option|Option|", "core|option|Option|"] 49 | .into_iter() 50 | .any(|s| idents_of_path == *s) 51 | } 52 | 53 | // Checks if the type is a collection type (returns true if yes, false otherwise) 54 | fn path_is_vec(path: &syn::Path) -> bool { 55 | let idents_of_path = 56 | path.segments.iter().fold(String::new(), |mut acc, v| { 57 | acc.push_str(&v.ident.to_string()); 58 | acc.push('|'); 59 | acc 60 | }); 61 | 62 | vec!["Vec|", "std|vec|Vec|"] 63 | .into_iter() 64 | .any(|s| idents_of_path.starts_with(s)) 65 | } 66 | 67 | ast.items.iter_mut().for_each(|ref mut item| { 68 | if let syn::Item::Struct(s) = item { 69 | // add builder attributes to each struct 70 | s.attrs.extend(vec![ 71 | syn::parse_quote! { 72 | #[derive(TypedBuilder)] 73 | }, 74 | syn::parse_quote! { 75 | #[builder(field_defaults(setter(into)))] 76 | }, 77 | ]); 78 | 79 | // Add a field to PropertyBag to allow arbitrary JSON data 80 | // The proper way to do this would be to modify the JSON schema parsing library to properly 81 | // output this. This is a workaround since there's only one struct in the SARIF schema that requires this. 82 | if s.ident == "PropertyBag" { 83 | if let syn::Fields::Named(fields) = &mut s.fields { 84 | fields.named.push( 85 | syn::Field::parse_named 86 | .parse2(syn::parse_quote! 
{ 87 | #[doc = r"Arbitrary properties to include in the PropertyBag"] 88 | #[serde(flatten)] 89 | #[builder(default = ::std::collections::BTreeMap::new())] 90 | pub additional_properties: BTreeMap 91 | }) 92 | .unwrap(), 93 | ); 94 | } 95 | } 96 | 97 | // Rewrite Result::kind and Result::level to use ResultKind and 98 | // ResultLevel instead of serde_json::Value. 99 | // This is a workaround for schemafy's inability to produce appropriate 100 | // exhaustive enums here. 101 | if s.ident == "Result" { 102 | if let syn::Fields::Named(fields) = &mut s.fields { 103 | for field in fields.named.iter_mut() { 104 | if field.ident.as_ref().unwrap() == "kind" { 105 | field.ty = syn::parse_quote! { Option }; 106 | } else if field.ident.as_ref().unwrap() == "level" { 107 | field.ty = syn::parse_quote! { Option }; 108 | } 109 | } 110 | } 111 | } 112 | 113 | // for each struct field, if that field is Optional, set None 114 | // as the default value when using the builder 115 | (&mut s.fields).into_iter().for_each(|ref mut field| { 116 | if let syn::Type::Path(typepath) = &field.ty { 117 | if path_is_option(&typepath.path) { 118 | #[cfg(not(feature = "opt-builder"))] 119 | field.attrs.push(syn::parse_quote! { 120 | #[builder(setter(strip_option), default)] 121 | }); 122 | 123 | #[cfg(feature = "opt-builder")] 124 | field.attrs.push(syn::parse_quote! { 125 | #[builder(setter(strip_option(fallback_prefix = "opt_")), default)] 126 | }); 127 | } else if path_is_vec(&typepath.path) { 128 | field.attrs.push(syn::parse_quote! 
{ 129 | #[builder(default)] 130 | }) 131 | } 132 | } 133 | }); 134 | } 135 | }); 136 | 137 | ast 138 | } 139 | 140 | fn main() -> Result<()> { 141 | // Rerun if the schema changes 142 | println!("cargo:rerun-if-changed=src/schema.json"); 143 | let path = Path::new("src/schema.json"); 144 | 145 | // Generate the Rust schema struct 146 | let json = std::fs::read_to_string(path).unwrap(); 147 | let schema: Schema = serde_json::from_str(&json)?; 148 | let path_str = path.to_str().unwrap(); 149 | let mut expander = Expander::new(Some("Sarif"), path_str, &schema); 150 | let generated = process_token_stream(expander.expand(&schema)); 151 | 152 | // Write the struct to the $OUT_DIR/sarif.rs file. 153 | let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); 154 | let mut file = File::create(out_path.join("sarif.rs"))?; 155 | file.write_all(prettyplease::unparse(&generated).as_bytes())?; 156 | 157 | Ok(()) 158 | } 159 | -------------------------------------------------------------------------------- /serde-sarif/cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 6 | # See documentation for more information on available options. 
7 | 8 | [changelog] 9 | # changelog header 10 | header = """ 11 | # Changelog\n 12 | """ 13 | # template for the changelog body 14 | # https://keats.github.io/tera/docs/#introduction 15 | body = """ 16 | {% if version %}\ 17 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 18 | {% else %}\ 19 | ## [unreleased] 20 | {% endif %}\ 21 | {% for group, commits in commits | group_by(attribute="group") %} 22 | ### {{ group | striptags | trim | upper_first }} 23 | {% for commit in commits %} 24 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 25 | {% if commit.breaking %}[**breaking**] {% endif %}\ 26 | {{ commit.message | upper_first }}\ 27 | {% endfor %} 28 | {% endfor %}\n 29 | """ 30 | # template for the changelog footer 31 | footer = """""" 32 | # remove the leading and trailing s 33 | trim = true 34 | # postprocessors 35 | postprocessors = [ 36 | # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL 37 | ] 38 | 39 | [git] 40 | # parse the commits based on https://www.conventionalcommits.org 41 | conventional_commits = true 42 | # filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | # Replace issue numbers 49 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 50 | # Check spelling of the commit with https://github.com/crate-ci/typos 51 | # If the spelling is incorrect, it will be automatically fixed. 
52 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 53 | ] 54 | # regex for parsing and grouping commits 55 | commit_parsers = [ 56 | { message = "^feat", group = "🚀 Features" }, 57 | { message = "^fix", group = "🐛 Bug Fixes" }, 58 | { message = "^doc", group = "📚 Documentation" }, 59 | { message = "^perf", group = "⚡ Performance" }, 60 | { message = "^refactor", group = "🚜 Refactor" }, 61 | { message = "^style", group = "🎨 Styling" }, 62 | { message = "^test", group = "🧪 Testing" }, 63 | { message = "^chore\\(release\\): prepare for", skip = true }, 64 | { message = "^chore\\(deps.*\\)", skip = true }, 65 | { message = "^chore\\(pr\\)", skip = true }, 66 | { message = "^chore\\(pull\\)", skip = true }, 67 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 68 | { body = ".*security", group = "🛡️ Security" }, 69 | { message = "^revert", group = "◀️ Revert" }, 70 | ] 71 | # protect breaking changes from being skipped due to matching a skipping commit_parser 72 | protect_breaking_commits = false 73 | # filter out the commits that are not matched by commit parsers 74 | filter_commits = true 75 | # regex for matching git tags 76 | tag_pattern = "serde-sarif-v[0-9].*" 77 | # regex for skipping tags 78 | # skip_tags = "" 79 | # regex for ignoring tags 80 | # ignore_tags = "" 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | # limit the number of commits included in the changelog. 
86 | # limit_commits = 42 87 | -------------------------------------------------------------------------------- /serde-sarif/src/converters/cargo.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | convert::From, 4 | io::{BufWriter, Write}, 5 | }; 6 | 7 | use crate::sarif::{self, Location}; 8 | use anyhow::Result; 9 | use cargo_metadata::{ 10 | self, 11 | diagnostic::{Diagnostic, DiagnosticLevel, DiagnosticSpan}, 12 | }; 13 | 14 | // TODO: refactor, add features, etc. 15 | 16 | impl From<&Diagnostic> for sarif::Message { 17 | fn from(diagnostic: &Diagnostic) -> Self { 18 | sarif::Message::builder().text(&diagnostic.message).build() 19 | } 20 | } 21 | 22 | impl From<&DiagnosticSpan> for sarif::ArtifactLocation { 23 | fn from(span: &DiagnosticSpan) -> Self { 24 | sarif::ArtifactLocation::builder() 25 | .uri(&span.file_name) 26 | .build() 27 | } 28 | } 29 | 30 | impl From<&DiagnosticSpan> for sarif::Region { 31 | fn from(span: &DiagnosticSpan) -> Self { 32 | sarif::Region::builder() 33 | .byte_offset(span.byte_start) 34 | .byte_length(span.byte_end - span.byte_start) 35 | .start_line(span.line_start as i64) 36 | .start_column(span.column_start as i64) 37 | .end_line(span.line_end as i64) 38 | .end_column(span.column_end as i64) 39 | .build() 40 | } 41 | } 42 | 43 | impl From<&DiagnosticLevel> for sarif::ResultLevel { 44 | fn from(level: &DiagnosticLevel) -> Self { 45 | match level { 46 | DiagnosticLevel::Help | DiagnosticLevel::Note => sarif::ResultLevel::Note, 47 | DiagnosticLevel::Warning => sarif::ResultLevel::Warning, 48 | DiagnosticLevel::Error => sarif::ResultLevel::Error, 49 | _ => sarif::ResultLevel::None, 50 | } 51 | } 52 | } 53 | 54 | impl From<&DiagnosticSpan> for sarif::Location { 55 | fn from(span: &DiagnosticSpan) -> Self { 56 | let artifact_location = sarif::ArtifactLocation::from(span); 57 | let region = sarif::Region::from(span); 58 | let location = 
sarif::Location::builder().physical_location( 59 | sarif::PhysicalLocation::builder() 60 | .artifact_location(artifact_location) 61 | .region(region) 62 | .build(), 63 | ); 64 | 65 | if let Some(label) = span.label.as_ref() { 66 | location 67 | .message(sarif::Message::builder().text(label).build()) 68 | .build() 69 | } else { 70 | location.build() 71 | } 72 | } 73 | } 74 | 75 | // recursively visits all diagnostic children which are non-local (have no span) 76 | // to build up diagnostic text. 77 | fn build_global_message( 78 | diagnostic: &Diagnostic, 79 | writer: &mut BufWriter, 80 | ) -> Result<()> { 81 | // if span exists, this message is local to a span, so skip it 82 | if diagnostic.spans.is_empty() { 83 | writeln!(writer, "{}", diagnostic.message)?; 84 | } 85 | 86 | diagnostic 87 | .children 88 | .iter() 89 | .try_for_each(|diagnostic| build_global_message(diagnostic, writer)) 90 | } 91 | 92 | /// Collects all the locations in the diagnostic's children spans 93 | fn get_related_locations( 94 | diagnostic: &Diagnostic, 95 | ) -> Result, anyhow::Error> { 96 | let mut related_locations = vec![]; 97 | for child in &diagnostic.children { 98 | let mut message = child.message.clone(); 99 | for child_span in &child.spans { 100 | let mut child_loc: Location = child_span.into(); 101 | if child_span.suggested_replacement.is_some() { 102 | let replacement = child_span.suggested_replacement.as_ref().unwrap(); 103 | message.push_str(&format!(" \"{replacement}\"")); 104 | } 105 | 106 | child_loc.message = Some(sarif::Message::from(&message)); 107 | related_locations.push(child_loc); 108 | } 109 | } 110 | Ok(related_locations) 111 | } 112 | 113 | pub(crate) fn process>( 114 | mut diagnostic_iter: I, 115 | tool_name: &str, 116 | tool_info_uri: &str, 117 | ) -> Result { 118 | let mut results = vec![]; 119 | let mut map = HashMap::new(); 120 | let mut rules = vec![]; 121 | 122 | let re = 123 | regex::Regex::new(r"^for further information visit (?P\S+)").unwrap(); 124 | 125 | 
diagnostic_iter.try_for_each(|diagnostic| -> Result<()> { 126 | diagnostic.spans.iter().try_for_each(|span| -> Result<()> { 127 | let diagnostic_code = match &diagnostic.code { 128 | Some(diagnostic_code) => diagnostic_code.code.clone(), 129 | _ => String::new(), 130 | }; 131 | if !map.contains_key(&diagnostic_code) { 132 | let mut writer = BufWriter::new(Vec::new()); 133 | build_global_message(&diagnostic, &mut writer)?; 134 | map.insert(diagnostic_code.clone(), map.len() as i64); 135 | 136 | let rule = sarif::ReportingDescriptor::builder() 137 | .id(&diagnostic_code) 138 | .full_description(&String::from_utf8(writer.into_inner()?)?); 139 | 140 | // help_uri is contained in a child diagnostic with a diagnostic level == help 141 | // search for the relevant child diagnostic, then extract the uri from the message 142 | let help_uri = diagnostic 143 | .children 144 | .iter() 145 | .find(|child| matches!(child.level, DiagnosticLevel::Help)) 146 | .and_then(|help| { 147 | re.captures(&help.message) 148 | .and_then(|captures| captures.name("url")) 149 | .map(|re_match| re_match.as_str()) 150 | }); 151 | 152 | let rule = if let Some(help_uri) = help_uri { 153 | rule.help_uri(help_uri).build() 154 | } else { 155 | rule.build() 156 | }; 157 | 158 | rules.push(rule); 159 | } 160 | 161 | if let Some(value) = map.get(&diagnostic_code) { 162 | let level: sarif::ResultLevel = (&diagnostic.level).into(); 163 | results.push( 164 | sarif::Result::builder() 165 | .rule_id(diagnostic_code) 166 | .rule_index(*value) 167 | .message(&diagnostic) 168 | .locations(vec![span.into()]) 169 | .level(level) 170 | .related_locations(get_related_locations(&diagnostic)?) 
171 | .build(), 172 | ); 173 | } 174 | Ok(()) 175 | })?; 176 | 177 | Ok(()) 178 | })?; 179 | 180 | let tool_component: sarif::ToolComponent = sarif::ToolComponent::builder() 181 | .name(tool_name) 182 | .information_uri(tool_info_uri) 183 | .rules(rules) 184 | .build(); 185 | let run = sarif::Run::builder() 186 | .tool(tool_component) 187 | .results(results) 188 | .build(); 189 | 190 | let sarif = sarif::Sarif::builder() 191 | .version(sarif::Version::V2_1_0.to_string()) 192 | .schema(sarif::SCHEMA_URL) 193 | .runs(vec![run]) 194 | .build(); 195 | 196 | Ok(sarif) 197 | } 198 | -------------------------------------------------------------------------------- /serde-sarif/src/converters/clang_tidy.rs: -------------------------------------------------------------------------------- 1 | use crate::sarif::{self}; 2 | use anyhow::Result; 3 | use once_cell::sync::Lazy; 4 | use regex::Regex; 5 | use serde::{Deserialize, Serialize}; 6 | use std::convert::TryFrom; 7 | use std::convert::TryInto; 8 | use std::io::{BufRead, Write}; 9 | use std::str::FromStr; 10 | use typed_builder::TypedBuilder; 11 | 12 | #[derive(Clone, PartialEq, Debug, Deserialize, Serialize, TypedBuilder)] 13 | #[builder(field_defaults(setter(into)))] 14 | struct ClangTidyResult { 15 | #[builder(setter(strip_option), default)] 16 | pub file: Option, 17 | #[builder(setter(strip_option), default)] 18 | pub line: Option, 19 | #[builder(setter(strip_option), default)] 20 | pub column: Option, 21 | pub level: String, 22 | pub message: String, 23 | pub rules: String, 24 | } 25 | 26 | impl TryFrom<&ClangTidyResult> for sarif::ArtifactLocation { 27 | type Error = sarif::BuilderError; 28 | 29 | fn try_from(result: &ClangTidyResult) -> Result { 30 | result 31 | .file 32 | .as_ref() 33 | .ok_or(sarif::BuilderError::UninitializedField("file")) 34 | .map(|uri| sarif::ArtifactLocation::builder().uri(uri).build()) 35 | } 36 | } 37 | 38 | impl TryFrom<&ClangTidyResult> for sarif::Region { 39 | type Error = 
sarif::BuilderError; 40 | 41 | fn try_from(result: &ClangTidyResult) -> Result { 42 | let start_line = result 43 | .line 44 | .ok_or(sarif::BuilderError::UninitializedField("line"))?; 45 | let start_column = result 46 | .column 47 | .ok_or(sarif::BuilderError::UninitializedField("column"))?; 48 | Ok( 49 | sarif::Region::builder() 50 | .start_line(start_line) 51 | .start_column(start_column) 52 | .build(), 53 | ) 54 | } 55 | } 56 | 57 | impl TryFrom<&ClangTidyResult> for sarif::Location { 58 | type Error = sarif::BuilderError; 59 | 60 | fn try_from(result: &ClangTidyResult) -> Result { 61 | let artifact_location = sarif::ArtifactLocation::try_from(result)?; 62 | let region = sarif::Region::try_from(result)?; 63 | let location = sarif::Location::builder().physical_location( 64 | sarif::PhysicalLocation::builder() 65 | .artifact_location(artifact_location) 66 | .region(region) 67 | .build(), 68 | ); 69 | 70 | // Notes are converted to 'location' items with the message stored along with the location. 71 | // For other types of items (error, warning, info), the message will be stored inside the 72 | // 'result', so we skip it here. 
73 | Ok(if result.level == "note" { 74 | location.message(&result.message).build() 75 | } else { 76 | location.build() 77 | }) 78 | } 79 | } 80 | 81 | fn parse_clang_tidy_line( 82 | line: Result, 83 | ) -> Option { 84 | static RE: Lazy = Lazy::new(|| { 85 | Regex::new( 86 | r"^(?P([a-zA-Z]:|)[\w/\.\- \\]+):(?P\d+):(?P\d+):\s+(?Perror|warning|info|note):\s+(?P.+?)(?:\s+\[(?P[^\]]+)\])?$" 87 | ).unwrap() 88 | }); 89 | let line = line.unwrap(); 90 | let caps = RE.captures(&line); 91 | if let Some(caps) = caps { 92 | if let Some(message) = caps.name("message") { 93 | return Some(ClangTidyResult { 94 | file: caps.name("file").map(|f| f.as_str().into()), 95 | line: caps 96 | .name("line") 97 | .and_then(|f| f.as_str().parse::().ok()), 98 | column: caps 99 | .name("column") 100 | .and_then(|f| f.as_str().parse::().ok()), 101 | level: caps 102 | .name("level") 103 | .map_or_else(|| "info".into(), |f| f.as_str().into()), 104 | message: message.as_str().into(), 105 | rules: caps 106 | .name("rules") 107 | .map_or_else(|| "".into(), |f| f.as_str().into()), 108 | }); 109 | } 110 | } 111 | None 112 | } 113 | 114 | fn process(reader: R) -> Result { 115 | let mut results = vec![]; 116 | // Create an iterator over all the ClangTidyResult items 117 | let mut clang_tidy_result_iter = 118 | reader.lines().filter_map(parse_clang_tidy_line).peekable(); 119 | 120 | while let Some(result) = clang_tidy_result_iter.next() { 121 | // The first check alias is used as the ruleId for the result 122 | let rule_id = result.rules.split(',').next().unwrap_or_default(); 123 | 124 | let location: sarif::Location = (&result).try_into()?; 125 | let mut related_locations = vec![]; 126 | 127 | // Since clang-tidy emits "note" items which have to be folded into 128 | // the previous error/warning/info items, look ahead at the next items 129 | // and collect all the "notes". 
130 | while let Some(next_result) = clang_tidy_result_iter.peek() { 131 | match next_result.level.as_str() { 132 | "note" => { 133 | let note_location: sarif::Location = (next_result).try_into()?; 134 | related_locations.push(note_location); 135 | // Since we got the next result via .peek(), advance the iterator 136 | clang_tidy_result_iter.next(); 137 | } 138 | _ => { 139 | // Not a note, back to the outer loop 140 | break; 141 | } 142 | } 143 | } 144 | 145 | let builder = sarif::Result::builder() 146 | .rule_id(rule_id) 147 | .message(&result.message) 148 | .locations(vec![location]) 149 | .level(sarif::ResultLevel::from_str(&result.level)?); 150 | let result = if !related_locations.is_empty() { 151 | builder.related_locations(related_locations).build() 152 | } else { 153 | builder.build() 154 | }; 155 | 156 | results.push(result); 157 | } 158 | 159 | let tool_component: sarif::ToolComponent = 160 | sarif::ToolComponent::builder().name("clang-tidy").build(); 161 | let run = sarif::Run::builder() 162 | .tool(tool_component) 163 | .results(results) 164 | .build(); 165 | 166 | let sarif = sarif::Sarif::builder() 167 | .version(sarif::Version::V2_1_0.to_string()) 168 | .runs(vec![run]) 169 | .build(); 170 | 171 | Ok(sarif) 172 | } 173 | 174 | /// Returns [sarif::Sarif] serialized into a JSON stream 175 | /// 176 | /// # Arguments 177 | /// 178 | /// * `reader` - A `BufRead` of cargo output 179 | /// * `writer` - A `Writer` to write the results to 180 | pub fn parse_to_writer( 181 | reader: R, 182 | writer: W, 183 | ) -> Result<()> { 184 | let sarif = process(reader)?; 185 | serde_json::to_writer_pretty(writer, &sarif)?; 186 | Ok(()) 187 | } 188 | 189 | /// Returns [sarif::Sarif] serialized into a JSON string 190 | /// 191 | /// # Arguments 192 | /// 193 | /// * `reader` - A `BufRead` of clang-tidy output 194 | pub fn parse_to_string(reader: R) -> Result { 195 | let sarif = process(reader)?; 196 | let json = serde_json::to_string_pretty(&sarif)?; 197 | Ok(json) 198 | 
}

// ---- serde-sarif/src/converters/clippy.rs ----
use std::io::{BufRead, Write};

use crate::sarif;
use anyhow::Result;
use cargo_metadata::{self, Message};

/// Folds a stream of `cargo clippy --message-format=json` output into a
/// single SARIF run tagged with the "clippy" tool component.
fn process<R: BufRead>(reader: R) -> Result<sarif::Sarif> {
  // Keep only compiler diagnostics; artifacts, build-script output and
  // other cargo messages carry no lint information.
  let iter = Message::parse_stream(reader)
    .filter_map(|r| r.ok())
    .filter_map(|m| match m {
      Message::CompilerMessage(msg) => Some(msg.message),
      _ => None,
    });

  super::cargo::process(
    iter,
    "clippy",
    "https://rust-lang.github.io/rust-clippy/",
  )
}

/// Returns [sarif::Sarif] serialized into a JSON stream
///
/// # Arguments
///
/// * `reader` - A `BufRead` of cargo clippy output
/// * `writer` - A `Writer` to write the results to
pub fn parse_to_writer<R: BufRead, W: Write>(
  reader: R,
  writer: W,
) -> Result<()> {
  let sarif = process(reader)?;
  serde_json::to_writer_pretty(writer, &sarif)?;
  Ok(())
}

/// Returns [sarif::Sarif] serialized into a JSON string
///
/// # Arguments
///
/// * `reader` - A `BufRead` of cargo clippy output
pub fn parse_to_string<R: BufRead>(reader: R) -> Result<String> {
  let sarif = process(reader)?;
  let json = serde_json::to_string_pretty(&sarif)?;
  Ok(json)
}

// ---- serde-sarif/src/converters/hadolint.rs ----
use std::{
  collections::HashMap,
  io::{BufRead, Write},
  str::FromStr,
};

use strum_macros::Display;
use strum_macros::EnumString;
use typed_builder::TypedBuilder;

use crate::sarif::{self, ResultLevel};
use anyhow::Result;
use serde::{Deserialize, Serialize};

#[derive(Clone, PartialEq, Debug, Deserialize,
Serialize, TypedBuilder)]
#[builder(field_defaults(setter(into)))]
// One hadolint diagnostic as emitted by `hadolint -f json`.
struct HadolintResult {
  file: String,
  line: i64,
  column: i64,
  level: String,
  // Rule code, e.g. "DL3008"; used as the SARIF ruleId.
  code: String,
  message: String,
}

#[doc = "A value specifying the severity level of the result."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(untagged)]
enum HadolintLevel {
  #[strum(serialize = "info")]
  Info,
  #[strum(serialize = "warning")]
  Warning,
  #[strum(serialize = "error")]
  Error,
  #[strum(serialize = "style")]
  Style,
}

// Hadolint severities map onto the coarser SARIF levels; both "info" and
// "style" collapse to Note.
impl From<HadolintLevel> for sarif::ResultLevel {
  fn from(level: HadolintLevel) -> Self {
    match level {
      HadolintLevel::Info => ResultLevel::Note,
      HadolintLevel::Warning => ResultLevel::Warning,
      HadolintLevel::Error => ResultLevel::Error,
      HadolintLevel::Style => ResultLevel::Note,
    }
  }
}

impl From<&HadolintResult> for sarif::ArtifactLocation {
  fn from(result: &HadolintResult) -> Self {
    sarif::ArtifactLocation::builder().uri(&result.file).build()
  }
}

// A SARIF location is the artifact (file) plus the region within it.
impl From<&HadolintResult> for sarif::Location {
  fn from(result: &HadolintResult) -> Self {
    let artifact_location = sarif::ArtifactLocation::from(result);
    let region = sarif::Region::from(result);
    sarif::Location::builder()
      .physical_location(
        sarif::PhysicalLocation::builder()
          .artifact_location(artifact_location)
          .region(region)
          .build(),
      )
      .build()
  }
}

impl From<&HadolintResult> for sarif::Region {
  fn from(result: &HadolintResult) -> Self {
    sarif::Region::builder()
      .start_line(result.line)
      .start_column(result.column)
      .build()
  }
}

/// Folds hadolint JSON output into a single SARIF run. Each distinct rule
/// code becomes one reportingDescriptor; results reference it by index.
fn process<R: BufRead>(mut reader: R) -> Result<sarif::Sarif> {
  let mut data = String::new();
  reader.read_to_string(&mut data)?;
  let mut results = vec![];
  // rule code -> index into `rules`, so repeated codes share a descriptor
  let mut map = HashMap::new();
  let mut rules
= vec![];

  let hadolint_results: Vec<HadolintResult> = serde_json::from_str(&data)?;
  hadolint_results
    .iter()
    .try_for_each(|result| -> Result<()> {
      // First sighting of a rule code registers its descriptor.
      if !map.contains_key(&result.code) {
        map.insert(result.code.clone(), map.len() as i64);
        rules.push(
          sarif::ReportingDescriptor::builder()
            .id(result.code.clone())
            .name(result.code.clone())
            .short_description(&result.code)
            .full_description(&format!(
              "For more information: https://github.com/hadolint/hadolint/wiki/{}",
              result.code
            ))
            .build(),
        );
      }
      if let Some(value) = map.get(&result.code) {
        let level: sarif::ResultLevel =
          HadolintLevel::from_str(&result.level)?.into();
        results.push(
          sarif::Result::builder()
            .rule_id(result.code.clone())
            .rule_index(*value)
            .message(&result.message)
            .locations(vec![result.into()])
            .level(level)
            .build(),
        );
      }
      Ok(())
    })?;
  let tool_component = sarif::ToolComponent::builder()
    .name("hadolint")
    .rules(rules)
    .build();
  let run = sarif::Run::builder()
    .tool(tool_component)
    .results(results)
    .build();

  Ok(
    sarif::Sarif::builder()
      .version(sarif::Version::V2_1_0.to_string())
      .runs(vec![run])
      .build(),
  )
}

/// Returns [sarif::Sarif] serialized into a JSON stream
///
/// # Arguments
///
/// * `reader` - A `BufRead` of hadolint output
/// * `writer` - A `Writer` to write the results to
pub fn parse_to_writer<R: BufRead, W: Write>(
  reader: R,
  writer: W,
) -> Result<()> {
  let sarif = process(reader)?;
  serde_json::to_writer_pretty(writer, &sarif)?;
  Ok(())
}

/// Returns [sarif::Sarif] serialized into a JSON string
///
/// # Arguments
///
/// * `reader` - A `BufRead` of hadolint output
pub fn parse_to_string<R: BufRead>(reader: R) -> Result<String> {
159 | let sarif = process(reader)?; 160 | let json = serde_json::to_string_pretty(&sarif)?; 161 | Ok(json) 162 | } 163 | -------------------------------------------------------------------------------- /serde-sarif/src/converters/miri.rs: -------------------------------------------------------------------------------- 1 | use std::io::{BufRead, Write}; 2 | 3 | use crate::sarif; 4 | use anyhow::Result; 5 | use cargo_metadata::{self, diagnostic::Diagnostic}; 6 | use serde::Deserialize; 7 | 8 | struct DiagnosticIter { 9 | input: R, 10 | } 11 | 12 | impl Iterator for DiagnosticIter { 13 | type Item = std::io::Result>; 14 | 15 | fn next(&mut self) -> Option { 16 | let mut line = String::new(); 17 | self 18 | .input 19 | .read_line(&mut line) 20 | .map(|n| { 21 | if n == 0 { 22 | None 23 | } else { 24 | if line.ends_with('\n') { 25 | line.truncate(line.len() - 1); 26 | } 27 | let mut deserializer = serde_json::Deserializer::from_str(&line); 28 | deserializer.disable_recursion_limit(); 29 | Some(Diagnostic::deserialize(&mut deserializer).ok()) 30 | } 31 | }) 32 | .transpose() 33 | } 34 | } 35 | 36 | fn process(reader: R) -> Result { 37 | let stream = DiagnosticIter { input: reader }; 38 | let iter = stream.filter_map(|r| r.ok()).flatten(); 39 | 40 | super::cargo::process(iter, "miri", "https://github.com/rust-lang/miri") 41 | } 42 | 43 | /// Returns [sarif::Sarif] serialized into a JSON stream 44 | /// 45 | /// # Arguments 46 | /// 47 | /// * `reader` - A `BufRead` of cargo miri output 48 | /// * `writer` - A `Writer` to write the results to 49 | pub fn parse_to_writer( 50 | reader: R, 51 | writer: W, 52 | ) -> Result<()> { 53 | let sarif = process(reader)?; 54 | serde_json::to_writer_pretty(writer, &sarif)?; 55 | Ok(()) 56 | } 57 | 58 | /// Returns [sarif::Sarif] serialized into a JSON string 59 | /// 60 | /// # Arguments 61 | /// 62 | /// * `reader` - A `BufRead` of cargo miri output 63 | pub fn parse_to_string(reader: R) -> Result { 64 | let sarif = process(reader)?; 
  let json = serde_json::to_string_pretty(&sarif)?;
  Ok(json)
}

// ---- serde-sarif/src/converters/mod.rs ----

// Shared plumbing for converters that consume cargo's JSON diagnostic
// stream; compiled whenever either dependent converter is enabled.
#[cfg(any(feature = "clippy-converters", feature = "miri-converters"))]
mod cargo;

// Each converter is feature-gated so downstream users compile only the
// dependencies they need; `doc(cfg(...))` surfaces the gate in rustdoc.
#[cfg(feature = "clippy-converters")]
#[cfg_attr(doc, doc(cfg(feature = "clippy-converters")))]
pub mod clippy;

#[cfg(feature = "miri-converters")]
#[cfg_attr(doc, doc(cfg(feature = "miri-converters")))]
pub mod miri;

#[cfg(feature = "hadolint-converters")]
#[cfg_attr(doc, doc(cfg(feature = "hadolint-converters")))]
pub mod hadolint;

#[cfg(feature = "shellcheck-converters")]
#[cfg_attr(doc, doc(cfg(feature = "shellcheck-converters")))]
pub mod shellcheck;

#[cfg(feature = "clang-tidy-converters")]
#[cfg_attr(doc, doc(cfg(feature = "clang-tidy-converters")))]
pub mod clang_tidy;

// ---- serde-sarif/src/converters/shellcheck.rs ----
use std::{
  collections::HashMap,
  io::{BufRead, Write},
  str::FromStr,
};

use strum_macros::Display;
use strum_macros::EnumString;
use typed_builder::TypedBuilder;

use crate::sarif::{self, ResultLevel};
use anyhow::Result;
use serde::{Deserialize, Serialize};

// One shellcheck diagnostic as emitted by `shellcheck -f json` (or one
// entry of the `comments` array of the `json1` format).
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, TypedBuilder)]
#[builder(field_defaults(setter(into)))]
struct ShellcheckResult {
  file: String,
  line: i64,
  #[serde(rename = "endLine")]
  end_line: i64,
  column: i64,
  #[serde(rename = "endColumn")]
  end_column: i64,
  level: String,
  code: i64,
  message: String,
  #[builder(setter(strip_option),
default)]
  // Present only when shellcheck can propose an automatic fix.
  // NOTE(review): generic argument lost in extraction; restored as
  // `ShellcheckFix`, the type deserialized from the `fix` JSON object.
  fix: Option<ShellcheckFix>,
}

// Wrapper for shellcheck's `json1` output: `{ "comments": [ ... ] }`.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, TypedBuilder)]
#[builder(field_defaults(setter(into)))]
struct JSON1Format {
  #[builder(setter(transform = |i: impl IntoIterator<Item = impl Into<ShellcheckResult>>| i.into_iter().map(Into::into).collect()))]
  comments: Vec<ShellcheckResult>,
}

#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, TypedBuilder)]
#[builder(field_defaults(setter(into)))]
struct ShellcheckFix {
  #[builder(setter(transform = |i: impl IntoIterator<Item = impl Into<ShellcheckReplacement>>| i.into_iter().map(Into::into).collect()))]
  replacements: Vec<ShellcheckReplacement>,
}

// A single replacement span inside a proposed fix.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, TypedBuilder)]
#[builder(field_defaults(setter(into)))]
struct ShellcheckReplacement {
  line: i64,
  #[serde(rename = "endLine")]
  end_line: i64,
  precedence: i64,
  #[serde(rename = "insertionPoint")]
  insertion_point: String,
  column: i64,
  replacement: String,
  #[serde(rename = "endColumn")]
  end_column: i64,
}

#[doc = "A value specifying the severity level of the result."]
#[derive(
  Display, Debug, Serialize, Deserialize, EnumString, Copy, Clone, PartialEq,
)]
#[serde(untagged)]
enum ShellcheckLevel {
  #[strum(serialize = "info")]
  Info,
  #[strum(serialize = "warning")]
  Warning,
  #[strum(serialize = "error")]
  Error,
  #[strum(serialize = "style")]
  Style,
}

// Shellcheck severities map onto the coarser SARIF levels; "info" and
// "style" both become Note.
impl From<ShellcheckLevel> for sarif::ResultLevel {
  fn from(level: ShellcheckLevel) -> Self {
    match level {
      ShellcheckLevel::Info => ResultLevel::Note,
      ShellcheckLevel::Warning => ResultLevel::Warning,
      ShellcheckLevel::Error => ResultLevel::Error,
      ShellcheckLevel::Style => ResultLevel::Note,
    }
  }
}

impl From<&ShellcheckResult> for sarif::ArtifactLocation {
  fn from(result: &ShellcheckResult) -> Self {
    sarif::ArtifactLocation::builder().uri(&result.file).build()
  }
}

// A SARIF location is the artifact (file) plus the region within it.
impl From<&ShellcheckResult> for sarif::Location {
  fn from(result: &ShellcheckResult) -> Self {
    let artifact_location = sarif::ArtifactLocation::from(result);
    let region = sarif::Region::from(result);
    sarif::Location::builder()
      .physical_location(
        sarif::PhysicalLocation::builder()
          .artifact_location(artifact_location)
          .region(region)
          .build(),
      )
      .build()
  }
}

// Region covering a proposed replacement (used for relatedLocations).
impl From<&ShellcheckReplacement> for sarif::Region {
  fn from(replacement: &ShellcheckReplacement) -> Self {
    sarif::Region::builder()
      .start_line(replacement.line)
      .start_column(replacement.column)
      .end_line(replacement.end_line)
      .end_column(replacement.end_column)
      .build()
  }
}

// Region covering the diagnostic itself.
impl From<&ShellcheckResult> for sarif::Region {
  fn from(result: &ShellcheckResult) -> Self {
    sarif::Region::builder()
      .start_line(result.line)
      .start_column(result.column)
      .end_line(result.end_line)
      .end_column(result.end_column)
      .build()
  }
}

/// Folds shellcheck JSON output (`json` or `json1` `format`) into a single
/// SARIF run, deduplicating rule metadata by shellcheck code.
fn process<R: BufRead>(mut reader: R, format: String) -> Result<sarif::Sarif> {
  let mut data = String::new();
  reader.read_to_string(&mut data)?;
  let mut results = vec![];
  // rule code -> index into `rules`
  let mut map = HashMap::new();
  let mut rules = vec![];

  let shellcheck_results: Vec<ShellcheckResult> = if format != "json1" {
    serde_json::from_str(&data)?
140 | } else { 141 | let json1_format: JSON1Format = serde_json::from_str(&data)?; 142 | json1_format.comments 143 | }; 144 | 145 | shellcheck_results 146 | .iter() 147 | .try_for_each(|result| -> Result<()> { 148 | #[allow(clippy::map_entry)] 149 | if !map.contains_key(&result.code.to_string()) { 150 | map.insert(result.code.to_string(), map.len() as i64); 151 | rules.push( 152 | sarif::ReportingDescriptor::builder() 153 | .id(result.code.to_string()) 154 | .name(result.code.to_string()) 155 | .short_description(&format!("SC{}", result.code)) 156 | .help_uri(format!( 157 | "https://www.shellcheck.net/wiki/SC{}", 158 | result.code 159 | )) 160 | .full_description(&format!( 161 | "For more information: https://www.shellcheck.net/wiki/SC{}", 162 | result.code 163 | )) 164 | .build(), 165 | ); 166 | } 167 | if let Some(value) = map.get(&result.code.to_string()) { 168 | let level: sarif::ResultLevel = 169 | ShellcheckLevel::from_str(&result.level)?.into(); 170 | let fixes = if let Some(fix) = result.fix.as_ref() { 171 | fix 172 | .replacements 173 | .iter() 174 | .map(|fix| { 175 | sarif::Fix::builder().description(&fix.replacement).build() 176 | }) 177 | // .filter_map(|v| v.ok()) 178 | .collect() 179 | } else { 180 | vec![] 181 | }; 182 | let related_locations = if let Some(fix) = result.fix.as_ref() { 183 | fix 184 | .replacements 185 | .iter() 186 | .map(|replacement| { 187 | sarif::Location::builder() 188 | .physical_location( 189 | sarif::PhysicalLocation::builder() 190 | .artifact_location(result) 191 | .region(replacement) 192 | .build(), 193 | ) 194 | .build() 195 | }) 196 | .collect() 197 | } else { 198 | vec![] 199 | }; 200 | results.push( 201 | sarif::Result::builder() 202 | .rule_id(result.code.to_string()) 203 | .rule_index(*value) 204 | .message(&result.message) 205 | .locations(vec![result.into()]) 206 | .related_locations(related_locations) 207 | .fixes(fixes) 208 | .level(level) 209 | .build(), 210 | ); 211 | } 212 | Ok(()) 213 | })?; 214 | let 
tool_component: sarif::ToolComponent = sarif::ToolComponent::builder() 215 | .name("shellcheck") 216 | .rules(rules) 217 | .build(); 218 | let run = sarif::Run::builder() 219 | .tool(tool_component) 220 | .results(results) 221 | .build(); 222 | 223 | Ok( 224 | sarif::Sarif::builder() 225 | .version(sarif::Version::V2_1_0.to_string()) 226 | .runs(vec![run]) 227 | .build(), 228 | ) 229 | } 230 | 231 | /// Returns [sarif::Sarif] serialized into a JSON stream 232 | /// 233 | /// # Arguments 234 | /// 235 | /// * `reader` - A `BufRead` of cargo output 236 | /// * `writer` - A `Writer` to write the results to 237 | /// * `format` - The format of the input 238 | pub fn parse_to_writer( 239 | reader: R, 240 | writer: W, 241 | format: String, 242 | ) -> Result<()> { 243 | let sarif = process(reader, format)?; 244 | serde_json::to_writer_pretty(writer, &sarif)?; 245 | Ok(()) 246 | } 247 | 248 | /// Returns [sarif::Sarif] serialized into a JSON string 249 | /// 250 | /// # Arguments 251 | /// 252 | /// * `reader` - A `BufRead` of shellcheck output 253 | /// * `format` - The format of the input 254 | pub fn parse_to_string( 255 | reader: R, 256 | format: String, 257 | ) -> Result { 258 | let sarif = process(reader, format)?; 259 | let json = serde_json::to_string_pretty(&sarif)?; 260 | Ok(json) 261 | } 262 | -------------------------------------------------------------------------------- /serde-sarif/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![doc(html_root_url = "https://docs.rs/serde-sarif/0.8.0")] 2 | 3 | //! This crate provides a type safe [serde](https://serde.rs/) compatible 4 | //! [SARIF](https://sarifweb.azurewebsites.net/) structure. It is intended 5 | //! for use in Rust code which may need to read or write SARIF files. 6 | //! 7 | //! The latest [documentation can be found here](https://docs.rs/serde_sarif). 8 | //! 9 | //! serde is a popular serialization framework for Rust. More information can be 10 | //! 
//! found on the official repository: [https://github.com/serde-rs/serde](https://github.com/serde-rs/serde)
//!
//! SARIF or the Static Analysis Results Interchange Format is an industry
//! standard format for the output of static analysis tools. More information
//! can be found on the official website: [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/).
//!
//! ## Usage
//!
//! For most cases, simply use the root [sarif::Sarif] struct with [serde] to read and
//! write to and from the struct.
//!
//! ## Example
//!
//!```rust
//! use serde_sarif::sarif::Sarif;
//!
//! let sarif: Sarif = serde_json::from_str(
//!   r#"{ "version": "2.1.0", "runs": [] }"#
//! ).unwrap();
//!
//! assert_eq!(
//!   sarif.version.to_string(),
//!   "\"2.1.0\"".to_string()
//! );
//! ```
//!
//! Because many of the [sarif::Sarif] structures contain a lot of optional fields, it is
//! often convenient to use the builder pattern to construct these structs. Each
//! structure has a builder with a default.
//!
//! ## Example
//!
//! ```rust
//! use serde_sarif::sarif::Message;
//!
//! let message = Message::default()
//!   .id("id")
//!   .build();
//! ```
//!
//! This uses [`TypedBuilder`](typed_builder::TypedBuilder)
//! for compile time type checking.
//!
//! ## Internal Implementation Details
//!
//! The root [sarif::Sarif] struct is automatically generated from the latest Sarif
//! JSON schema, this is done at build time (via the buildscript).
//!
//! ## Crate Features
//!
//! This crate contains different features which may be enabled depending on your
//! use case.
//!
//! ### Example
//!
//! ```toml
//! [dependencies]
//! serde-sarif = { version = "*", features = ["clippy-converters"] }
//! ```
//!
//! ### Converters
//! - **clang-tidy-converters** Provides conversions between clang tidy and SARIF types
//! - **clippy-converters** Provides conversions between Clippy and SARIF types
//! - **hadolint-converters** Provides conversions between hadolint and SARIF types
//! - **miri-converters** Provides conversions between cargo miri and SARIF types
//! - **shellcheck-converters** Provides conversions between shellcheck and SARIF types
//!
//! ### Other
//!
//! - **opt-builder** Enables
//!   [`TypedBuilder`](typed_builder::TypedBuilder)
//!   fallback setters for easier conditional building

pub mod converters;
pub mod sarif;

// ---- serde-sarif/src/sarif.rs ----
#![allow(clippy::derive_partial_eq_without_eq)]

use strum_macros::Display;
use strum_macros::EnumString;
use thiserror::Error;

// The bulk of the SARIF types are generated from schema.json by build.rs.
include!(concat!(env!("OUT_DIR"), "/sarif.rs"));

#[doc = "The SARIF format version of this log file."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
pub enum Version {
  #[strum(serialize = "2.1.0")]
  #[serde(rename = "2.1.0")]
  V2_1_0,
}

// todo: should be generated / synced with schema.json
pub static SCHEMA_URL: &str =
  "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0.json";

#[doc = "The role or roles played by the artifact in the analysis."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ArtifactRoles {
  AnalysisTarget,
  Attachment,
  ResponseFile,
  ResultFile,
  StandardStream,
  TracedFile,
  Unmodified,
  Modified,
  Added,
  Deleted,
  Renamed,
  Uncontrolled,
  Driver,
  Extension,
  Translation,
  Taxonomy,
  Policy,
  ReferencedOnCommandLine,
  MemoryContents,
  Directory,
  UserSpecifiedConfiguration,
  ToolSpecifiedConfiguration,
  DebugOutputFile,
}

#[doc = "The SARIF format version of this external properties object."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
pub enum ExternalPropertiesVersion {
  #[strum(serialize = "2.1.0")]
  #[serde(rename = "2.1.0")]
  V2_1_0,
}

#[doc = "A value specifying the severity level of the result."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum NotificationLevel {
  None,
  Note,
  Warning,
  Error,
}

#[doc = "Specifies the failure level for the report."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ReportingConfigurationLevel {
  None,
  Note,
  Warning,
  Error,
}

#[doc = "A value that categorizes results by evaluation state."]
#[derive(
  Clone, Display, Debug, Serialize, Deserialize, EnumString, PartialEq,
)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ResultKind {
  NotApplicable,
  Pass,
  Fail,
  Review,
  Open,
  Informational,
}

#[doc = "A value specifying the severity level of the result."]
#[derive(
  Clone, Copy, Display, Debug, Serialize, Deserialize, EnumString, PartialEq,
)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ResultLevel {
  None,
  Note,
  Warning,
  Error,
}

#[doc = "The state of a result relative to a baseline of a previous run."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ResultBaselineState {
  New,
  Unchanged,
  Updated,
  Absent,
}

#[doc = "Specifies the unit in which the tool measures columns."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum ResultColumnKind {
  Utf16CodeUnits,
  UnicodeCodePoints,
}

// NOTE(review): "Supression" (here and below) is a typo for "Suppression".
// Renaming the public enum would be a breaking change, so it is only
// flagged; consider a deprecated alias in the next major version.
#[doc = "A string that indicates where the suppression is persisted."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum SupressionKind {
  InSource,
  External,
}

#[doc = "A string that indicates the review status of the suppression."]
#[derive(Display, Debug, Serialize, Deserialize, EnumString)]
#[serde(rename_all = "camelCase")]
#[strum(serialize_all = "camelCase")]
pub enum SupressionStatus {
  Accepted,
  UnderReview,
}

#[doc = "Specifies the importance of this location in understanding the code flow in which it occurs. The order from most to least important is \"essential\", \"important\", \"unimportant\".
Default: \"important\"."] 148 | #[derive(Display, Debug, Serialize, Deserialize, EnumString)] 149 | #[serde(rename_all = "camelCase")] 150 | #[strum(serialize_all = "camelCase")] 151 | pub enum ThreadFlowLocationImportance { 152 | Important, 153 | Essential, 154 | } 155 | 156 | #[doc = "The kinds of data contained in this object."] 157 | #[derive(Display, Debug, Serialize, Deserialize, EnumString)] 158 | #[serde(rename_all = "camelCase")] 159 | #[strum(serialize_all = "camelCase")] 160 | pub enum ToolComponentContents { 161 | LocalizedData, 162 | NonLocalizedData, 163 | } 164 | 165 | #[derive(Error, Debug)] 166 | pub enum BuilderError { 167 | #[error("uninitialized field: {0}")] 168 | UninitializedField(&'static str), 169 | } 170 | 171 | // Note that due to the blanket implementation in core, TryFrom> 172 | // results in a compiler error. 173 | // https://github.com/rust-lang/rust/issues/50133 174 | impl From<&String> for MultiformatMessageString { 175 | fn from(message: &String) -> Self { 176 | MultiformatMessageString::builder() 177 | .text(message.clone()) 178 | .build() 179 | } 180 | } 181 | 182 | impl From<&String> for Message { 183 | fn from(message: &String) -> Self { 184 | Message::builder().text(message.clone()).build() 185 | } 186 | } 187 | 188 | impl From<&str> for Message { 189 | fn from(message: &str) -> Self { 190 | Message::builder().text(message).build() 191 | } 192 | } 193 | 194 | impl From for Tool { 195 | fn from(tool_component: ToolComponent) -> Self { 196 | Tool::builder().driver(tool_component).build() 197 | } 198 | } 199 | 200 | #[cfg(test)] 201 | mod tests { 202 | use std::str::FromStr; 203 | 204 | use super::*; 205 | macro_rules! 
map { 206 | ($( $key: expr => $val: expr ),*) => {{ 207 | let mut map = ::std::collections::BTreeMap::new(); 208 | $( map.insert($key, serde_json::json!($val)); )* 209 | map 210 | }} 211 | } 212 | 213 | #[test] 214 | fn test_serialize_property_bag_empty() { 215 | let property_bag = PropertyBag::builder().build(); 216 | let json = serde_json::to_string_pretty(&property_bag).unwrap(); 217 | let json_expected = r#"{}"#; 218 | assert_eq!(json, json_expected); 219 | } 220 | 221 | #[test] 222 | fn test_serialize_property_bag_additional_properties() { 223 | let property_bag = PropertyBag::builder() 224 | .additional_properties(map!["key1".to_string() => "value1"]) 225 | .build(); 226 | let json = serde_json::to_string_pretty(&property_bag).unwrap(); 227 | let json_expected = r#"{ 228 | "key1": "value1" 229 | }"#; 230 | assert_eq!(json, json_expected); 231 | } 232 | 233 | #[test] 234 | fn test_deserialize_property_bag_empty() { 235 | let json = r#"{}"#; 236 | let property_bag: PropertyBag = serde_json::from_str(json).unwrap(); 237 | let property_bag_expected = PropertyBag::builder().build(); 238 | assert_eq!(property_bag, property_bag_expected); 239 | } 240 | 241 | #[test] 242 | fn test_deserialize_property_bag_additional_properties() { 243 | let json = r#"{ 244 | "key1": "value1" 245 | }"#; 246 | let property_bag: PropertyBag = serde_json::from_str(json).unwrap(); 247 | let property_bag_expected = PropertyBag::builder() 248 | .additional_properties(map!["key1".to_string() => "value1"]) 249 | .build(); 250 | assert_eq!(property_bag, property_bag_expected); 251 | } 252 | 253 | #[test] 254 | fn test_serialize_resultkind() { 255 | assert_eq!( 256 | serde_json::to_string(&ResultKind::Fail).unwrap(), 257 | "\"fail\"" 258 | ); 259 | } 260 | 261 | #[test] 262 | fn test_parse_utf16codeunits() { 263 | let v = ResultColumnKind::from_str("utf16CodeUnits").unwrap(); 264 | assert!(matches!(v, ResultColumnKind::Utf16CodeUnits)); 265 | } 266 | } 267 | 
// ---- serde-sarif/tests/version-numbers.rs ----

// Keeps version strings referenced in the docs in sync with Cargo.toml;
// both assertions are provided by the `version-sync` crate.
#[test]
fn test_readme_deps() {
  version_sync::assert_markdown_deps_updated!("README.md");
}

#[test]
fn test_html_root_url() {
  version_sync::assert_html_root_url_updated!("src/lib.rs");
}

# ---- shellcheck-sarif/Cargo.toml ----
[package]
name = "shellcheck-sarif"
version = "0.8.0"
authors = ["Paul Sastrasinh <psastras@gmail.com>"]
edition = "2018"
description = "Convert shellcheck output to SARIF"
license = "MIT"
readme = "README.md"
keywords = ["sarif", "shellcheck", "shell", "cli"]
categories = ["command-line-utilities"]
homepage = "https://psastras.github.io/sarif-rs/"
documentation = "https://docs.rs/shellcheck_sarif"
repository = "https://github.com/psastras/sarif-rs"

[badges]
github = { repository = "psastras/sarif-rs" }

[[bin]]
name = "shellcheck-sarif"
path = "src/bin.rs"

[dependencies]
anyhow = "1.0.98"
# Pulls the converter logic from the sibling library crate; only the
# shellcheck feature is compiled in.
serde-sarif = { path = "../serde-sarif", version = "0.8.0", features = [
  "shellcheck-converters",
] }
clap = { version = "4.5.40", features = ["derive"] }

[dev-dependencies]
version-sync = "0.9"

# Tells cargo-binstall where release binaries live.
[package.metadata.binstall]
pkg-url = "{ repo }/releases/download/{ name }-v{ version }/{ name }-{ target }"
pkg-fmt = "bin"

# ---- shellcheck-sarif/LICENSE (symlink) ----
../LICENSE
# ---- shellcheck-sarif/README.md:
-------------------------------------------------------------------------------- 1 | [![Workflow Status](https://github.com/psastras/sarif-rs/workflows/main/badge.svg)](https://github.com/psastras/sarif-rs/actions?query=workflow%3A%22main%22) 2 | 3 | # shellcheck-sarif 4 | 5 | This crate provides a command line tool to convert `shellcheck` diagnostic 6 | output into SARIF. 7 | 8 | The latest [documentation can be found here](https://docs.rs/shellcheck_sarif). 9 | 10 | shellcheck is a popular linter / static analysis tool for shell scripts. More 11 | information can be found on the official repository: 12 | [https://github.com/koalaman/shellcheck](https://github.com/koalaman/shellcheck) 13 | 14 | SARIF or the Static Analysis Results Interchange Format is an industry standard 15 | format for the output of static analysis tools. More information can be found on 16 | the official website: 17 | [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 18 | 19 | ## Installation 20 | 21 | `shellcheck-sarif` may be installed via `cargo` 22 | 23 | ```shell 24 | cargo install shellcheck-sarif 25 | ``` 26 | 27 | via [cargo-binstall](https://github.com/cargo-bins/cargo-binstall) 28 | 29 | ```shell 30 | cargo binstall shellcheck-sarif 31 | ``` 32 | 33 | or downloaded directly from Github Releases 34 | 35 | ```shell 36 | # make sure to adjust the target and version (you may also want to pin to a specific version) 37 | curl -sSL https://github.com/psastras/sarif-rs/releases/download/shellcheck-sarif-v0.8.0/shellcheck-sarif-x86_64-unknown-linux-gnu -o shellcheck-sarif 38 | ``` 39 | 40 | ### Fedora Linux 41 | 42 | ```shell 43 | sudo dnf install <crate> # ex.
cargo binstall shellcheck-sarif 44 | ``` 45 | 46 | ### Nix 47 | 48 | Through the `nix` cli, 49 | 50 | ```shell 51 | nix --accept-flake-config profile install github:psastras/sarif-rs#shellcheck-sarif 52 | ``` 53 | 54 | ## Usage 55 | 56 | For most cases, simply run `shellcheck` with `json` output and pipe the results 57 | into `shellcheck-sarif`. 58 | 59 | ## Example 60 | 61 | ```shell 62 | shellcheck -f json shellscript.sh | shellcheck-sarif 63 | ``` 64 | 65 | If you are using Github Actions, SARIF is useful for integrating with Github 66 | Advanced Security (GHAS), which can show code alerts in the "Security" tab of 67 | your repository. 68 | 69 | After uploading `shellcheck-sarif` output to Github, `shellcheck` diagnostics 70 | are available in GHAS. 71 | 72 | ## Example 73 | 74 | ```yaml 75 | on: 76 | workflow_run: 77 | workflows: ["main"] 78 | branches: [main] 79 | types: [completed] 80 | 81 | name: sarif 82 | 83 | jobs: 84 | upload-sarif: 85 | runs-on: ubuntu-latest 86 | if: ${{ github.ref == 'refs/heads/main' }} 87 | steps: 88 | - uses: actions/checkout@v2 89 | - uses: actions-rs/toolchain@v1 90 | with: 91 | profile: minimal 92 | toolchain: stable 93 | override: true 94 | - uses: Swatinem/rust-cache@v1 95 | - run: cargo install shellcheck-sarif sarif-fmt 96 | - run: shellcheck -f json shellscript.sh | shellcheck-sarif | tee 97 | results.sarif | sarif-fmt 98 | - name: Upload SARIF file 99 | uses: github/codeql-action/upload-sarif@v1 100 | with: 101 | sarif_file: results.sarif 102 | ``` 103 | 104 | License: MIT 105 | -------------------------------------------------------------------------------- /shellcheck-sarif/cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | # 4 | # Lines starting with "#" are comments. 5 | # Configuration options are organized into tables and keys. 
6 | # See documentation for more information on available options. 7 | 8 | [changelog] 9 | # changelog header 10 | header = """ 11 | # Changelog\n 12 | """ 13 | # template for the changelog body 14 | # https://keats.github.io/tera/docs/#introduction 15 | body = """ 16 | {% if version %}\ 17 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 18 | {% else %}\ 19 | ## [unreleased] 20 | {% endif %}\ 21 | {% for group, commits in commits | group_by(attribute="group") %} 22 | ### {{ group | striptags | trim | upper_first }} 23 | {% for commit in commits %} 24 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 25 | {% if commit.breaking %}[**breaking**] {% endif %}\ 26 | {{ commit.message | upper_first }}\ 27 | {% endfor %} 28 | {% endfor %}\n 29 | """ 30 | # template for the changelog footer 31 | footer = """""" 32 | # remove the leading and trailing s 33 | trim = true 34 | # postprocessors 35 | postprocessors = [ 36 | # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL 37 | ] 38 | 39 | [git] 40 | # parse the commits based on https://www.conventionalcommits.org 41 | conventional_commits = true 42 | # filter out the commits that are not conventional 43 | filter_unconventional = true 44 | # process each line of a commit as an individual commit 45 | split_commits = false 46 | # regex for preprocessing the commit messages 47 | commit_preprocessors = [ 48 | # Replace issue numbers 49 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 50 | # Check spelling of the commit with https://github.com/crate-ci/typos 51 | # If the spelling is incorrect, it will be automatically fixed. 
52 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 53 | ] 54 | # regex for parsing and grouping commits 55 | commit_parsers = [ 56 | { message = "^feat", group = "🚀 Features" }, 57 | { message = "^fix", group = "🐛 Bug Fixes" }, 58 | { message = "^doc", group = "📚 Documentation" }, 59 | { message = "^perf", group = "⚡ Performance" }, 60 | { message = "^refactor", group = "🚜 Refactor" }, 61 | { message = "^style", group = "🎨 Styling" }, 62 | { message = "^test", group = "🧪 Testing" }, 63 | { message = "^chore\\(release\\): prepare for", skip = true }, 64 | { message = "^chore\\(deps.*\\)", skip = true }, 65 | { message = "^chore\\(pr\\)", skip = true }, 66 | { message = "^chore\\(pull\\)", skip = true }, 67 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 68 | { body = ".*security", group = "🛡️ Security" }, 69 | { message = "^revert", group = "◀️ Revert" }, 70 | ] 71 | # protect breaking changes from being skipped due to matching a skipping commit_parser 72 | protect_breaking_commits = false 73 | # filter out the commits that are not matched by commit parsers 74 | filter_commits = true 75 | # regex for matching git tags 76 | tag_pattern = "shellcheck-sarif-v[0-9].*" 77 | # regex for skipping tags 78 | # skip_tags = "" 79 | # regex for ignoring tags 80 | # ignore_tags = "" 81 | # sort the tags topologically 82 | topo_order = false 83 | # sort the commits inside sections by oldest/newest order 84 | sort_commits = "oldest" 85 | # limit the number of commits included in the changelog. 86 | # limit_commits = 42 87 | -------------------------------------------------------------------------------- /shellcheck-sarif/src/bin.rs: -------------------------------------------------------------------------------- 1 | #![doc(html_root_url = "https://docs.rs/shellcheck-sarif/0.8.0")] 2 | 3 | //! This crate provides a command line tool to convert `shellcheck` diagnostic 4 | //! output into SARIF. 5 | //! 6 | //! 
The latest [documentation can be found here](https://docs.rs/shellcheck_sarif). 7 | //! 8 | //! shellcheck is a popular linter / static analysis tool for shell scripts. More information 9 | //! can be found on the official repository: [https://github.com/koalaman/shellcheck](https://github.com/koalaman/shellcheck) 10 | //! 11 | //! SARIF or the Static Analysis Results Interchange Format is an industry 12 | //! standard format for the output of static analysis tools. More information 13 | //! can be found on the official website: [https://sarifweb.azurewebsites.net/](https://sarifweb.azurewebsites.net/). 14 | //! 15 | //! ## Installation 16 | //! 17 | //! `shellcheck-sarif` may be installed via `cargo` 18 | //! 19 | //! ```shell 20 | //! cargo install shellcheck-sarif 21 | //! ``` 22 | //! 23 | //! or downloaded directly from Github Releases 24 | //! 25 | //!```shell 26 | //! # make sure to adjust the target and version (you may also want to pin to a specific version) 27 | //! curl -sSL https://github.com/psastras/sarif-rs/releases/download/shellcheck-sarif-latest/shellcheck-sarif-x86_64-unknown-linux-gnu -o shellcheck-sarif 28 | //! ``` 29 | //! 30 | //! ## Usage 31 | //! 32 | //! For most cases, simply run `shellcheck` with `json` output and pipe the 33 | //! results into `shellcheck-sarif`. 34 | //! 35 | //! ## Example 36 | //! 37 | //!```shell 38 | //! shellcheck -f json shellscript.sh | shellcheck-sarif 39 | //! ``` 40 | //! 41 | //! If you are using Github Actions, SARIF is useful for integrating with 42 | //! Github Advanced Security (GHAS), which can show code alerts in the 43 | //! "Security" tab of your repository. 44 | //! 45 | //! After uploading `shellcheck-sarif` output to Github, `shellcheck` diagnostics 46 | //! are available in GHAS. 47 | //! 48 | //! ## Example 49 | //! 50 | //! ```yaml 51 | //! on: 52 | //! workflow_run: 53 | //! workflows: ["main"] 54 | //! branches: [main] 55 | //! types: [completed] 56 | //! 57 | //! name: sarif 58 | //! 
59 | //! jobs: 60 | //! upload-sarif: 61 | //! runs-on: ubuntu-latest 62 | //! if: ${{ github.ref == 'refs/heads/main' }} 63 | //! steps: 64 | //! - uses: actions/checkout@v2 65 | //! - uses: actions-rs/toolchain@v1 66 | //! with: 67 | //! profile: minimal 68 | //! toolchain: stable 69 | //! override: true 70 | //! - uses: Swatinem/rust-cache@v1 71 | //! - run: cargo install shellcheck-sarif sarif-fmt 72 | //! - run: 73 | //! shellcheck -f json shellscript.sh | 74 | //! shellcheck-sarif | tee results.sarif | sarif-fmt 75 | //! - name: Upload SARIF file 76 | //! uses: github/codeql-action/upload-sarif@v1 77 | //! with: 78 | //! sarif_file: results.sarif 79 | //! ``` 80 | //! 81 | 82 | use anyhow::Result; 83 | use clap::Parser; 84 | use std::fs::File; 85 | use std::io::{BufReader, BufWriter, Read, Write}; 86 | 87 | #[derive(Parser, Debug)] 88 | #[command( 89 | version, 90 | about = "Convert shellcheck warnings into SARIF", 91 | after_help = "The expected input is generated by running 'shellcheck -f json'.", 92 | long_about = None, 93 | )] 94 | struct Args { 95 | /// input file; reads from stdin if none is given 96 | #[arg(short, long)] 97 | input: Option<String>, 98 | /// input format; json or json1; defaults to 'json' 99 | #[arg(short, long, default_value = "json")] 100 | format: Option<String>, 101 | 102 | /// output file; writes to stdout if none is given 103 | #[arg(short, long)] 104 | output: Option<String>, 105 | } 106 | 107 | fn main() -> Result<()> { 108 | let args = Args::parse(); 109 | 110 | let read = match args.input { 111 | Some(path) => Box::new(File::open(path)?) as Box<dyn Read>, 112 | None => Box::new(std::io::stdin()) as Box<dyn Read>, 113 | }; 114 | let reader = BufReader::new(read); 115 | 116 | let format = match args.format { 117 | Some(format) => format, 118 | None => "json".to_string(), 119 | }; 120 | 121 | let write = match args.output { 122 | Some(path) => Box::new(File::create(path)?)
as Box<dyn Write>, 123 | None => Box::new(std::io::stdout()) as Box<dyn Write>, 124 | }; 125 | let writer = BufWriter::new(write); 126 | 127 | serde_sarif::converters::shellcheck::parse_to_writer(reader, writer, format) 128 | } 129 | -------------------------------------------------------------------------------- /shellcheck-sarif/tests/version-numbers.rs: -------------------------------------------------------------------------------- 1 | #[test] 2 | fn test_readme_deps() { 3 | version_sync::assert_markdown_deps_updated!("README.md"); 4 | } 5 | 6 | #[test] 7 | fn test_html_root_url() { 8 | version_sync::assert_html_root_url_updated!("src/bin.rs"); 9 | } 10 | --------------------------------------------------------------------------------