├── .dockerignore ├── .github └── workflows │ ├── license-enforcement.yml │ ├── release-docs.yml │ ├── release-image.yml │ ├── release-lgc.yml │ ├── release-plugins.yml │ └── validate.yml ├── .gitignore ├── .licenserc.yaml ├── .moon ├── tasks │ ├── rust.yml │ └── tag-plugin.yml ├── toolchain.yml └── workspace.yml ├── .prototools ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── LICENSE ├── README.md ├── apps └── lgc │ ├── Cargo.lock │ ├── Cargo.toml │ ├── moon.yml │ ├── src │ ├── commands.rs │ ├── commands │ │ ├── apply.rs │ │ ├── destroy.rs │ │ ├── init.rs │ │ ├── ping.rs │ │ ├── plan.rs │ │ ├── services.rs │ │ └── validate.rs │ ├── lgc.rs │ └── lib.rs │ └── tests │ ├── common.rs │ ├── test_init.rs │ ├── test_services.rs │ └── test_validate.rs ├── docs ├── moon.yml ├── package.json ├── pnpm-lock.yaml ├── src │ ├── .vitepress │ │ ├── config.mts │ │ └── theme │ │ │ ├── components │ │ │ └── plugins │ │ │ │ ├── PluginStatus.vue │ │ │ │ ├── PluginsIndexPage.vue │ │ │ │ ├── common.js │ │ │ │ └── plugins.data.js │ │ │ └── index.ts │ ├── commands │ │ ├── apply.md │ │ ├── destroy.md │ │ ├── init.md │ │ ├── ping.md │ │ ├── plan.md │ │ ├── services.md │ │ └── validate.md │ ├── concepts │ │ ├── detections.md │ │ ├── identifiers.md │ │ ├── plugins.md │ │ └── policies.md │ ├── developers │ │ ├── compiling.md │ │ ├── docker-images.md │ │ ├── how-to-create-plugins.md │ │ └── state.md │ ├── essentials │ │ ├── configuration.md │ │ ├── gitlab.md │ │ ├── installation.md │ │ └── quickstart.md │ ├── index.md │ ├── plugins │ │ ├── crowdstrike.md │ │ ├── elastic.md │ │ ├── google-chronicle.md │ │ ├── index.md │ │ ├── limacharlie.md │ │ ├── microsoft-sentinel.md │ │ ├── paloalto-cortex.md │ │ ├── sekoia.md │ │ ├── sigma.md │ │ ├── splunk.md │ │ ├── tanium.md │ │ └── yara.md │ ├── public │ │ ├── favicon.ico │ │ ├── img │ │ │ ├── gitlab-3-stages.png │ │ │ ├── gitlab-cicd-variables.png │ │ │ ├── logcraft-cli-overview.png │ │ │ └── logcraft-cli.webp │ │ ├── logo.png │ │ └── robots.txt │ └── support.md └── vale │ ├── .vale.ini │ └── styles │ ├── Google │ ├── AMPM.yml │ ├── Acronyms.yml │ ├── Colons.yml │ ├── Contractions.yml │ ├── DateFormat.yml │ ├── Ellipses.yml │ ├── EmDash.yml │ ├── Exclamation.yml │ ├── FirstPerson.yml │ ├── Gender.yml │ ├── GenderBias.yml │ ├── HeadingPunctuation.yml │ ├── Headings.yml │ ├── Latin.yml │ ├── LyHyphens.yml │ ├── OptionalPlurals.yml │ ├── Ordinal.yml │ ├── OxfordComma.yml │ ├── Parens.yml │ ├── Passive.yml │ ├── Periods.yml │ ├── Quotes.yml │ ├── Ranges.yml │ ├── Semicolons.yml │ ├── Slang.yml │ ├── Spacing.yml │ ├── Spelling.yml │ ├── Units.yml │ ├── We.yml │ ├── Will.yml │ ├── WordList.yml │ ├── meta.json │ └── vocab.txt │ ├── config │ └── vocabularies │ │ └── LogCraft │ │ ├── accept.txt │ │ └── reject.txt │ └── proselint │ ├── Airlinese.yml │ ├── AnimalLabels.yml │ ├── Annotations.yml │ ├── Apologizing.yml │ ├── Archaisms.yml │ ├── But.yml │ ├── Cliches.yml │ ├── CorporateSpeak.yml │ ├── Currency.yml │ ├── Cursing.yml │ ├── DateCase.yml │ ├── DateMidnight.yml │ ├── DateRedundancy.yml │ ├── DateSpacing.yml │ ├── DenizenLabels.yml │ ├── Diacritical.yml │ ├── GenderBias.yml │ ├── GroupTerms.yml │ ├── Hedging.yml │ ├── Hyperbole.yml │ ├── Jargon.yml │ ├── LGBTOffensive.yml │ ├── LGBTTerms.yml │ ├── Malapropisms.yml │ ├── Needless.yml │ ├── Nonwords.yml │ ├── Oxymorons.yml │ ├── P-Value.yml │ ├── RASSyndrome.yml │ ├── README.md │ ├── Skunked.yml │ ├── Spelling.yml │ ├── Typography.yml │ ├── Uncomparables.yml │ ├── Very.yml │ └── meta.json ├── libs ├── bindings │ ├── plugin.wit │ └── 
world.wit ├── lgc-common │ ├── Cargo.toml │ ├── moon.yml │ └── src │ │ ├── configuration.rs │ │ ├── detections.rs │ │ ├── diff.rs │ │ ├── lib.rs │ │ ├── plugins │ │ ├── manager.rs │ │ └── mod.rs │ │ ├── state │ │ ├── backends │ │ │ ├── http.rs │ │ │ ├── local.rs │ │ │ └── mod.rs │ │ └── mod.rs │ │ └── utils.rs ├── lgc-policies │ ├── Cargo.toml │ ├── moon.yml │ ├── src │ │ ├── helpers.rs │ │ ├── lib.rs │ │ ├── policy.rs │ │ └── schema.rs │ └── tests │ │ └── policy_tests.rs └── lgc-runtime │ ├── Cargo.toml │ ├── moon.yml │ └── src │ ├── engine.rs │ ├── lib.rs │ └── state.rs ├── plugins ├── sample │ ├── Cargo.toml │ ├── moon.yml │ └── src │ │ ├── backend.rs │ │ ├── lib.rs │ │ └── schema.rs ├── sentinel │ ├── Cargo.toml │ ├── README.md │ ├── moon.yml │ └── src │ │ ├── helpers.rs │ │ ├── lib.rs │ │ └── schemas │ │ ├── mod.rs │ │ ├── rule.rs │ │ ├── settings.rs │ │ └── types.rs └── splunk │ ├── Cargo.toml │ ├── README.md │ ├── moon.yml │ └── src │ ├── lib.rs │ └── schemas │ ├── mod.rs │ ├── rule.rs │ ├── settings.rs │ └── types.rs └── scripts ├── package-plugins.sh └── package.sh /.dockerignore: -------------------------------------------------------------------------------- 1 | # Version control 2 | .git 3 | .gitignore 4 | .github 5 | 6 | # Docker-related files 7 | Dockerfile 8 | .dockerignore 9 | 10 | # Documentation and miscellaneous files 11 | docs 12 | README.md 13 | licenserc.toml 14 | .prototools 15 | .moon 16 | scripts 17 | 18 | # Cargo build output (generated artifacts) 19 | target -------------------------------------------------------------------------------- /.github/workflows/license-enforcement.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | branches: 7 | - main 8 | 9 | name: License enforcement 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | licenses: 16 | runs-on: ubuntu-latest 17 | timeout-minutes: 10 18 | env: 19 | FORCE_COLOR: 1 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Check License Header 23 | uses: apache/skywalking-eyes/header@main -------------------------------------------------------------------------------- /.github/workflows/release-docs.yml: -------------------------------------------------------------------------------- 1 | name: Publish user docs 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'docs/**' 7 | 8 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages 9 | permissions: 10 | contents: read 11 | pages: write 12 | id-token: write 13 | 14 | # Allow only one concurrent deployment, skipping runs queued between the run 15 | # in-progress and latest queued. However, do NOT cancel in-progress runs as 16 | # we want to allow these production deployments to complete. 
17 | concurrency: 18 | group: "pages" 19 | cancel-in-progress: false 20 | 21 | 22 | jobs: 23 | build: 24 | runs-on: ubuntu-latest 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v4 28 | - name: Setup moon 29 | uses: 'moonrepo/setup-toolchain@v0' 30 | with: 31 | auto-install: true 32 | - name: verify documentation 33 | id: check 34 | uses: errata-ai/vale-action@v2.1.1 35 | with: 36 | files: docs/src 37 | fail_on_error: true 38 | vale_flags: "--config=docs/vale/.vale.ini" 39 | - name: build the docs 40 | id: build 41 | run: moon docs:build 42 | - name: Upload artifacts 43 | id: deployment 44 | uses: actions/upload-pages-artifact@v3 45 | with: 46 | path: docs/src/.vitepress/dist 47 | 48 | deploy: 49 | environment: 50 | name: github-pages 51 | url: ${{ steps.deployment.outputs.page_url }} 52 | runs-on: ubuntu-latest 53 | needs: build 54 | steps: 55 | - name: Deploy to GitHub Pages 56 | id: deployment 57 | uses: actions/deploy-pages@v4 58 | -------------------------------------------------------------------------------- /.github/workflows/release-image.yml: -------------------------------------------------------------------------------- 1 | name: Release LogCraft CLI docker image 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'lgc/**' 7 | 8 | env: 9 | REGISTRY: ghcr.io 10 | IMAGE_NAME: ${{ github.repository }} 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | permissions: 16 | contents: read 17 | packages: write 18 | # with sigstore/fulcio when running outside of PRs. 19 | id-token: write 20 | 21 | steps: 22 | - name: Checkout repository 23 | uses: actions/checkout@v4 24 | 25 | # Install the cosign tool except on PR 26 | - name: Install cosign 27 | if: github.event_name != 'pull_request' 28 | uses: sigstore/cosign-installer@v3 29 | with: 30 | cosign-release: 'v2.2.4' 31 | 32 | # Set up BuildKit Docker container builder 33 | - name: Set up Docker Buildx 34 | uses: docker/setup-buildx-action@v3 35 | 36 | # Login against a Docker registry except on PR 37 | - name: Log into registry ${{ env.REGISTRY }} 38 | if: github.event_name != 'pull_request' 39 | uses: docker/login-action@v3 40 | with: 41 | registry: ${{ env.REGISTRY }} 42 | username: ${{ github.actor }} 43 | password: ${{ secrets.GITHUB_TOKEN }} 44 | 45 | # Extract metadata (tags, labels) for Docker 46 | - name: Extract Docker metadata 47 | id: meta 48 | uses: docker/metadata-action@v5 49 | with: 50 | images: ${{ env.REGISTRY }}/logcraftio/logcraft-cli 51 | tags: | 52 | type=match,pattern=lgc/(\d.\d.\d),group=1 53 | 54 | # Build and push Docker image with Buildx (don't push on PR) 55 | - name: Build and push Docker image 56 | id: build-and-push 57 | uses: docker/build-push-action@v6 58 | with: 59 | context: . 60 | platforms: linux/amd64,linux/arm64 61 | push: ${{ github.event_name != 'pull_request' }} 62 | tags: ${{ steps.meta.outputs.tags }} 63 | labels: ${{ steps.meta.outputs.labels }} 64 | cache-from: type=gha 65 | cache-to: type=gha,mode=max 66 | 67 | # Sign the resulting Docker image digest except on PRs. 
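# Signing is keyless: cosign authenticates with the job's GitHub OIDC identity token
# (hence the `id-token: write` permission above), so no long-lived signing key is stored in the repository.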
68 | - name: Sign the published Docker image 69 | if: ${{ github.event_name != 'pull_request' }} 70 | env: 71 | TAGS: ${{ steps.meta.outputs.tags }} 72 | DIGEST: ${{ steps.build-and-push.outputs.digest }} 73 | run: echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} -------------------------------------------------------------------------------- /.github/workflows/release-lgc.yml: -------------------------------------------------------------------------------- 1 | name: LogCraft CLI Release 2 | 3 | permissions: 4 | contents: "write" 5 | 6 | on: 7 | push: 8 | tags: 9 | - 'lgc/**' 10 | 11 | jobs: 12 | build: 13 | strategy: 14 | matrix: 15 | os: [ubuntu-24.04, macos-15] 16 | runs-on: ${{ matrix.os }} 17 | steps: 18 | - name: Checkout code 19 | uses: actions/checkout@v4 20 | with: 21 | fetch-depth: 0 22 | 23 | - uses: 'moonrepo/setup-toolchain@v0' 24 | with: 25 | auto-install: true 26 | 27 | - run: "moon '#plugin:build'" 28 | - run: "moon lgc:build" 29 | 30 | - run: "bash scripts/package.sh" 31 | 32 | - name: Renaming OS and ARCH to lowercase 33 | id: toLowerCase 34 | run: | 35 | echo osLowercase=$(echo $RUNNER_OS | tr '[:upper:]' '[:lower:]') >> $GITHUB_OUTPUT 36 | echo archLowercase=$(echo $RUNNER_ARCH | tr '[:upper:]' '[:lower:]') >> $GITHUB_OUTPUT 37 | 38 | - name: Upload CLI Artifacts 39 | uses: actions/upload-artifact@v4 40 | with: 41 | name: lgc-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}-tarballs 42 | path: | 43 | releases/lgc-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz 44 | releases/lgc-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz.sha256 45 | releases/lgc-minimal-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz 46 | releases/lgc-minimal-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz.sha256 47 | 48 | - name: Create GitHub Release 49 | uses: ncipollo/release-action@v1 50 | with: 51 | allowUpdates: true 52 | makeLatest: true 53 | artifacts: | 54 | releases/lgc-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz 55 | releases/lgc-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz.sha256 56 | releases/lgc-minimal-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz 57 | releases/lgc-minimal-${{ steps.toLowerCase.outputs.osLowercase }}-${{ steps.toLowerCase.outputs.archLowercase }}.tar.gz.sha256 58 | name: ${{ github.ref_name }} 59 | body: | 60 | LogCraft CLI Release 61 | -------------------------------------------------------------------------------- /.github/workflows/release-plugins.yml: -------------------------------------------------------------------------------- 1 | name: LogCraft Plugin Release 2 | 3 | permissions: 4 | contents: "write" 5 | 6 | on: 7 | push: 8 | tags: 9 | - 'plugin/**' 10 | 11 | jobs: 12 | plan: 13 | runs-on: ubuntu-latest 14 | outputs: 15 | name: ${{ steps.name.outputs.name }} 16 | env: 17 | TAG: ${{ github.ref_name }} 18 | steps: 19 | - name: Get plugin name 20 | id: name 21 | run: | 22 | if [[ "${TAG}" =~ ^plugin/([^/]+)/([^/]+)$ ]]; then 23 | echo "name=${BASH_REMATCH[1]}" >> $GITHUB_OUTPUT 24 | else 25 | echo "Error: Invalid tag format: ${TAG}" >&2 26 | exit 1 27 | fi 28 | build: 29 | runs-on: ubuntu-24.04 30 | needs: plan 31 | steps: 32 | - name: Checkout 
code 33 | uses: actions/checkout@v4 34 | with: 35 | fetch-depth: 0 36 | 37 | - uses: 'moonrepo/setup-toolchain@v0' 38 | with: 39 | auto-install: true 40 | 41 | - run: "moon '#plugin:build'" 42 | - run: "bash scripts/package-plugins.sh" 43 | 44 | - name: Upload Plugins Tarball 45 | uses: actions/upload-artifact@v4 46 | with: 47 | name: plugins-tarball 48 | path: | 49 | releases/plugins.tar.gz 50 | releases/plugins.tar.gz.sha256 51 | 52 | - name: Create GitHub Release 53 | uses: ncipollo/release-action@v1 54 | with: 55 | artifacts: | 56 | releases/plugins.tar.gz 57 | releases/plugins.tar.gz.sha256 58 | name: ${{ github.ref_name }} 59 | body: | 60 | LogCraft CLI Plugins 61 | -------------------------------------------------------------------------------- /.github/workflows/validate.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | branches: 7 | - main 8 | 9 | name: Code testing and formatting 10 | 11 | jobs: 12 | validate: 13 | runs-on: macos-15 14 | steps: 15 | - name: Checkout code 16 | uses: actions/checkout@v4 17 | with: 18 | fetch-depth: 0 19 | # Prepare the toolchain 20 | - uses: 'moonrepo/setup-toolchain@v0' 21 | # Check code formatting 22 | - run: "moon :format --affected --remote" 23 | # Linting with clippy 24 | - run: "moon :clippy --affected --remote" 25 | # Run tests 26 | - run: "moon :test --affected --remote" -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # rust 2 | target/ 3 | debug/ 4 | **/*.rs.bk 5 | 6 | # moon 7 | .moon/cache 8 | .moon/docker 9 | 10 | # Editor 11 | *.swp 12 | *.swo 13 | .idea 14 | .vscode 15 | 16 | # Vue/JS 17 | node_modules/ 18 | dist/ 19 | cache/ 20 | 21 | # MISC 22 | .DS_Store 23 | 24 | # Custom releases 25 | releases/ 26 | 27 | # Dev folder for testing purposes 28 | dev/ -------------------------------------------------------------------------------- /.licenserc.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2023 LogCraft.io. 2 | # SPDX-License-Identifier: MPL-2.0 3 | 4 | header: 5 | license: 6 | content: | 7 | Copyright (c) 2023 LogCraft.io. 
8 | SPDX-License-Identifier: MPL-2.0 9 | 10 | paths-ignore: 11 | # Moon 12 | - ".moon/**" 13 | # Documentation 14 | - "docs/**" 15 | # Scripts 16 | - "scripts/**" 17 | # Unneeded 18 | - "Dockerfile" 19 | # Generated files 20 | - ".github/**" 21 | # Unmanaged 22 | - "**/Cargo.*" 23 | - ".prototools" 24 | - ".dockerignore" 25 | - ".gitignore" 26 | - "LICENSE" 27 | - "**/moon.yml" 28 | - "**/README.md" 29 | 30 | comment: on-failure 31 | 32 | dependency: 33 | files: 34 | - Cargo.toml 35 | -------------------------------------------------------------------------------- /.moon/tasks/rust.yml: -------------------------------------------------------------------------------- 1 | tasks: 2 | build: 3 | command: 'cargo build --release' 4 | deps: 5 | - '^:build' 6 | inputs: 7 | - 'src/**/*' 8 | - 'Cargo.toml' 9 | format: 10 | command: 'cargo fmt --all -- --check' 11 | test: 12 | command: 'cargo test' 13 | clippy: 14 | command: 'cargo clippy' 15 | check: 16 | command: 'cargo check' 17 | run: 18 | command: 'cargo run' 19 | preset: 'server' 20 | clean: 21 | command: 'cargo clean' 22 | -------------------------------------------------------------------------------- /.moon/tasks/tag-plugin.yml: -------------------------------------------------------------------------------- 1 | tasks: 2 | build: 3 | command: 'cargo build --release --target wasm32-wasip2' 4 | inputs: 5 | - 'src/**/*' 6 | - 'Cargo.toml' -------------------------------------------------------------------------------- /.moon/toolchain.yml: -------------------------------------------------------------------------------- 1 | rust: 2 | version: '1.82.0' 3 | targets: 4 | - 'wasm32-wasip2' 5 | - 'x86_64-unknown-linux-gnu' 6 | 7 | node: 8 | version: '20.9.0' 9 | packageManager: 'pnpm' 10 | pnpm: 11 | version: '10.5.0' -------------------------------------------------------------------------------- /.moon/workspace.yml: -------------------------------------------------------------------------------- 1 | # https://moonrepo.dev/docs/config/workspace 2 | $schema: 'https://moonrepo.dev/schemas/workspace.json' 3 | 4 | # Require a specific version of moon while running commands, otherwise fail. 5 | # versionConstraint: '>=1.0.0' 6 | 7 | # Extend and inherit an external configuration file. Must be a valid HTTPS URL or file system path. 8 | # extends: './shared/workspace.yml' 9 | 10 | # REQUIRED: A map of all projects found within the workspace, or a list or file system globs. 11 | # When using a map, each entry requires a unique project ID as the map key, and a file system 12 | # path to the project folder as the map value. File paths are relative from the workspace root, 13 | # and cannot reference projects located outside the workspace boundary. 14 | projects: 15 | - 'apps/*' 16 | - 'plugins/*' 17 | - 'libs/*' 18 | - 'docs' 19 | 20 | # Configures the version control system to utilize within the workspace. A VCS 21 | # is required for determining touched (added, modified, etc) files, calculating file hashes, 22 | # computing affected files, and much more. 23 | vcs: 24 | # The client to use when managing the repository. 25 | # Accepts "git". Defaults to "git". 26 | manager: 'git' 27 | 28 | # The default branch (master/main/trunk) in the repository for comparing the 29 | # local branch against. For git, this is is typically "master" or "main", 30 | # and must include the remote prefix (before /). 
31 | defaultBranch: 'main' 32 | -------------------------------------------------------------------------------- /.prototools: -------------------------------------------------------------------------------- 1 | moon = "1.31.2" 2 | 3 | [plugins] 4 | moon = "https://raw.githubusercontent.com/moonrepo/moon/master/proto-plugin.toml" 5 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace.package] 2 | edition = "2021" 3 | rust-version = "1.82" 4 | repository = "https://github.com/LogCraftIO/logcraft-cli" 5 | 6 | [workspace] 7 | members = [ 8 | "libs/*", 9 | "apps/*", 10 | "plugins/*" 11 | ] 12 | exclude = ["libs/bindings"] 13 | resolver = "2" 14 | 15 | [profile.release] 16 | opt-level = "s" 17 | lto = true 18 | codegen-units = 1 19 | strip = true 20 | 21 | [workspace.dependencies] 22 | # Local dependencies 23 | lgc = { path = "apps/lgc" } 24 | lgc-common = { path = "libs/lgc-common" } 25 | lgc-runtime = { path = "libs/lgc-runtime" } 26 | lgc-policies = { path = "libs/lgc-policies" } 27 | 28 | # Utils 29 | anyhow = "1.0" 30 | thiserror = "2.0" 31 | console = "0.15" 32 | dialoguer = "0.11" 33 | tracing = {version = "0.1", features = ["log"] } 34 | tracing-subscriber = {version = "0.3", features = ["env-filter", "fmt", "std"] } 35 | once_cell = "1.20" 36 | 37 | # Async 38 | tokio = { version = "1.0", features = ["full"] } 39 | 40 | # Ser / Deser 41 | serde = { version = "1.0.211", features = ["derive"] } 42 | serde_with_macros = "3.12" 43 | serde_path_to_error = "0.1" 44 | serde_json = "1.0" 45 | serde_yaml_ng = "0.10" 46 | toml = "0.8" 47 | 48 | # WASM Related 49 | wasmtime = "30.0" 50 | wasmtime-wasi = "30.0" 51 | wasmtime-wasi-http = "30.0" 52 | wit-bindgen = "0.39" 53 | 54 | # Configuration related 55 | schemars = { version = "0.8" } 56 | jsonschema = { version = "0.29", default-features = false } 57 | 58 | # HTTP related 59 | reqwest = { version = "0.12", features = ["json", "rustls-tls"] } 60 | http = "1.2" 61 | waki = "0.5" 62 | url = "2.5" 63 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # ----- 2 | # Stage 1: Builder 3 | FROM docker.io/library/rust:1.84-alpine AS builder 4 | 5 | # Set dummy SYSROOT to force static linking 6 | ENV SYSROOT=/dummy 7 | 8 | # Install build dependencies 9 | RUN apk add --no-cache musl-dev libressl-dev 10 | 11 | # Set working directory 12 | WORKDIR /build 13 | 14 | # Copy source code and build artifacts 15 | COPY . . 
16 | # Build the CLI 17 | RUN cargo build --release -p lgc 18 | # Build the plugins 19 | RUN rustup target add wasm32-wasip2 20 | RUN cargo build --release --target wasm32-wasip2 \ 21 | -p splunk \ 22 | -p sentinel 23 | 24 | # ----- 25 | # Stage 2: Final image 26 | FROM cgr.dev/chainguard/wolfi-base:latest 27 | 28 | # Define a variable for the installation directory 29 | ENV LOGCRAFT_DIR=/opt/logcraft-cli 30 | ENV PATH="${LOGCRAFT_DIR}:$PATH" 31 | 32 | # Metadata 33 | LABEL org.opencontainers.image.title="LogCraft CLI" \ 34 | org.opencontainers.image.authors="LogCraft " \ 35 | org.opencontainers.image.url="https://github.com/LogCraftIO/logcraft-cli/pkgs/container/logcraft-cli" \ 36 | org.opencontainers.image.documentation="https://docs.logcraft.io/" \ 37 | org.opencontainers.image.source="https://github.com/LogCraftIO/logcraft-cli" \ 38 | org.opencontainers.image.vendor="LogCraft" \ 39 | org.opencontainers.image.licenses="MPL-2.0" \ 40 | org.opencontainers.image.description="Easily build Detection-as-Code pipelines for modern security tools (SIEM, EDR, XDR, ...)" 41 | 42 | # Set the working directory and change ownership 43 | WORKDIR /srv/workspace 44 | 45 | # Copy artifacts from the builder stage using the variable 46 | COPY --from=builder /build/target/release/lgc ${LOGCRAFT_DIR}/lgc 47 | COPY --from=builder /build/target/wasm32-wasip2/release/splunk.wasm ${LOGCRAFT_DIR}/plugins/splunk.wasm 48 | COPY --from=builder /build/target/wasm32-wasip2/release/sentinel.wasm ${LOGCRAFT_DIR}/plugins/sentinel.wasm -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LogCraft CLI 2 | 3 | LogCraft CLI `lgc` is an opensource utility that simplifies the adoption of Detection as Code principles with legacy SIEM, EDR, XDR and any other modern security solutions while leveraging native capabilities of GitLab, GitHub, Bitbucket and other Version Control Systems (VCS). 4 | 5 | --- 6 | 7 | **Documentation**: https://docs.logcraft.io 8 | 9 | **Source Code**: https://github.com/LogCraftIO/logcraft-cli 10 | 11 | --- 12 | 13 | ![LogCraft CLI](./docs/src/public/img/logcraft-cli.webp) 14 | -------------------------------------------------------------------------------- /apps/lgc/Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "lgc" 7 | version = "0.1.0" 8 | dependencies = [ 9 | "lgc-core", 10 | ] 11 | 12 | [[package]] 13 | name = "lgc-core" 14 | version = "0.1.0" 15 | -------------------------------------------------------------------------------- /apps/lgc/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "lgc" 3 | version = "1.1.1" 4 | repository.workspace = true 5 | edition.workspace = true 6 | 7 | [[bin]] 8 | name = "lgc" 9 | path = "src/lgc.rs" 10 | 11 | [target.x86_64-unknown-linux-gnu.dependencies] 12 | openssl = { version = "0.10", features = ["vendored"] } 13 | 14 | [dependencies] 15 | # Local dependencies 16 | lgc-common.workspace = true 17 | lgc-policies.workspace = true 18 | 19 | # Utils 20 | tracing.workspace = true 21 | tracing-subscriber.workspace = true 22 | console.workspace = true 23 | dialoguer.workspace = true 24 | clap = { version = "4.5", features = ["derive", "env", "cargo"] } 25 | envsubst = "0.2" 26 | 27 | # Async 28 | anyhow.workspace = true 29 | tokio.workspace = true 30 | 31 | # Ser / Deser 32 | serde.workspace = true 33 | serde_json.workspace = true 34 | serde_yaml_ng.workspace = true 35 | toml.workspace = true 36 | 37 | # Wasm related 38 | wasmtime.workspace = true 39 | wasmtime-wasi.workspace = true 40 | 41 | # Configuration related 42 | figment = { version = "0.10", features = ["toml", "env"] } 43 | jsonschema.workspace = true 44 | 45 | [dev-dependencies] 46 | assert_cmd = "2.0" 47 | assert_fs = "1.1" 48 | predicates = "3.1" 49 | rexpect = "0.6" 50 | -------------------------------------------------------------------------------- /apps/lgc/moon.yml: -------------------------------------------------------------------------------- 1 | language: 'rust' 2 | type: 'application' 3 | 4 | project: 5 | name: 'lgc' 6 | description: 'LogCraft Command Line Interface (CLI)' 7 | 8 | dependsOn: 9 | - 'lgc-common' 10 | - 'lgc-runtime' 11 | 12 | tasks: 13 | run: 14 | command: 'cargo run' 15 | deps: 16 | - '#plugin:build' 17 | -------------------------------------------------------------------------------- /apps/lgc/src/commands.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | // Export all commands 5 | pub mod apply; 6 | pub mod destroy; 7 | pub mod init; 8 | pub mod ping; 9 | pub mod plan; 10 | pub mod services; 11 | pub mod validate; 12 | -------------------------------------------------------------------------------- /apps/lgc/src/commands/init.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use std::str::FromStr; 5 | 6 | /// Prepare working directory for other lgcli commands 7 | #[derive(clap::Parser)] 8 | #[clap( 9 | about = "Initialize LogCraft CLI configuration", 10 | allow_hyphen_values = true 11 | )] 12 | pub struct InitCommand { 13 | /// Optional path for the project root 14 | #[clap(short, long, default_value = ".")] 15 | pub root: String, 16 | 17 | /// Optional base directory in which detections will be stored 18 | #[clap(short, long, default_value = lgc_common::configuration::LGC_RULES_DIR)] 19 | pub workspace: String, 20 | 21 | /// Creates the workspace directory in the root path [default: false] 22 | #[clap(short, long)] 23 | pub create: bool, 24 | } 25 | 26 | impl InitCommand { 27 | /// Run the init command. 
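    /// Verifies that the root path exists and is a directory, optionally creates the
    /// workspace (detection rules) directory, then writes a default configuration file (`lgc.toml`).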
28 | pub fn run(self) -> anyhow::Result<()> { 29 | use anyhow::bail; 30 | use lgc_common::configuration; 31 | 32 | let project_path = std::path::PathBuf::from_str(&self.root)?; 33 | if !project_path.exists() { 34 | bail!("directory '{}' does not exist", self.root) 35 | } else if !project_path.is_dir() { 36 | bail!("'{}' is not a directory", self.root) 37 | } 38 | 39 | if self.create { 40 | let rules_dir = &project_path.join(&self.workspace); 41 | if std::path::Path::new(rules_dir).exists() { 42 | bail!("workspace directory '{}' already exists", self.workspace) 43 | } 44 | 45 | // Create detections directory & configuration file 46 | if let Err(e) = std::fs::create_dir(rules_dir) { 47 | bail!("unable to create detection rules directory: {}", e) 48 | } 49 | 50 | tracing::info!("workspace directory '{}' created", self.workspace); 51 | } 52 | 53 | let config_path = &project_path.join(configuration::LGC_CONFIG_PATH); 54 | if std::fs::File::create_new(config_path).is_err() { 55 | bail!("{} already exists", configuration::LGC_CONFIG_PATH) 56 | } 57 | 58 | // Save the configuration 59 | configuration::ProjectConfiguration { 60 | core: configuration::CoreConfiguration { 61 | workspace: self.workspace, 62 | ..Default::default() 63 | }, 64 | ..Default::default() 65 | } 66 | .save_config(config_path.to_str())?; 67 | 68 | tracing::info!("{} saved", configuration::LGC_CONFIG_PATH); 69 | Ok(()) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /apps/lgc/src/commands/validate.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use lgc_common::{ 5 | configuration, 6 | plugins::manager::{PluginActions, PluginManager}, 7 | utils::filter_missing_plugins, 8 | }; 9 | 10 | use lgc_policies::policy::Severity; 11 | 12 | /// Validate detection rules 13 | #[derive(clap::Parser)] 14 | #[clap(about = "Validate local detection rules", allow_hyphen_values = true)] 15 | pub struct ValidateCommand { 16 | /// Quiet mode 17 | #[clap(short, long)] 18 | pub quiet: bool, 19 | } 20 | 21 | impl ValidateCommand { 22 | pub async fn run(self, config: configuration::ProjectConfiguration) -> anyhow::Result<()> { 23 | // Load detections 24 | let mut detections = config.load_detections(None)?; 25 | if detections.is_empty() { 26 | anyhow::bail!("nothing to validate, no detection found."); 27 | } 28 | 29 | let mut has_warning = false; 30 | let mut has_error = false; 31 | for (plugin, detections) in &detections { 32 | // Load policies per plugin 33 | let policies = config.read_plugin_policies(plugin)?; 34 | if !self.quiet { 35 | let spelling = match policies.len() { 36 | 0 | 1 => "policy", 37 | _ => "policies", 38 | }; 39 | tracing::info!( 40 | "{} {} loaded for plugin '{plugin}'.", 41 | policies.len(), 42 | spelling 43 | ); 44 | } 45 | 46 | for (policy_path, policy) in policies { 47 | let schema = policy 48 | .to_schema() 49 | .map_err(|e| anyhow::anyhow!("incorrect policy '{policy_path}': {e}"))?; 50 | 51 | let validator = jsonschema::Validator::new(&schema)?; 52 | let message = schema["x-message"] 53 | .as_str() 54 | .unwrap_or("missing message in schema"); 55 | 56 | // Validate detections against policies 57 | for (detection_path, content) in &detections.detections { 58 | let val: serde_json::Value = serde_json::from_slice(content)?; 59 | match validator.validate(&val) { 60 | Ok(_) => (), 61 | Err(_) => match policy.severity { 62 | Severity::Error => { 63 | 
tracing::error!("{message} (policy: {policy_path}, detection: {detection_path})"); 64 | has_error = true; 65 | } 66 | Severity::Warning => { 67 | tracing::warn!("{message} (policy: {policy_path}, detection: {detection_path})"); 68 | has_warning = true; 69 | } 70 | }, 71 | } 72 | } 73 | } 74 | } 75 | 76 | // Prepare plugin manager and tasks JoinSet. 77 | let plugin_manager = PluginManager::new()?; 78 | let mut plugin_tasks = tokio::task::JoinSet::new(); 79 | 80 | // Retrieve plugin directory and filter out plugins that do not exist. 81 | let plugins_dir = filter_missing_plugins( 82 | config.core.base_dir, 83 | &config.core.workspace, 84 | &mut detections, 85 | ); 86 | 87 | // Collect the keys into a new vector. 88 | let plugin_keys: Vec<_> = detections.keys().cloned().collect(); 89 | 90 | for plugin in plugin_keys { 91 | // Check if the plugin exists. 92 | let plugin_path = plugins_dir.join(&plugin).with_extension("wasm"); 93 | if !plugin_path.exists() { 94 | tracing::warn!( 95 | "ignoring '{}/{}' (no matching plugin).", 96 | config.core.workspace, 97 | plugin 98 | ); 99 | continue; 100 | } 101 | 102 | let plugin_manager = plugin_manager.clone(); 103 | // Now it's safe to remove the plugin's detections from `detections`. 104 | let plugin_detections = detections.remove(&plugin).ok_or_else(|| { 105 | anyhow::anyhow!( 106 | "unexpected error. No detection data found for plugin '{}'.", 107 | plugin 108 | ) 109 | })?; 110 | 111 | // Spawn a task that does both instantiation and validation. 112 | plugin_tasks.spawn(async move { 113 | let (instance, mut store) = plugin_manager.load_plugin(plugin_path).await?; 114 | let mut errors = Vec::new(); 115 | 116 | for (path, content) in &plugin_detections.detections { 117 | if let Err(e) = instance.validate(&mut store, content).await { 118 | errors.push((path.clone(), e)); 119 | } 120 | } 121 | Ok::<_, anyhow::Error>(errors) 122 | }); 123 | } 124 | 125 | // Process the results of each plugin task. 126 | while let Some(join_result) = plugin_tasks.join_next().await { 127 | match join_result { 128 | Ok(result) => { 129 | let errors = result?; 130 | for (path, err) in errors { 131 | tracing::error!("validation failed on '{path}': {err}"); 132 | has_error = true; 133 | } 134 | } 135 | Err(e) => { 136 | // A panic in one of the spawned tasks. 137 | tracing::error!("plugin panicked: {:?}", e); 138 | has_error = true; 139 | } 140 | } 141 | } 142 | 143 | if !self.quiet && !has_error && !has_warning { 144 | tracing::info!("all good, no problem identified."); 145 | } else if has_error { 146 | std::process::exit(1); 147 | } 148 | 149 | Ok(()) 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /apps/lgc/src/lgc.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 
2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | #![forbid(unsafe_code)] 5 | 6 | use anyhow::Result; 7 | use std::env; 8 | 9 | use lgc::commands; 10 | use lgc_common::{configuration, utils}; 11 | 12 | #[tokio::main] 13 | async fn main() { 14 | // If an error occurs, log it and exit 15 | if let Err(err) = LogCraftCli::init().await { 16 | tracing::error!("{err}"); 17 | std::process::exit(1); 18 | } 19 | } 20 | 21 | const HELP_TEMPLATE: &str = r#" 22 | {before-help}{name} {version} 23 | 24 | {usage-heading} {usage} 25 | 26 | {all-args}{after-help} 27 | "#; 28 | 29 | /// LogCraft CLI 30 | #[derive(clap::Parser)] 31 | #[clap( 32 | name="LogCraft CLI", 33 | help_template=HELP_TEMPLATE, 34 | version=concat!("v", env!("CARGO_PKG_VERSION") 35 | ))] 36 | struct LogCraftCli { 37 | #[clap(subcommand)] 38 | commands: LogCraftCommands, 39 | 40 | #[clap(skip)] 41 | config: configuration::ProjectConfiguration, 42 | } 43 | 44 | /// LogCraft CLI 45 | #[derive(clap::Subcommand)] 46 | enum LogCraftCommands { 47 | Init(commands::init::InitCommand), 48 | Ping(commands::ping::PingCommand), 49 | Validate(commands::validate::ValidateCommand), 50 | Plan(commands::plan::PlanCommand), 51 | Apply(commands::apply::ApplyCommand), 52 | Destroy(commands::destroy::DestroyCommand), 53 | #[clap(subcommand)] 54 | Services(commands::services::ServicesCommands), 55 | } 56 | 57 | impl LogCraftCli { 58 | /// Initialize and load the configuration. 59 | async fn init() -> Result<()> { 60 | use clap::{builder::styling, CommandFactory}; 61 | use console::{set_colors_enabled, set_colors_enabled_stderr}; 62 | use figment::providers::{Env, Format, Toml}; 63 | 64 | // Prepare style 65 | let styles = styling::Styles::styled() 66 | .header(styling::AnsiColor::Green.on_default().bold().underline()) 67 | .usage(styling::AnsiColor::Green.on_default().bold().underline()) 68 | .literal(styling::AnsiColor::Blue.on_default().bold()); 69 | 70 | // Forces tty colors 71 | if env::var("LGC_FORCE_COLORS").is_ok_and(|t| &t == "true") { 72 | set_colors_enabled(true); 73 | set_colors_enabled_stderr(true); 74 | } 75 | 76 | let matches = LogCraftCli::command().styles(styles).get_matches(); 77 | let mut cli = ::from_arg_matches(&matches)?; 78 | 79 | tracing_subscriber::fmt() 80 | .with_writer(std::io::stdout) 81 | .with_target(false) 82 | .without_time() 83 | .with_env_filter(tracing_subscriber::EnvFilter::from_env("LGC_LOG")) 84 | .with_max_level(tracing::Level::INFO) 85 | .init(); 86 | 87 | // Load configuration 88 | match cli.commands { 89 | LogCraftCommands::Init(cmd) => return cmd.run(), 90 | _ => { 91 | let configuration_path = std::path::PathBuf::from(configuration::LGC_CONFIG_PATH); 92 | 93 | if configuration_path.is_file() { 94 | let mut configuration_file = std::fs::read_to_string(configuration_path)?; 95 | 96 | // Environment variables substitution 97 | if envsubst::is_templated(&configuration_file) { 98 | configuration_file = envsubst::substitute( 99 | configuration_file, 100 | &env::vars() 101 | .filter_map(|(key, value)| { 102 | if !utils::env_forbidden_chars(&key) 103 | && !utils::env_forbidden_chars(&value) 104 | { 105 | Some((key, value)) 106 | } else { 107 | None 108 | } 109 | }) 110 | .collect::>(), 111 | )?; 112 | } 113 | 114 | cli.config = match figment::Figment::new() 115 | .merge(Toml::string(&configuration_file)) 116 | .merge(Env::prefixed("LGC_").split("_")) 117 | .extract() 118 | { 119 | Ok(config) => config, 120 | Err(e) => { 121 | tracing::error!("unable to load configuration: {}", e); 122 | std::process::exit(1) 123 | } 124 | }; 
125 | } else { 126 | tracing::error!( 127 | "no configuration file, run 'lgc init' to initialize a new project" 128 | ); 129 | std::process::exit(1) 130 | } 131 | } 132 | }; 133 | 134 | cli.run().await 135 | } 136 | 137 | /// LogCraft CLI entrypoint. 138 | pub async fn run(self) -> Result<()> { 139 | match self.commands { 140 | // General commands 141 | LogCraftCommands::Init(cmd) => cmd.run(), 142 | LogCraftCommands::Ping(cmd) => cmd.run(self.config).await, 143 | LogCraftCommands::Validate(cmd) => cmd.run(self.config).await, 144 | LogCraftCommands::Plan(cmd) => cmd.run(self.config).await, 145 | LogCraftCommands::Apply(cmd) => cmd.run(self.config).await, 146 | LogCraftCommands::Destroy(cmd) => cmd.run(self.config).await, 147 | // Services commands 148 | LogCraftCommands::Services(cmd) => cmd.run(self.config).await, 149 | } 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /apps/lgc/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | // Export commands module 5 | pub mod commands; 6 | -------------------------------------------------------------------------------- /apps/lgc/tests/common.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use std::{env, fs, path, process}; 5 | 6 | use anyhow::Result; 7 | use lgc_common::configuration::ProjectConfiguration; 8 | use lgc_common::configuration::LGC_CONFIG_PATH; 9 | use rexpect::session; 10 | 11 | pub const DEFAULT_WORKSPACE: &str = "rules"; 12 | pub const PLUGIN_NAME: &str = "sample"; 13 | pub const DEFAULT_TIMEOUT: u64 = 600_000; 14 | 15 | /// Provides helpers to run command tests. 
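/// Spawns the compiled `lgc` binary inside a PTY session (via `rexpect`) so tests
/// can drive interactive commands and assert on their output.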
16 | pub struct TestingEnv { 17 | pub root_dir: path::PathBuf, 18 | pub bin_path: path::PathBuf, 19 | pub session: session::PtySession, 20 | } 21 | 22 | impl TestingEnv { 23 | pub fn init( 24 | cwd: bool, 25 | root: &path::Path, 26 | workspace: Option<&str>, 27 | create: bool, 28 | ) -> Result { 29 | // Retrieve the CLI binary path & build the command 30 | let bin_path = assert_cmd::cargo::cargo_bin(env!("CARGO_PKG_NAME")); 31 | let mut command = process::Command::new(&bin_path); 32 | if cwd { 33 | command.current_dir(root); 34 | } 35 | 36 | // Construct the init command 37 | command.args([ 38 | "init", 39 | "--root", 40 | root.to_str() 41 | .expect("Failed to convert root path to string"), 42 | ]); 43 | if let Some(workspace) = workspace { 44 | command.arg("--workspace").arg(workspace); 45 | } 46 | if create { 47 | command.arg("--create"); 48 | } 49 | 50 | // Return TestingEnv instance 51 | Ok(Self { 52 | bin_path, 53 | root_dir: root.to_path_buf(), 54 | session: session::spawn_command(command, Some(DEFAULT_TIMEOUT))?, 55 | }) 56 | } 57 | 58 | pub fn init_success(mut self) -> Result { 59 | self.session 60 | .exp_string(&format!("{} saved", LGC_CONFIG_PATH))?; 61 | Ok(self) 62 | } 63 | 64 | pub fn setup_plugin(&self) -> Result<()> { 65 | // Ensure plugin dir exists 66 | let plugin_dir = self.root_dir.join(".logcraft/plugins"); 67 | fs::create_dir_all(&plugin_dir)?; 68 | 69 | let cargo_root = 70 | path::PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("OUT_DIR not set")); 71 | 72 | let plugin_path = cargo_root.join(format!( 73 | "../../target/wasm32-wasip2/release/{PLUGIN_NAME}.wasm" 74 | )); 75 | 76 | if !plugin_path.exists() { 77 | // Build the dummy plugin 78 | let mut command = process::Command::new("cargo"); 79 | command.args([ 80 | "build", 81 | "-p", 82 | PLUGIN_NAME, 83 | "--release", 84 | "--target", 85 | "wasm32-wasip2", 86 | ]); 87 | command.current_dir(cargo_root); 88 | 89 | // Spawn the command 90 | let mut status = session::spawn_command(command, Some(DEFAULT_TIMEOUT))?; 91 | status.exp_eof().expect("Failed to build testing plugin"); 92 | } 93 | 94 | // Copy the dummy plugin to the plugin directory 95 | fs::copy( 96 | plugin_path, 97 | plugin_dir.join(PLUGIN_NAME).with_extension("wasm"), 98 | )?; 99 | 100 | // Load the configuration 101 | let configuration_path = self.root_dir.join(LGC_CONFIG_PATH); 102 | let configuration_content = fs::read_to_string(&configuration_path)?; 103 | 104 | // Update base_dir for plugin retrieval 105 | let mut configuration: ProjectConfiguration = toml::from_str(&configuration_content)?; 106 | configuration.core.base_dir = Some(self.root_dir.join(".logcraft").display().to_string()); 107 | configuration.save_config(Some(configuration_path.to_str().unwrap()))?; 108 | 109 | Ok(()) 110 | } 111 | } 112 | 113 | pub fn assert_file_exists(path: &std::path::Path, expected: bool, message: &str) { 114 | assert_eq!(path.exists(), expected, "{}", message); 115 | } 116 | -------------------------------------------------------------------------------- /apps/lgc/tests/test_init.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use anyhow::Result; 5 | use std::path::Path; 6 | 7 | use lgc_common::configuration::LGC_CONFIG_PATH; 8 | 9 | pub mod common; 10 | 11 | /// Test that initializing a project with the default configuration succeeds. 
12 | #[test] 13 | fn init_default_command_without_create() -> Result<()> { 14 | let temp_dir = assert_fs::TempDir::new()?; 15 | common::TestingEnv::init(false, temp_dir.path(), None, false)?.init_success()?; 16 | 17 | let config_file = temp_dir.join(LGC_CONFIG_PATH); 18 | common::assert_file_exists(&config_file, true, "Expected the config file to be created"); 19 | 20 | let workspace_dir = temp_dir.join(common::DEFAULT_WORKSPACE); 21 | common::assert_file_exists( 22 | &workspace_dir, 23 | false, 24 | "Workspace should not exist if '--create' was not given", 25 | ); 26 | 27 | Ok(()) 28 | } 29 | 30 | /// Test that initializing a project with the default configuration and creating the workspace succeeds. 31 | #[test] 32 | fn init_default_command_with_create() -> Result<()> { 33 | let temp_dir = assert_fs::TempDir::new()?; 34 | let mut env = common::TestingEnv::init(false, &temp_dir, None, true)?; 35 | 36 | env.session.exp_string(&format!( 37 | "workspace directory '{}' created", 38 | common::DEFAULT_WORKSPACE 39 | ))?; 40 | env.init_success()?; 41 | 42 | let workspace_dir = temp_dir.join(common::DEFAULT_WORKSPACE); 43 | common::assert_file_exists(&workspace_dir, true, "Expected workspace to be created"); 44 | 45 | let config_file = temp_dir.join(LGC_CONFIG_PATH); 46 | common::assert_file_exists(&config_file, true, "Expected the config file to be created"); 47 | 48 | Ok(()) 49 | } 50 | 51 | /// Test that initializing a project with a custom root path and workspace succeeds. 52 | #[test] 53 | fn init_custom_root_and_workspace() -> Result<()> { 54 | let temp_dir = assert_fs::TempDir::new()?; 55 | // Create a subdirectory for the workspace 56 | let root_dir = temp_dir.path().join("custom_root"); 57 | std::fs::create_dir(&root_dir).expect("Failed to create custom root directory"); 58 | 59 | let mut env = common::TestingEnv::init(false, &root_dir, Some("custom_workspace"), true)?; 60 | 61 | env.session 62 | .exp_string("workspace directory 'custom_workspace' created")?; 63 | env.init_success()?; 64 | 65 | Ok(()) 66 | } 67 | 68 | /// Test that initializing a project with an already existing workspace fails. 69 | #[test] 70 | fn init_workspace_conflict() -> Result<()> { 71 | let temp_dir = assert_fs::TempDir::new()?; 72 | 73 | // Create a subdirectory for the workspace 74 | let workspace_dir = temp_dir.join(common::DEFAULT_WORKSPACE); 75 | std::fs::create_dir(&workspace_dir).expect("Failed to pre-create workspace dir"); 76 | 77 | let mut env = common::TestingEnv::init(false, &temp_dir, None, true)?; 78 | 79 | assert!(workspace_dir.exists(), "Expected workspace to exist"); 80 | 81 | env.session.exp_string(&format!( 82 | "workspace directory '{}' already exists", 83 | common::DEFAULT_WORKSPACE 84 | ))?; 85 | 86 | let config_file = temp_dir.join(LGC_CONFIG_PATH); 87 | common::assert_file_exists( 88 | &config_file, 89 | false, 90 | "Expected the config file to be missing", 91 | ); 92 | 93 | Ok(()) 94 | } 95 | 96 | /// Test that initializing a project with an existing configuration file fails. 
97 | #[test] 98 | fn init_config_conflict() -> Result<()> { 99 | let temp_dir = assert_fs::TempDir::new()?; 100 | let mut env = common::TestingEnv::init(false, &temp_dir, None, false)?; 101 | 102 | let config_file = temp_dir.join(LGC_CONFIG_PATH); 103 | std::fs::File::create(&config_file).expect("Failed to pre-create config file"); 104 | 105 | assert!(config_file.exists(), "Expected the config file to exist"); 106 | 107 | env.session 108 | .exp_string(&format!("{} already exists", LGC_CONFIG_PATH))?; 109 | 110 | let workspace_dir = temp_dir.join(common::DEFAULT_WORKSPACE); 111 | common::assert_file_exists(&workspace_dir, false, "Expected workspace to be missing"); 112 | 113 | Ok(()) 114 | } 115 | 116 | /// Test that initializing a project with an invalid root path fails. 117 | #[test] 118 | fn init_invalid_root() -> Result<()> { 119 | // Create a temporary file 120 | let temp_dir = assert_fs::TempDir::new()?; 121 | let invalid_root = temp_dir.path().join("invalid_root"); 122 | std::fs::File::create(&invalid_root).expect("Failed to create invalid root file"); 123 | 124 | let mut env = common::TestingEnv::init(false, &invalid_root, None, false)?; 125 | env.session.exp_string(&format!( 126 | "'{}' is not a directory", 127 | invalid_root 128 | .to_str() 129 | .expect("Failed to convert invalid root path to string") 130 | ))?; 131 | 132 | Ok(()) 133 | } 134 | 135 | /// Test that initializing a project with a missing root path fails. 136 | #[test] 137 | fn init_missing_root() -> Result<()> { 138 | // Path to a non-existent directory 139 | let missing_root = Path::new("/tmp/missing_root"); 140 | let mut env = common::TestingEnv::init(false, missing_root, None, false)?; 141 | 142 | env.session 143 | .exp_regex(&format!("'{}' does not exist", missing_root.display()))?; 144 | 145 | Ok(()) 146 | } 147 | -------------------------------------------------------------------------------- /docs/moon.yml: -------------------------------------------------------------------------------- 1 | language: 'typescript' 2 | type: 'application' 3 | 4 | project: 5 | name: 'docs' 6 | description: 'LogCraft User Documentation' 7 | 8 | tasks: 9 | build: 10 | command: 'pnpm build' 11 | deps: 12 | - '^:build' 13 | inputs: 14 | - 'src/**/*' 15 | - 'package.json' 16 | outputs: 17 | - 'src/.vitepress/dist/**/*' 18 | dev: 19 | command: 'pnpm dev' 20 | check: 21 | # passthrough arguments: 22 | # `lgc docs:check -- --minAlertLevel=suggestion` 23 | command: 'vale --config ./vale/.vale.ini src' 24 | inputs: 25 | - 'src/**/*.md' 26 | -------------------------------------------------------------------------------- /docs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "module", 3 | "devDependencies": { 4 | "vitepress": "^1.6.3" 5 | }, 6 | "scripts": { 7 | "dev": "vitepress dev src", 8 | "build": "vitepress build src", 9 | "preview": "vitepress preview src" 10 | }, 11 | "dependencies": { 12 | "vuetify": "^3.7.14" 13 | } 14 | } -------------------------------------------------------------------------------- /docs/src/.vitepress/config.mts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitepress"; 2 | 3 | export default defineConfig({ 4 | title: "LogCraft CLI", 5 | description: "Detection as Code made simple", 6 | themeConfig: { 7 | logo: "/logo.png", 8 | nav: [ 9 | { text: "Home", link: "/" }, 10 | { text: "Quick Start", link: "/essentials/quickstart" }, 11 | { text: "Support", link: "/support" }, 12 | 
], 13 | 14 | sidebar: [ 15 | { 16 | text: "Essentials", 17 | items: [ 18 | { text: "Installation", link: "/essentials/installation" }, 19 | { text: "Quick Start", link: "/essentials/quickstart" }, 20 | { text: "Configuration", link: "/essentials/configuration.md" }, 21 | { text: "GitLab Integration", link: "/essentials/gitlab.md" }, 22 | ], 23 | }, 24 | { 25 | text: "Concepts", 26 | items: [ 27 | { text: "Detections", link: "/concepts/detections" }, 28 | { text: "Identifiers", link: "/concepts/identifiers" }, 29 | { text: "Plugins", link: "/concepts/plugins" }, 30 | { text: "Policies", link: "/concepts/policies" }, 31 | ], 32 | }, 33 | { 34 | text: "Commands", 35 | collapsed: false, 36 | items: [ 37 | { text: "lgc init", link: "/commands/init" }, 38 | { text: "lgc apply", link: "/commands/apply" }, 39 | { text: "lgc destroy", link: "/commands/destroy" }, 40 | { text: "lgc ping", link: "/commands/ping" }, 41 | { text: "lgc plan", link: "/commands/plan" }, 42 | { text: "lgc services", link: "/commands/services" }, 43 | { text: "lgc validate", link: "/commands/validate" }, 44 | ], 45 | }, 46 | { 47 | text: "Plugins", 48 | collapsed: false, 49 | items: [ 50 | { text: "CrowdStrike", link: "/plugins/crowdstrike" }, 51 | { text: "Elastic", link: "/plugins/elastic" }, 52 | { 53 | text: "Google Chronicle (SecOps)", 54 | link: "/plugins/google-chronicle", 55 | }, 56 | { text: "LimaCharlie", link: "/plugins/limacharlie" }, 57 | { text: "Microsoft Sentinel", link: "/plugins/microsoft-sentinel" }, 58 | { text: "Palo Alto Cortex", link: "/plugins/paloalto-cortex" }, 59 | { text: "Sigma", link: "/plugins/sigma" }, 60 | { text: "Splunk", link: "/plugins/splunk" }, 61 | { text: "Sekoia", link: "/plugins/sekoia" }, 62 | { text: "Tanium", link: "/plugins/tanium" }, 63 | { text: "Yara", link: "/plugins/yara" }, 64 | ], 65 | }, 66 | { 67 | text: "Developers", 68 | collapsed: false, 69 | items: [ 70 | { 71 | text: "Docker images", 72 | link: "developers/docker-images.md", 73 | }, 74 | { 75 | text: "State", 76 | link: "developers/state.md", 77 | }, 78 | { 79 | text: "Compiling", 80 | link: "developers/compiling.md", 81 | }, 82 | { 83 | text: "Custom plugins", 84 | link: "/developers/how-to-create-plugins", 85 | }, 86 | ], 87 | }, 88 | { text: "Getting Help", link: "/support" }, 89 | ], 90 | 91 | socialLinks: [ 92 | { icon: "twitter", link: "https://twitter.com/LogCraftIO" }, 93 | { icon: "github", link: "https://github.com/LogCraftIO/logcraft-cli" }, 94 | { icon: "linkedin", link: "https://www.linkedin.com/company/logcraft" }, 95 | ], 96 | footer: { 97 | copyright: "Copyright © 2023-present LogCraft, SAS", 98 | }, 99 | }, 100 | vite: { 101 | ssr: { 102 | noExternal: ["vuetify"], 103 | }, 104 | }, 105 | sitemap: { 106 | hostname: "https://docs.logcraft.io", 107 | }, 108 | head: [ 109 | ["link", { rel: "icon", type: "image/png", href: "/logo.png" }], 110 | ["meta", { property: "og:type", content: "website" }], 111 | ["meta", { property: "og:locale", content: "en" }], 112 | [ 113 | "meta", 114 | { 115 | property: "og:title", 116 | content: "LogCraft | Detection-as-Code for Modern Security Operations", 117 | }, 118 | ], 119 | ["meta", { property: "og:site_name", content: "LogCraft" }], 120 | ["meta", { property: "og:url", content: "https://docs.logcraft.io/" }], 121 | ], 122 | }); 123 | -------------------------------------------------------------------------------- /docs/src/.vitepress/theme/components/plugins/PluginStatus.vue: -------------------------------------------------------------------------------- 1 | 17 
| 18 | 26 | 27 | 34 | -------------------------------------------------------------------------------- /docs/src/.vitepress/theme/components/plugins/PluginsIndexPage.vue: -------------------------------------------------------------------------------- 1 | 15 | 16 | 21 | -------------------------------------------------------------------------------- /docs/src/.vitepress/theme/components/plugins/common.js: -------------------------------------------------------------------------------- 1 | export const getCategoryColor = (stage) => { 2 | switch (stage.toLowerCase()) { 3 | case "siem": 4 | return "teal"; 5 | case "xdr": 6 | return "blue"; 7 | case "edr": 8 | return "indigo"; 9 | case "format": 10 | return "purple"; 11 | default: 12 | return "grey"; 13 | } 14 | }; 15 | -------------------------------------------------------------------------------- /docs/src/.vitepress/theme/components/plugins/plugins.data.js: -------------------------------------------------------------------------------- 1 | import { createContentLoader } from "vitepress"; 2 | 3 | const pages = createContentLoader("plugins/*.md", { 4 | includeSrc: false, 5 | render: false, 6 | excerpt: false, 7 | transform(rawData) { 8 | return rawData 9 | .filter((item) => { 10 | // do not return the index page 11 | return item.url !== "/plugins/"; 12 | }) 13 | .sort((a, b) => { 14 | // sort by title alphabetically 15 | return a.frontmatter.title.localeCompare(b.frontmatter.title); 16 | }); 17 | }, 18 | }); 19 | 20 | export default pages; 21 | -------------------------------------------------------------------------------- /docs/src/.vitepress/theme/index.ts: -------------------------------------------------------------------------------- 1 | import type { Theme } from "vitepress"; 2 | import DefaultTheme from "vitepress/theme"; 3 | 4 | // Custom components 5 | import PluginsIndexPage from "./components/plugins/PluginsIndexPage.vue"; 6 | import PluginStatus from "./components/plugins/PluginStatus.vue"; 7 | 8 | // Vuetify 9 | import "vuetify/styles"; 10 | import { createVuetify } from "vuetify"; 11 | import { VChip } from "vuetify/components"; 12 | 13 | const vuetify = createVuetify({ 14 | components: { 15 | VChip, 16 | }, 17 | }); 18 | 19 | export default { 20 | extends: DefaultTheme, 21 | enhanceApp({ app }) { 22 | app.use(vuetify); 23 | app.component("PluginsIndexPage", PluginsIndexPage); 24 | app.component("PluginStatus", PluginStatus); 25 | }, 26 | } satisfies Theme; 27 | -------------------------------------------------------------------------------- /docs/src/commands/apply.md: -------------------------------------------------------------------------------- 1 | # lgc apply 2 | 3 | The apply command performs a plan just like [lgc plan](./plan.md) does, but then actually carries out the planned changes to each remote services using the relevant plugin's API. 4 | 5 | It asks for confirmation from the user before making any changes, unless it was explicitly told to skip approval. 6 | 7 | ```bash 8 | lgc apply [] [options] 9 | ``` 10 | 11 | To apply changes to a specific service: 12 | 13 | ```bash 14 | lgc apply my-service 15 | ``` 16 | 17 | To apply changes to all services belonging to a specific environment: 18 | 19 | ```bash 20 | lgc apply my-environment 21 | ``` 22 | 23 | In the case of an environment, all services belonging to that environment are updated, if, and only if, changes are pending toward these services. 
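In CI/CD pipelines, the apply step typically runs after a successful plan. The snippet below is a minimal, hypothetical GitLab CI job (the job name, stage, and image tag are placeholder assumptions, not an official reference) that applies pending changes to the `prod` environment non-interactively, using the `--auto-approve` flag described below:

```yaml
# Hypothetical job: adjust the stage, image tag, and environment name to your setup.
apply-detections:
  stage: deploy
  image: ghcr.io/logcraftio/logcraft-cli:latest
  script:
    # Only services of the 'prod' environment with pending changes are updated.
    - lgc apply prod --auto-approve
```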
24 | 25 | ::: tip 26 | The apply command is the only command that locks the [state](../developers/state.md) because it is the only command modifying the state. 27 | ::: 28 | 29 | ## Options 30 | 31 | 32 | 33 | ### --auto-approve/-a 34 | 35 | 36 | 37 | The `--auto-approve` flag skips the prompt and immediately applies the changes without requiring user intervention. This is especially handy in CI/CD workflows. 38 | 39 | Normal (interactive) run: 40 | 41 | ```bash 42 | % lgc apply 43 | ... list of changes ... 44 | Apply changes? (y/n) 45 | // changes are applied if the user confirms 'yes' 46 | % 47 | ``` 48 | 49 | Non-interactive run: 50 | 51 | ```bash 52 | % lgc apply --auto-approve 53 | ... list of changes ... 54 | // changes are applied automatically 55 | % 56 | ``` 57 | -------------------------------------------------------------------------------- /docs/src/commands/destroy.md: -------------------------------------------------------------------------------- 1 | # lgc destroy 2 | 3 | This command is a convenient way to clean up remote services, especially ephemeral environments often encountered for development purposes. This command removes all detections from the target services. 4 | 5 | ```sh 6 | % lgc destroy 7 | ``` 8 | 9 | ## Options 10 | 11 | 12 | 13 | ### --auto-approve 14 | 15 | 16 | 17 | The `--auto-approve` flag skips the prompt and immediately removes the detections from the remote services. This is especially handy in CI/CD workflows. 18 | 19 | Normal (interactive) run: 20 | 21 | ```bash 22 | % lgc destroy dev 23 | ... list of suppressions ... 24 | Apply changes? (y/n) 25 | // changes are applied if the user confirms 'yes' 26 | % 27 | ``` 28 | 29 | Non-interactive run: 30 | 31 | ```bash 32 | % lgc destroy --auto-approve 33 | ... list of changes ... 34 | // changes are applied automatically 35 | % 36 | ``` 37 | -------------------------------------------------------------------------------- /docs/src/commands/init.md: -------------------------------------------------------------------------------- 1 | # lgc init 2 | 3 | This command helps you kickstart the project by creating the base configuration file `lgc.toml`. 4 | 5 | ```bash 6 | % lgc init 7 | INFO lgc.toml saved 8 | % ls -1 9 | lgc.toml 10 | % 11 | ``` 12 | 13 | Without parameters, `lgc init` creates the configuration file in the current directory. 14 | 15 | If a `lgc.toml` already exists, the command fails. 16 | 17 | ```bash 18 | % lgc init 19 | ERROR lgc.toml already exists 20 | % 21 | ``` 22 | 23 | ## Options 24 | 25 | 26 | 27 | ### --root/-r 28 | 29 | 30 | 31 | The `root` option allows specifying where to initialize the configuration (defaults to `.`). 32 | 33 | The following example initializes the configuration in the directory `/foo/bar`: 34 | 35 | ```bash 36 | % lgc init --root /foo/bar 37 | INFO lgc.toml saved 38 | % ls -1 /foo/bar 39 | lgc.toml 40 | % 41 | ``` 42 | 43 | If the provided path doesn't exist, an error is thrown. 44 | 45 | ```bash 46 | % lgc init --root /foo/baz 47 | ERROR directory '/foo/baz' does not exist 48 | % 49 | ``` 50 | 51 | 52 | 53 | ### --workspace/-w 54 | 55 | 56 | 57 | This parameter allows defining the base directory in which detections are stored (default: `rules`). This is a subdirectory of the root folder.
58 | 59 | ```bash 60 | % lgc init --root /foo/bar --workspace 'my-rules' 61 | INFO lgc.toml saved 62 | % ls -1 /foo/bar 63 | lgc.toml 64 | % cat /foo/bar/lgc.toml | grep workspace 65 | workspace = "my-rules" 66 | % 67 | ``` 68 | 69 | 70 | 71 | ### --create/-c 72 | 73 | 74 | 75 | When this parameter is specified, lgc creates the workspace in the root directory (default: `false`). 76 | 77 | ```bash 78 | % lgc init --create 79 | INFO workspace directory 'rules' created 80 | INFO lgc.toml saved 81 | % ls -1 82 | lgc.toml 83 | rules/ 84 | % 85 | ``` 86 | 87 | If a directory of the same name already exists, an error is thrown. 88 | 89 | ```bash 90 | % lgc init --create 91 | ERROR workspace directory 'rules' already exists 92 | % 93 | ``` 94 | -------------------------------------------------------------------------------- /docs/src/commands/ping.md: -------------------------------------------------------------------------------- 1 | # lgc ping 2 | 3 | This command validates network connectivity between lgc and the remote systems by opening a network connection to one or more services. This is a troubleshooting command. 4 | 5 | ```bash 6 | % lgc ping [] 7 | ``` 8 | 9 | When ping is called without parameters, all services are contacted. 10 | 11 | ```bash 12 | % lgc ping 13 | splunk-prod... OK 14 | splunk-dev... when calling ping for plugin 'splunk': ErrorCode::DnsError(DnsErrorPayload { rcode: Some("address not available"), info-code: Some(0) }) 15 | tanium-prod... OK 16 | % 17 | ``` 18 | 19 | If the provided identifier is a service, ping only connects to the specified service. 20 | 21 | ```bash 22 | % lgc ping my-service 23 | my-service... OK 24 | % 25 | ``` 26 | 27 | Finally, if the provided identifier is an environment, then all services belonging to that environment are contacted. 28 | 29 | ```bash 30 | % lgc ping prod 31 | splunk-prod... OK 32 | tanium-prod... OK 33 | % 34 | ``` 35 | 36 | ::: tip 37 | Technically speaking, `lgc ping` opens a socket to the remote host. This ensures that name resolution (DNS), routing, and firewall rules are correctly configured. 38 | ::: 39 | -------------------------------------------------------------------------------- /docs/src/commands/plan.md: -------------------------------------------------------------------------------- 1 | # lgc plan 2 | 3 | 4 | 5 | The plan command lets you preview the changes that lgc will make to your security services. It evaluates all security detections from the workspace and then compares that desired state to the real detections on remote services. 6 | 7 | 8 | 9 | The plan command uses state data and checks the current state of each detection using the relevant API to determine detections that should be created, updated, or removed from remote services. 10 | 11 | This command does not make any changes to remote services; it is usually run to ensure pending changes are as expected before [applying the changes](apply). 12 | 13 | ```sh 14 | % lgc plan [] [options] 15 | ``` 16 | 17 | Example: 18 | 19 | ```sh 20 | % lgc plan 21 | INFO [+] 'High Domain Entropy (DGA)' will be created on 'splunk-prod' 22 | INFO [~] 'Crazy High Domain Entropy' will be updated on 'splunk-prod' 23 | INFO [-] 'Test Rule 123' will be removed from 'splunk-prod' 24 | ... 25 | % 26 | ``` 27 | 28 | The plan command presents changes using the following notation: 29 | 30 | 1. **Creation**: a new detection has been created in the workspace and needs to be deployed (symbol: `+`). 31 | 2.
**Update**: a detection has been modified in the workspace and the changes need to be propagated to the remote service (symbol: `~`). 32 | 3. **Deletion**: a detection has been removed from the workspace, so it needs to be removed from the remote service (symbol: `-`). 33 | 34 | ## Options 35 | 36 | 37 | 38 | ### --state-only/-s 39 | 40 | 41 | 42 | By default, the plan command uses [state data](/developers/state) and connects to remote services to determine the changes. 43 | 44 | This flag changes this behavior by only using the local state to determine the changes. 45 | 46 | ```sh 47 | % lgc plan --state-only 48 | INFO using local state only, plan might be incorrect or incomplete 49 | INFO [+] 'High Domain Entropy (DGA)' will be created on 'splunk-prod' 50 | % 51 | ``` 52 | 53 | This flag makes the planning operation faster by reducing the number of remote API requests. However, it causes lgc to ignore external changes that occurred outside of normal workflows, which could result in an incomplete or incorrect plan. 54 | 55 | ::: tip 56 | This flag is mainly useful in development environments where having incorrect or incomplete plans may be acceptable. 57 | ::: 58 | 59 | 60 | 61 | ### --verbose/-v 62 | 63 | 64 | 65 | By default, the plan command only displays a summary of the changes. With the verbose flag set, the details of the changes are also displayed: 66 | 67 | ```sh 68 | % lgc plan prod --verbose 69 | [+] rule 'High Domain Entropy (DGA)' will be created on 'splunk-prod' 70 | [~] rule 'Crazy High Domain Entropy' will be updated on 'splunk-prod' 71 | | { 72 | | "app": "DemoApp", 73 | | "savedsearch": { 74 | | "cron_schedule": "*/15 0 0 0 0", 75 | | - "disabled": "true", 76 | | + "disabled": "false" 77 | | } 78 | | } 79 | [-] rule 'Test Rule 123' will be removed from 'splunk-prod' 80 | % 81 | ``` 82 | -------------------------------------------------------------------------------- /docs/src/commands/services.md: -------------------------------------------------------------------------------- 1 | # lgc services 2 | 3 | This command lets you manage the services your organization relies on for its defenses, for example a SIEM instance, an EDR appliance, or any other security tool. 4 | 5 | A service must be associated with a specific plugin (that is, a specific technology), and most often services also belong to an environment (*production*, *development*, *staging*, and so on). 6 | 7 | ::: tip 1 service = 1 plugin 8 | lgc relies on a plugin system to integrate with various technologies. A service is an instantiation of a plugin with specific contextual parameters. 9 | ::: 10 | 11 | ## lgc services create 12 | 13 | ### Usage 14 | 15 | This command creates a new service. 16 | 17 | The following command creates a new service in interactive mode: 18 | 19 | ```sh 20 | % lgc services create 21 | Select the plugin to use: splunk 22 | Service identifier: splunk-prod 23 | Environment name (optional): prod 24 | Do you want to configure the plugin now? (y/n) n 25 | INFO service `splunk-prod` successfully created. 26 | % 27 | ``` 28 | 29 | These parameters can also be set from the command line as illustrated below: 30 | 31 | ```sh 32 | % lgc services create -i splunk-prod -p splunk -e prod 33 | INFO service 'splunk-prod' successfully created. 34 | % 35 | ``` 36 | 37 | ### Options 38 | 39 | | Parameter | Required?
| Purpose | 40 | | ------------------------------- | --------- | ---------------------------------------------------- | 41 | | `--identifier\|-i ` | Required | Set the new service identifier | 42 | | `--plugin\|-p ` | Required | Plugin to use | 43 | | `--env\|-e ` | Optional | Environment name this service belongs to | 44 | | `--configure\|-c` | Optional | If set, launch the interactive service configuration | 45 | 46 | ## lgc services configure 47 | 48 | This command configures a service previously created using [`lgc services create`](#lgc-services-create). 49 | 50 | A service is associated with a plugin, so each service has its own set of parameters; this command lets you tune these parameters to match your environment. 51 | 52 | ```sh 53 | % lgc services configure 54 | // interactive prompt with plugin-specific parameters 55 | % 56 | ``` 57 | 58 | ::: tip Configure a service at creation 59 | The plugin configuration can occur right after the service creation using the `--configure` parameter. See [`lgc services create`](#lgc-services-create) for details. 60 | ::: 61 | 62 | ## lgc services list 63 | 64 | This command lists existing services defined in the configuration file. 65 | 66 | ```sh 67 | % lgc services list 68 | --- 69 | service : mspr-stnl 70 | environment: staging 71 | plugin : sentinel 72 | --- 73 | service : shc-prd-01 74 | environment: production 75 | plugin : splunk 76 | % 77 | ``` 78 | 79 | ## lgc services remove 80 | 81 | This command deletes an existing service. 82 | 83 | ```sh 84 | % lgc services remove 85 | ``` 86 | 87 | Note that the service definition is removed from the configuration file but detections aren't impacted. To clear a remote system, see [`lgc destroy`](destroy.md). 88 | -------------------------------------------------------------------------------- /docs/src/commands/validate.md: -------------------------------------------------------------------------------- 1 | # lgc validate 2 | 3 | This command ensures the detections are correctly formatted, typed, and consistent. In short, the validate command is a linter for security detections; it shines in CI/CD pipelines, or locally, to validate the detections before even attempting to deploy them. 4 | 5 | ```sh 6 | % lgc validate 7 | ERROR validation failed on 'rules/splunk/some-detection.yaml': field: 'parameters.disabled', error: invalid type: string "fals", expected a boolean 8 | % 9 | ``` 10 | 11 | If no errors are encountered, lgc exits gracefully: 12 | 13 | ```sh 14 | % lgc validate 15 | INFO all good, no problems identified 16 | % 17 | ``` 18 | 19 | The validation is specific to each technology (see [plugins](/concepts/plugins)). 20 | 21 | ::: info Example 22 | Both the [Splunk](../plugins/splunk#linting) and [Microsoft Sentinel](../plugins/microsoft-sentinel) plugins implement the validate command, but they perform different validations: 23 | 24 | - Splunk has a field `disabled` that must be set to a boolean value (`true` or `false`). The validation process ensures that if the field `disabled` is specified, it has an appropriate value. 25 | - For Microsoft Sentinel, the same validation exists, except it is performed on the field `enabled` because the field `disabled` simply doesn't exist. 26 | ::: 27 | 28 | ::: tip Policies 29 | The `validate` command also ensures that detection rules comply with internal [policies](../concepts/policies.md).
30 | ::: 31 | 32 | ## Options 33 | 34 | 35 | 36 | ### --quiet/-q 37 | 38 | 39 | 40 | The `--quiet` option instructs `validate` to stay quiet, except when errors are encountered. 41 | 42 | ```sh 43 | % lgc validate --quiet 44 | % 45 | ``` 46 | 47 | When errors occur: 48 | 49 | ```sh 50 | % lgc validate -q 51 | ERROR validation failed on 'some-detection.yml' 52 | % 53 | ``` 54 | -------------------------------------------------------------------------------- /docs/src/concepts/detections.md: -------------------------------------------------------------------------------- 1 | # Detections 2 | 3 | This section aims to demystify what security detections are and what detection as code is. 4 | 5 | ## What is a detection? 6 | 7 | A detection in LogCraft is **the smallest amount of _code_** that a security tool such as a SIEM, an EDR, or an XDR considers at a time. 8 | 9 | The outcome of a security detection is most often a security alert that a SOAR or a security analyst investigates (or ignores :face_with_peeking_eye:). 10 | 11 | Often, security detections are wrongly reduced to search queries. In reality, a detection is more complex, as it embodies at least three things: 12 | 13 | - The search query, of course. The _what_ to look for. 14 | - Some contextual information. Typically a description field containing information about the thing expected to be found. 15 | - Optional parameters. These could range from common scheduling options to esoteric fields deeply tied to the technology used (hi security vendors :wave:). 16 | 17 | ## Plethora of query languages 18 | 19 | First, it is important to understand that there isn't a single, universal format to describe security detections _effectively_. And you probably noticed that security vendors love to introduce their own query language. 20 | 21 | 22 | 23 | Moreover, at LogCraft, we recognize that each organization chooses its own security tech stack. 24 | 25 | 26 | 27 | This also means that, regardless of the detection technology chosen, you should be able to instrument detection as code (DaC) in your environment. That's a requirement for any SecOps team that wants to operate at high speed and with high quality standards. 28 | 29 | To accommodate this fragmented landscape, lgc relies on a plugin system to understand and interact with security tools such as SIEM, EDR, and XDR. 30 | 31 | In short, lgc has as many plugins as there are technologies and formats out there. 32 | 33 | ## Detections as code 34 | 35 | Adopting detection as code (DaC) may influence your team’s workflows and operational structure, but for the better. 36 | 37 | Detection as code (DaC) enables security teams to provision and manage security detections using code, rather than relying on manual processes and configurations. 38 | 39 | Traditionally, security detections are developed in a testing environment, validated in staging, and then deployed to production. Without detection as code, this workflow is largely manual, making it time-consuming and error-prone, especially when managing detections at scale. 40 | 41 | Detection as code lets you define the desired state of your detections, while LogCraft automates the intermediary deployment steps. This frees security experts to focus on developing and fine-tuning detections instead of manually migrating them across environments. 42 | 43 | Adopting detection as code may require adjustments to team operations, as version control systems like GitLab or GitHub handle deployments, acting as a deployment server.
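As a rough sketch (not the official template — see the [GitLab integration](../essentials/gitlab.md) guide for a maintained example), such a pipeline often boils down to three stages mapped to the lgc commands:

```yaml
# Minimal sketch, assuming the pre-built container and a three-stage layout;
# job details (rules, state backend variables) are intentionally omitted.
image:
  name: "ghcr.io/logcraftio/logcraft-cli:latest"

stages: [validate, plan, apply]

validate:
  stage: validate
  script: [lgc validate]

plan:
  stage: plan
  script: [lgc plan]

apply:
  stage: apply
  script: [lgc apply --auto-approve]
  when: manual # assumption: keep a manual gate before touching production
```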
44 | 45 | By ensuring consistency, traceability, and the ability to detect configuration drifts, detection as code helps teams verify that what is running in production aligns with the intended configuration. 46 | 47 | Organizations implement detection as code to reduce risk and respond more quickly to emerging threats. 48 | 49 | ## Detection filenames 50 | 51 | To store detection rules as individual files, the first step is to adopt a naming convention. 52 | 53 | There are many conventions for naming things; the most popular ones are snake case (`snake_case`), kebab case (`kebab-case`), camel case (`camelCase`), and Pascal case (`PascalCase`). 54 | 55 | Examples: 56 | 57 | - `my-detection.yaml` 58 | - `my_detection.yaml` 59 | - `myDetection.yaml` 60 | - `MyDetection.yaml` 61 | 62 | The convention your team chooses doesn't change anything for LogCraft. Pick one that your team agrees on and stick to it; that's the most important part. 63 | 64 | Finally, avoid mixing naming styles; things get ~~ugly~~ more difficult to maintain :grimacing: 65 | 66 | ::: tip Filename and title 67 | A detection commonly has a `title` field such as `title: My Awesome Detection`. While filenames aren't that important to lgc, it is best to name them according to the title of the detection. In this example, a good name could be `my-awesome-detection.yaml` and a bad name would be `something-unrelated.yaml`. 68 | ::: 69 | -------------------------------------------------------------------------------- /docs/src/concepts/identifiers.md: -------------------------------------------------------------------------------- 1 | # Identifiers 2 | 3 | An identifier is a unique ID that references a service or an environment and must be unique across the entire configuration. An identifier can be anything you like as long as it is expressed in [kebab-case (lowercase)](https://developer.mozilla.org/en-US/docs/Glossary/Kebab_case). 4 | 5 | ::: tip Valid identifiers 6 | 7 | - `prod` 8 | - `siem-prod` 9 | - `service-2-0` 10 | ::: 11 | 12 | ::: warning Invalid identifiers 13 | 14 | - `PROD` _(identifiers must be lowercase)_ 15 | - `siem_prod` _(underscore isn't accepted as a separator)_ 16 | - `service-2.0` _(the dot isn't accepted either)_ 17 | - `foo-` _(ending hyphen)_ 18 | - `-foo` _(leading hyphen)_ 19 | - `foo----bar` _(multiple hyphens)_ 20 | ::: 21 | 22 | To create a service named `my-service`, either use the [services command](../commands/services.md) (recommended approach) or manually edit the configuration file to add the following block. 23 | 24 | ```toml 25 | [services] 26 | [services.my-service] 27 | ... service definition ... 28 | ``` 29 | 30 | The service can then be referenced in the command line, for example: 31 | 32 | ```sh 33 | % lgc ping my-service 34 | ``` 35 | 36 | ::: info Uniqueness 37 | Environments are loosely defined, so make sure they don't overlap with service names. For example, declaring a `prod` environment with a `siem-prod` service belonging to that environment is possible (and recommended!). However, if for some weird reason you wanted to have a service `foo` and an environment of the same name, that is not possible, as lgc won't be able to distinguish the service from the environment. 38 | ::: 39 | -------------------------------------------------------------------------------- /docs/src/concepts/plugins.md: -------------------------------------------------------------------------------- 1 | # Plugins 2 | 3 | LogCraft relies on plugins to connect to remote security systems.
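Concretely, each plugin ships as a single WebAssembly component placed in the `plugins/` directory of the lgc installation. A hypothetical listing (the exact file names depend on the plugins you install):

```bash
% ls -1 /opt/logcraft-cli/plugins
sentinel.wasm
splunk.wasm
%
```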
4 | 5 | 6 | 7 | ## Web Assembly (WASM) 8 | 9 | 10 | 11 | LogCraft plugins are built on the open standard WebAssembly (WASM). 12 | 13 | **These plugins run in memory-safe sandboxes**, making them secure by design. Beyond security, WASM modules offer exceptional performance due to their low-level binary format, optimized for modern processors. **This enables near-native execution speeds**. 14 | 15 | Additionally, WASM supports a wide range of programming languages, allowing LogCraft plugins to be [developed](../developers/how-to-create-plugins.md) in almost any language. 16 | 17 | ::: tip NIST 18 | In a recent study, [NIST](https://csrc.nist.gov/) emphasized the use of WebAssembly to enhance data protection strategies ([NIST IR 8505](https://csrc.nist.gov/News/2024/nist-has-published-nist-ir-8505)). 19 | ::: 20 | -------------------------------------------------------------------------------- /docs/src/developers/compiling.md: -------------------------------------------------------------------------------- 1 | # Compiling 2 | 3 | ::: warning Compiling from the sources 4 | This procedure should only be followed by contributors to the project. If you are a regular user or just want to give `lgc` a try, follow [this procedure](/essentials/quickstart) that uses pre-compiled binaries instead. 5 | ::: 6 | 7 | ## Monorepo 8 | 9 | LogCraft follows a monorepo approach, with almost everything related to the command-line utility located in the same repository tree, including its documentation and all officially maintained plugins. 10 | 11 | The first step in compiling lgc from the sources is to install [moonrepo](https://moonrepo.dev). Please refer to its official documentation. 12 | 13 | ## Building from the sources 14 | 15 | Building `lgc` from the sources is pretty straightforward. 16 | 17 | First, install [moonrepo](https://moonrepo.dev) on your system. 18 | 19 | Then, run `moon lgc:build` from the root of the repository to build the command-line utility. 20 | 21 | As lgc relies on plugins to interact with remote systems, run `moon '#plugin:build'` from the root of the repository to build all plugins. Alternatively, run `moon :build` to only build the desired plugin. 22 | 23 | Finally, the resulting binaries are located in: 24 | - `apps/lgc/target/release/lgc` (command-line utility) 25 | - `plugins//target/release/` (plugin) 26 | 27 | ## Build commands 28 | 29 | The following table summarizes the most commonly used commands in this repository. 30 | 31 | | Command | What it does | 32 | |----------------------------|--------------------------------| 33 | | `moon :build` | Build everything | 34 | | `moon lgc:build` | Build the `lgc` command | 35 | | `moon '#plugin:build'` | Build all plugins | 36 | | `moon :build` | Build the plugin | 37 | | `moon docs:build` | Build this documentation :) | 38 | 39 | Other `moon` commands can be useful depending on your needs; please refer to the official moon documentation. 40 | -------------------------------------------------------------------------------- /docs/src/developers/docker-images.md: -------------------------------------------------------------------------------- 1 | # Docker Image 2 | 3 | Pre-built Docker images that include lgc are available through the GitHub registry: 4 | 5 | ```bash 6 | docker pull ghcr.io/logcraftio/logcraft-cli:latest 7 | ``` 8 | 9 | LogCraft's containers use [Wolfi "Zero-CVE" images](https://www.chainguard.dev), which are specifically designed to minimize the attack surface and enhance the security of the software supply chain.
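To try the image locally, an invocation along these lines should work (a sketch: the mount path and working directory are assumptions, adjust them to your project):

```bash
# mount the current project and run lgc from the container
docker run --rm -v "$PWD:/work" -w /work ghcr.io/logcraftio/logcraft-cli:latest lgc validate
```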
10 | 11 | ::: tip Packages 12 | Docker images are available at: https://github.com/LogCraftIO/logcraft-cli/pkgs/container/logcraft-cli 13 | ::: 14 | -------------------------------------------------------------------------------- /docs/src/developers/how-to-create-plugins.md: -------------------------------------------------------------------------------- 1 | # How to create a plugin 2 | 3 | This page details the process of creating a plugin. 4 | 5 | ::: tip Programming Language 6 | A plugin can be created in virtually any language (C/C++, Rust, Python, Go, and many other languages) as plugins are built using WebAssembly (wasm). See [plugins](../concepts/plugins.md#web-assembly-wasm) for more information. 7 | ::: 8 | 9 | ## Interfaces 10 | 11 | A plugin, regardless of its implementation language, needs to expose the following interfaces: 12 | 13 | | Interface | What it does | 14 | | -------------------------------------------------------- | ------------------------------------------- | 15 | | `create(config, name, params) -> result` | Create a detection on the remote system | 16 | | `read(config, name, params) -> result` | Get/Read a detection from the remote system | 17 | | `update(config, name, params) -> result` | Update a detection on the remote system | 18 | | `delete(config, name, params) -> result` | Remove a detection from the remote system | 19 | | `load() -> metadata` | Load the plugin and return its metadata | 20 | | `settings() -> string` | Return the expected configuration | 21 | | `schema() -> string` | Return the schema | 22 | | `ping(config) -> result` | Open a connection to the remote system | 23 | 24 | ::: details Wasm Interface (WIT) 25 | Plugin interfaces are defined in the [Plugin Wasm Interface Type (WIT)](https://github.com/LogCraftIO/logcraft-cli/blob/main/libs/bindings/plugin.wit). Always refer to this file to ensure your interfaces return the appropriate data types. 26 | ::: 27 | 28 | ## Python plugin 29 | 30 | This section details the steps to create a Python plugin. Splunk is targeted for demonstration purposes, but note that [an official Splunk plugin written in Rust exists](../plugins/splunk.md). 31 | 32 | ::: tip Python Global Interpreter Lock (GIL) 33 | 34 | 35 | 36 | As a Python developer, you probably know that Python is a slow language, mainly as a consequence of the Python Global Interpreter Lock (GIL). Guess what? Compiling your Python code to WebAssembly will actually make it fly :rocket: 37 | 38 | 39 | 40 | ::: 41 | 42 | ### Package manager 43 | 44 | This guide uses [Poetry](https://python-poetry.org/) to manage Python packages, but feel free to use your preferred package manager. 45 | 46 | ```bash 47 | poetry new pysplunk 48 | ``` 49 | 50 | ### Dependencies 51 | 52 | Then, install `componentize-py`, a tool that converts Python code to a WebAssembly component. 53 | 54 | ```bash 55 | cd pysplunk 56 | poetry add componentize-py 57 | ``` 58 | 59 | ### WIT 60 | 61 | Next, copy the [bindings/wit files](https://github.com/LogCraftIO/logcraft-cli/tree/main/libs/bindings) from the repository into the Python app. These files define the contract between lgc and the plugin. 62 | 63 | ```bash 64 | ~$ ls -1 pysplunk/wit 65 | world.wit 66 | plugin.wit 67 | ~$ 68 | ``` 69 | 70 | ### Bindings 71 | 72 | This step is optional, as it only creates Python bindings in the working directory to integrate with your IDE. Later, when the Python code is built as a WASM component, these bindings are automatically generated.
73 | 74 | ```bash 75 | poetry run componentize-py --wit-path pysplunk/wit --world plugins bindings pysplunk 76 | ``` 77 | 78 | This results in a new directory `plugins` that contains the bindings in `pysplunk`. 79 | 80 | ### App 81 | 82 | Then, create a `main.py` to implement the interfaces expected by the WIT files. 83 | 84 | ```python 85 | #!/usr/bin/env python3 86 | from plugins import Plugins 87 | from plugins.exports.plugin import Metadata 88 | from plugins.types import Err, Ok, Some, Result 89 | 90 | 91 | class Plugin(Plugins): 92 | def load(self) -> Metadata: 93 | return Metadata("my-plugin", "0.1.0", "The Batman", "This is a famous plugin") 94 | 95 | def settings(self): 96 | pass 97 | 98 | def schema(self): 99 | pass 100 | 101 | def create(self, config: str, name: str, params: str): 102 | pass 103 | 104 | def read(self, config: str, name: str, params: str): 105 | pass 106 | 107 | def update(self, config: str, name: str, params: str): 108 | pass 109 | 110 | def delete(self, config: str, name: str, params: str): 111 | pass 112 | 113 | def ping(self, config: str) -> int: 114 | pass 115 | ``` 116 | 117 | The class name is important as it is inherited from the WIT files, hence the plugin must start with: 118 | 119 | ```python 120 | class Plugin(Plugins): 121 | ``` 122 | 123 | ::: warning 124 | Regarding the `load()` function, make sure to respect the [identifiers convention](../concepts/identifiers.md) (kebab-case). 125 | ::: 126 | 127 | Finally, compile the plugin: 128 | 129 | ```bash 130 | poetry run componentize-py --wit-path pysplunk/wit --world plugins componentize -p pysplunk main -o my-plugin.wasm 131 | ``` 132 | -------------------------------------------------------------------------------- /docs/src/developers/state.md: -------------------------------------------------------------------------------- 1 | # State 2 | 3 | lgc must store state about your managed detection rules. This state is used by lgc to determine which changes to make. In short, the state is critical in understanding what has been deployed and where it was deployed. 4 | 5 | ::: danger Important 6 | Never edit the state manually and let `lgc` manage it properly. 7 | ::: 8 | 9 | ## local 10 | 11 | By default, the state is stored locally in a file named `.logcraft/state.json`. This is the case when you run lgc on your workstation for example. 12 | 13 | ```toml 14 | [state] 15 | type = "local" 16 | path = ".logcraft/state.json" 17 | ``` 18 | 19 | This whole block is hidden by default in **lgc.toml**. 20 | 21 | ## http 22 | 23 | When lgc is used in a git environment such as GitLab, GitHub and other Version Control Systems (VCS), it is recommended to host the state in a http backend. 24 | 25 | The main benefit of using a http state store is centralised management. The state becomes accessible to all CI/CD jobs and team members, ensuring that multiple developers and CI/CD pipelines do not have conflicting or inconsistent state files, which could lead to infrastructure drift or errors. 26 | 27 | In addition, the state is automatically locked during operations to prevent simultaneous writes, ensuring safe and consistent updates. 28 | 29 | To activate the http backend, edit the `lgc.toml` as follow: 30 | 31 | ```toml 32 | [state] 33 | type = "http" 34 | ``` 35 | 36 | Then, it is recommended to adjust the state settings directly in your CI/CD template (see [GitLab template example](../essentials/gitlab.md#gitlab-template)) rather than in lgc.toml, but if you absolutely need to, lgc.toml accepts these parameters too. 
37 | 38 | 39 | ```toml 40 | [state] 41 | type = "http" 42 | 43 | # required backend parameters 44 | address = "" 45 | username = "" 46 | password = "" 47 | lock_address = "" 48 | lock_method = "" 49 | unlock_address = "" 50 | unlock_method = "" 51 | 52 | # optional backend parameters 53 | update_method = "" 54 | skip_cert_verification = "" 55 | timeout = "" 56 | client_ca_certificate_pem = "" 57 | client_certificate_pem = "" 58 | client_private_key_pem = "" 59 | headers = "" 60 | ``` 61 | -------------------------------------------------------------------------------- /docs/src/essentials/configuration.md: -------------------------------------------------------------------------------- 1 | # Configuration 2 | 3 | This section details the core options of `lgc.toml`. For services options, please refer to the appropriate [plugin documentation](../plugins/index.md). 4 | 5 | 6 | 7 | ## `workspace` 8 | 9 | ```toml 10 | [core] 11 | workspace = "rules" 12 | ``` 13 | 14 | This parameter defines the base directory in which detections are stored (default: rules). This parameter can be overridden with the environment variable `LGC_CORE_WORKSPACE`. 15 | 16 | ## `base_dir` 17 | 18 | ```toml 19 | [core] 20 | base_dir = "/opt/logcraft-cli" 21 | ``` 22 | 23 | This parameter defines the home directory of lgc, where the binary and plugins directory are located (default: `/opt/logcraft-cli`). This shouldn't be changed in most situations. This parameter can be overridden with the environment variable `LGC_CORE_BASE_DIR`. 24 | -------------------------------------------------------------------------------- /docs/src/essentials/installation.md: -------------------------------------------------------------------------------- 1 | # Installation 2 | 3 | ## Getting the bits 4 | 5 | Download the latest stable version of lgc from the project's [release page](https://github.com/LogCraftIO/logcraft-cli/releases). 6 | 7 | This table summarise the available assets: 8 | 9 | | Files | What it is | 10 | |----------------------------------|--------------------------------| 11 | | `lgc--.tar.gz` | command-line tool with plugins | 12 | | `lgc-minimal--.tar.gz` | command-line tool only | 13 | | `plugins.tar.gz` | All plugins | 14 | | `.wasm` | A specific plugin | 15 | 16 | 17 | In general, if you are just starting with lgc, choose the global package `lgc--.tar.gz` such as `lgc-linux-amd64.tar.gz` to get the command-line tool and all plugins. 18 | 19 | ## Local installation 20 | 21 | While lgc has been built to be used in CI/CD pipelines, it can still be used locally, on a workstation. 22 | 23 | ```bash 24 | % mkdir /opt/logcraft-cli 25 | % tar xzf lgc-linux-amd64.tar.gz -C /opt/logcraft-cli 26 | % ls -1 /opt/logcraft-cli 27 | lgc 28 | plugins/ 29 | README.md 30 | LICENSE 31 | % 32 | ``` 33 | 34 | Then, add lgc to the system's `PATH` by creating a symlink: 35 | 36 | ```bash 37 | % sudo ln -s /opt/logcraft-cli/lgc /usr/local/bin/lgc 38 | ``` 39 | 40 | ::: details Alternative approach 41 | Instead of creating a symlink, add `/opt/logcraft-cli` to the system's PATH 42 | ::: 43 | 44 | Finally, ensure lgc is correctly setup 45 | 46 | ```bash 47 | % cd 48 | % lgc --version 49 | LogCraft CLI v0.2.0 50 | ... 51 | % 52 | ``` 53 | Congratulations, lgc is now installed on your system 🎉 54 | 55 | ## CI/CD installation 56 | 57 | To easily use LogCraft in GitLab, GitHub, Bitbucket or any other version control system (VCS), pre-built containers are available. 
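For example, on GitHub Actions a job can run directly inside the published container (a minimal sketch, not an officially maintained workflow):

```yaml
jobs:
  validate:
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/logcraftio/logcraft-cli:latest
    steps:
      - uses: actions/checkout@v4
      # assumption: the container exposes lgc on the PATH
      - run: lgc validate
```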
58 | 59 | 60 | ::: tip 0-CVE containers 61 | LogCraft's containers use [Wolfi "Zero-CVE" images](https://www.chainguard.dev), which are specifically designed to minimize the attack surface and enhance the security of the software supply chain. 62 | ::: 63 | 64 | ### GitLab 65 | 66 | ```yaml 67 | image: 68 | name: "ghcr.io/logcraftio/logcraft-cli:latest" 69 | ``` 70 | 71 | Refer to the [GitLab Integration guide](./gitlab.md) for detailed instructions on setting up LogCraft in GitLab. 72 | -------------------------------------------------------------------------------- /docs/src/index.md: -------------------------------------------------------------------------------- 1 | # Overview 2 | 3 | `lgc` is an open source tool that simplifies the creation of Detection-as-Code pipelines while leveraging native capabilities of GitLab, GitHub, Bitbucket, and other Version Control Systems (VCS). 4 | 5 | With `lgc`, easily deploy your security detections into your SIEM, EDR, XDR, and other modern security solutions. 6 | 7 | 8 | 9 | ![LogCraft CLI Overview](/img/logcraft-cli-overview.png) 10 | 11 | 12 | 13 | At a high level, lgc is pretty straightforward: it reads detection files and manages them individually and programmatically through a plugin system. 14 | -------------------------------------------------------------------------------- /docs/src/plugins/crowdstrike.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: CrowdStrike 3 | category: EDR 4 | stage: planning 5 | --- 6 | 7 | # CrowdStrike 8 | 9 | 10 | 11 | ::: tip Need This? 12 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) or [reach out](mailto:hello@logcraft.io) to initiate the integration of this technology :tada: 13 | ::: 14 | -------------------------------------------------------------------------------- /docs/src/plugins/google-chronicle.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Google Chronicle (SecOps) 3 | category: SIEM 4 | stage: planning 5 | --- 6 | 7 | 8 | 9 | # Google Chronicle (SecOps) 10 | 11 | 12 | 13 | 14 | 15 | ::: tip Need This?
16 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) or [reach out](mailto:hello@logcraft.io) to initiate the integration of this technology :tada: 17 | ::: 18 | 19 | ## Example 20 | 21 | ``` 22 | rule malware_httpbrowser 23 | { 24 | meta: 25 | author = "Google Cloud Security" 26 | description = "HTTPBrowser malware" 27 | reference1 = "https://attack.mitre.org/software/S0070/" 28 | reference2 = "https://www.zscaler.com/blogs/research/chinese-cyber-espionage-apt-group-leveraging-recently-leaked-hacking-team-exploits-target-financial-services-firm" 29 | yara_version = "YL2.0" 30 | rule_version = "1.0" 31 | 32 | events: 33 | ( 34 | $e1.metadata.event_type = "REGISTRY_CREATION" and 35 | re.regex($e1.target.registry.registry_key, `(HKCU|HKEY_CURRENT_USER)\\Software\\Microsoft\\Windows\\CurrentVersion\\Run`) nocase and 36 | $e1.target.registry.registry_value_name = "wdm" nocase 37 | ) 38 | or 39 | ( 40 | $e1.metadata.event_type = "FILE_CREATION" and 41 | re.regex($e1.target.file.full_path, `\\vpdn\\VPDN_LU.exe$`) nocase 42 | ) 43 | or 44 | ( 45 | $e1.network.http.user_agent = "HttpBrowser/1.0" and 46 | re.regex($e1.target.url, `/.*c=.*&l=.*&o=.*&u=.*&r=`) 47 | ) 48 | 49 | condition: 50 | $e1 51 | } 52 | ``` 53 | 54 | Source: https://github.com/chronicle/detection-rules/blob/main/malware/httpbrowser.yaral 55 | -------------------------------------------------------------------------------- /docs/src/plugins/index.md: -------------------------------------------------------------------------------- 1 | # Plugins 2 | 3 | LogCraft can support a wide range of technologies. 4 | 5 | 6 | 7 | 8 | 9 | ::: tip What about vendor/technology X? 10 | Contact us at hello@logcraft.io if your favorite technology is not listed there, we'll be happy to review if that's a good fit. 11 | ::: 12 | 13 | 14 | -------------------------------------------------------------------------------- /docs/src/plugins/limacharlie.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: LimaCharlie 3 | category: EDR 4 | stage: planning 5 | --- 6 | 7 | # LimaCharlie 8 | 9 | 10 | 11 | ::: tip Need This? 12 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) or [reach out](mailto:hello@logcraft.io) to initiate the integration of this technology :tada: 13 | ::: 14 | 15 | ## Example 16 | 17 | ```yaml 18 | # Detection 19 | op: ends with 20 | event: NEW_PROCESS 21 | path: event/FILE_PATH 22 | value: wanadecryptor.exe 23 | case sensitive: false 24 | 25 | # Response 26 | - action: report 27 | name: wanacry 28 | - action: task 29 | command: history_dump 30 | - action: task 31 | command: 32 | - deny_tree 33 | - <> 34 | ``` 35 | -------------------------------------------------------------------------------- /docs/src/plugins/microsoft-sentinel.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Microsoft Sentinel 3 | category: SIEM 4 | stage: beta 5 | --- 6 | 7 | 8 | 9 | # Microsoft Azure Sentinel 10 | 11 | 12 | 13 | 14 | 15 | ## File format 16 | 17 | Microsoft Sentinel detections are normalized as follow: 18 | 19 | ```yaml 20 | kind: Scheduled 21 | name: 22 | 23 | properties: 24 | query: |- 25 | 26 | 27 | 28 | ``` 29 | 30 | This is a pretty simple and straightforward format (example below). 31 | 32 | ## File names 33 | 34 | Each detection must be stored in its own YAML file under the plugin directory inside the workspace root. 
35 | 36 | Example: 37 | 38 | - `rules/sentinel/detect-foo.yaml` 39 | - `rules/sentinel/high-entropy-domain-name.yaml` 40 | 41 | This ensure each detection is tracked individually. 42 | 43 | ## Example 44 | 45 | ```yaml 46 | name: Some detection 47 | kind: Scheduled 48 | # ruleId: 04df2776-e230-4df0-9624-56364de3f902 49 | properties: 50 | enabled: true 51 | severity: Medium 52 | query: |- 53 | AzureDiagnostics 54 | | where Category == 'JobLogs' 55 | | extend RunbookName = RunbookName_s 56 | | project TimeGenerated,RunbookName,ResultType,CorrelationId,JobId_g 57 | | summarize StartTime = minif(TimeGenerated,ResultType == 'Started'),EndTime = minif(TimeGenerated,ResultType in ('Completed','Failed','Failed')), Status = tostring(parse_json(make_list_if(ResultType,ResultType in ('Completed','Failed','Stopped')))[0]) by JobId_g,RunbookName 58 | | extend DurationSec = datetime_diff('second', EndTime,StartTime) 59 | | join kind=leftouter (AzureDiagnostics 60 | | where Category == "JobStreams" 61 | | where StreamType_s == "Error" 62 | | summarize TotalErrors = dcount(StreamType_s) by JobId_g, StreamType_s) on $left. JobId_g == $right. JobId_g 63 | | extend HasErrors = iff(StreamType_s == 'Error',true,false) 64 | | project StartTime, EndTime, DurationSec,RunbookName,Status,HasErrors,TotalErrors,JobId_g 65 | ``` 66 | -------------------------------------------------------------------------------- /docs/src/plugins/paloalto-cortex.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Palo Alto Cortex 3 | category: XDR 4 | stage: planning 5 | --- 6 | 7 | 8 | 9 | # Palo Alto Cortex 10 | 11 | 12 | 13 | 14 | 15 | ::: tip Need This? 16 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) or [reach out](mailto:hello@logcraft.io) to initiate the integration of this technology :tada: 17 | ::: 18 | -------------------------------------------------------------------------------- /docs/src/plugins/sekoia.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Sekoia 3 | category: XDR 4 | stage: planning 5 | --- 6 | 7 | # Sekoia 8 | 9 | 10 | 11 | ::: tip Need This? 12 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) or [reach out](mailto:hello@logcraft.io) to initiate the integration of this technology :tada: 13 | ::: 14 | -------------------------------------------------------------------------------- /docs/src/plugins/sigma.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Sigma 3 | category: Format 4 | stage: alpha 5 | --- 6 | 7 | # Sigma 8 | 9 | 10 | 11 | ::: tip Work In Progress 12 | The Sigma plugin is currently under development. If you want to contribute by testing this plugin, just [reach out](mailto:hello@logcraft.io) 13 | ::: 14 | 15 | ## Example 16 | 17 | ```yaml 18 | title: Exploitation Indicator Of CVE-2022-42475 19 | id: 293ccb8c-bed8-4868-8296-bef30e303b7e 20 | status: test 21 | description: Detects exploitation indicators of CVE-2022-42475 a heap-based buffer overflow in sslvpnd. 
22 | references: 23 | - https://www.fortiguard.com/psirt/FG-IR-22-398 24 | - https://www.bleepingcomputer.com/news/security/fortinet-says-ssl-vpn-pre-auth-rce-bug-is-exploited-in-attacks/ 25 | - https://www.deepwatch.com/labs/customer-advisory-fortios-ssl-vpn-vulnerability-cve-2022-42475-exploited-in-the-wild/ 26 | - https://community.fortinet.com/t5/FortiGate/Technical-Tip-Critical-vulnerability-Protect-against-heap-based/ta-p/239420 27 | author: Nasreddine Bencherchali (Nextron Systems), Nilaa Maharjan, Douglasrose75 28 | date: 2024-02-08 29 | tags: 30 | - attack.initial-access 31 | - cve.2022-42475 32 | - detection.emerging-threats 33 | logsource: 34 | product: fortios 35 | service: sslvpnd 36 | definition: 'Requirements: file creation events or equivalent must be collected from the FortiOS SSL-VPN appliance in order for this detection to function correctly' 37 | detection: 38 | keywords: 39 | - '/data/etc/wxd.conf' 40 | - '/data/lib/libgif.so' 41 | ... 42 | condition: keywords 43 | falsepositives: 44 | - Unknown 45 | level: high 46 | ``` 47 | 48 | Source: https://github.com/SigmaHQ/sigma/blob/master/rules-emerging-threats/2022/Exploits/CVE-2022-42475/fortios_sslvpnd_exploit_cve_2022_42475_exploitation_indicators.yml/ 49 | -------------------------------------------------------------------------------- /docs/src/plugins/tanium.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Tanium 3 | category: EDR 4 | stage: planning 5 | --- 6 | 7 | # Tanium 8 | 9 | 10 | 11 | ::: tip Need This? 12 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) or [reach out](mailto:hello@logcraft.io) to initiate the integration of this technology :tada: 13 | ::: 14 | -------------------------------------------------------------------------------- /docs/src/plugins/yara.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Yara 3 | category: Format 4 | stage: planning 5 | --- 6 | 7 | # Yara rules 8 | 9 | 10 | 11 | ::: tip Need This? 
12 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) or [reach out](mailto:hello@logcraft.io) to initiate the integration of this technology :tada: 13 | ::: 14 | -------------------------------------------------------------------------------- /docs/src/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/src/public/favicon.ico -------------------------------------------------------------------------------- /docs/src/public/img/gitlab-3-stages.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/src/public/img/gitlab-3-stages.png -------------------------------------------------------------------------------- /docs/src/public/img/gitlab-cicd-variables.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/src/public/img/gitlab-cicd-variables.png -------------------------------------------------------------------------------- /docs/src/public/img/logcraft-cli-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/src/public/img/logcraft-cli-overview.png -------------------------------------------------------------------------------- /docs/src/public/img/logcraft-cli.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/src/public/img/logcraft-cli.webp -------------------------------------------------------------------------------- /docs/src/public/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/src/public/logo.png -------------------------------------------------------------------------------- /docs/src/public/robots.txt: -------------------------------------------------------------------------------- 1 | User-agent: * 2 | Allow: / 3 | -------------------------------------------------------------------------------- /docs/src/support.md: -------------------------------------------------------------------------------- 1 | --- 2 | vale: 3 | Google.We: NO 4 | Google.Will: NO 5 | --- 6 | 7 | # Getting help 8 | 9 | 10 | 11 | 12 | Reach out if you are experiencing any issue, have a suggestion for improvement, or need any other type of assistance. We will be happy to connect. 13 | 14 | 15 | 16 | 17 | ## Customer support 18 | 19 | Contact the customer service team at [support@logcraft.io](mailto:support@logcraft.io) 20 | 21 | ## Community support 22 | 23 | [Open a ticket](https://github.com/LogCraftIO/logcraft-cli/issues) with enough details about the issue encountered or the feature requested. 
You now the drill ;) 24 | -------------------------------------------------------------------------------- /docs/vale/.vale.ini: -------------------------------------------------------------------------------- 1 | StylesPath = styles 2 | MinAlertLevel = warning 3 | 4 | # installed packages, use `vale sync` to install/update 5 | Packages = Google, proselint 6 | 7 | # LogCraft terms (case sensitive) 8 | # `styles/config/vocabularies/LogCraft/{accept.txt,reject.txt}` 9 | Vocab = LogCraft 10 | 11 | [*.{md}] 12 | BasedOnStyles = Vale, Google, proselint 13 | TokenIgnores = (\w+@logcraft\.io) 14 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/AMPM.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Use 'AM' or 'PM' (preceded by a space)." 3 | link: "https://developers.google.com/style/word-list" 4 | level: error 5 | nonword: true 6 | tokens: 7 | - '\d{1,2}[AP]M\b' 8 | - '\d{1,2} ?[ap]m\b' 9 | - '\d{1,2} ?[aApP]\.[mM]\.' 10 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Acronyms.yml: -------------------------------------------------------------------------------- 1 | extends: conditional 2 | message: "Spell out '%s', if it's unfamiliar to the audience." 3 | link: 'https://developers.google.com/style/abbreviations' 4 | level: suggestion 5 | ignorecase: false 6 | # Ensures that the existence of 'first' implies the existence of 'second'. 7 | first: '\b([A-Z]{3,5})\b' 8 | second: '(?:\b[A-Z][a-z]+ )+\(([A-Z]{3,5})\)' 9 | # ... with the exception of these: 10 | exceptions: 11 | - API 12 | - ASP 13 | - CLI 14 | - CPU 15 | - CSS 16 | - CSV 17 | - DEBUG 18 | - DOM 19 | - DPI 20 | - FAQ 21 | - GCC 22 | - GDB 23 | - GET 24 | - GPU 25 | - GTK 26 | - GUI 27 | - HTML 28 | - HTTP 29 | - HTTPS 30 | - IDE 31 | - JAR 32 | - JSON 33 | - JSX 34 | - LESS 35 | - LLDB 36 | - NET 37 | - NOTE 38 | - NVDA 39 | - OSS 40 | - PATH 41 | - PDF 42 | - PHP 43 | - POST 44 | - RAM 45 | - REPL 46 | - RSA 47 | - SCM 48 | - SCSS 49 | - SDK 50 | - SQL 51 | - SSH 52 | - SSL 53 | - SVG 54 | - TBD 55 | - TCP 56 | - TODO 57 | - URI 58 | - URL 59 | - USB 60 | - UTF 61 | - XML 62 | - XSS 63 | - YAML 64 | - ZIP 65 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Colons.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' should be in lowercase." 3 | link: 'https://developers.google.com/style/colons' 4 | nonword: true 5 | level: warning 6 | scope: sentence 7 | tokens: 8 | - ':\s[A-Z]' 9 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Contractions.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: "Use '%s' instead of '%s'." 
3 | link: 'https://developers.google.com/style/contractions' 4 | level: suggestion 5 | ignorecase: true 6 | action: 7 | name: replace 8 | swap: 9 | are not: aren't 10 | cannot: can't 11 | could not: couldn't 12 | did not: didn't 13 | do not: don't 14 | does not: doesn't 15 | has not: hasn't 16 | have not: haven't 17 | how is: how's 18 | is not: isn't 19 | it is: it's 20 | should not: shouldn't 21 | that is: that's 22 | they are: they're 23 | was not: wasn't 24 | we are: we're 25 | we have: we've 26 | were not: weren't 27 | what is: what's 28 | when is: when's 29 | where is: where's 30 | will not: won't 31 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/DateFormat.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Use 'July 31, 2016' format, not '%s'." 3 | link: 'https://developers.google.com/style/dates-times' 4 | ignorecase: true 5 | level: error 6 | nonword: true 7 | tokens: 8 | - '\d{1,2}(?:\.|/)\d{1,2}(?:\.|/)\d{4}' 9 | - '\d{1,2} (?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)|May|Jun(?:e)|Jul(?:y)|Aug(?:ust)|Sep(?:tember)?|Oct(?:ober)|Nov(?:ember)?|Dec(?:ember)?) \d{4}' 10 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Ellipses.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "In general, don't use an ellipsis." 3 | link: 'https://developers.google.com/style/ellipses' 4 | nonword: true 5 | level: warning 6 | action: 7 | name: remove 8 | tokens: 9 | - '\.\.\.' 10 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/EmDash.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't put a space before or after a dash." 3 | link: "https://developers.google.com/style/dashes" 4 | nonword: true 5 | level: error 6 | action: 7 | name: edit 8 | params: 9 | - trim 10 | - " " 11 | tokens: 12 | - '\s[—–]\s' 13 | 14 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Exclamation.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't use exclamation points in text." 3 | link: "https://developers.google.com/style/exclamation-points" 4 | nonword: true 5 | level: error 6 | action: 7 | name: edit 8 | params: 9 | - trim_right 10 | - "!" 11 | tokens: 12 | - '\w+!(?:\s|$)' 13 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/FirstPerson.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Avoid first-person pronouns such as '%s'." 3 | link: 'https://developers.google.com/style/pronouns#personal-pronouns' 4 | ignorecase: true 5 | level: warning 6 | nonword: true 7 | tokens: 8 | - (?:^|\s)I\s 9 | - (?:^|\s)I,\s 10 | - \bI'm\b 11 | - \bme\b 12 | - \bmy\b 13 | - \bmine\b 14 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Gender.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't use '%s' as a gender-neutral pronoun." 
3 | link: 'https://developers.google.com/style/pronouns#gender-neutral-pronouns' 4 | level: error 5 | ignorecase: true 6 | tokens: 7 | - he/she 8 | - s/he 9 | - \(s\)he 10 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/GenderBias.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: "Consider using '%s' instead of '%s'." 3 | ignorecase: true 4 | link: "https://developers.google.com/style/inclusive-documentation" 5 | level: error 6 | action: 7 | name: replace 8 | swap: 9 | (?:alumna|alumnus): graduate 10 | (?:alumnae|alumni): graduates 11 | air(?:m[ae]n|wom[ae]n): pilot(s) 12 | anchor(?:m[ae]n|wom[ae]n): anchor(s) 13 | authoress: author 14 | camera(?:m[ae]n|wom[ae]n): camera operator(s) 15 | door(?:m[ae]|wom[ae]n): concierge(s) 16 | draft(?:m[ae]n|wom[ae]n): drafter(s) 17 | fire(?:m[ae]n|wom[ae]n): firefighter(s) 18 | fisher(?:m[ae]n|wom[ae]n): fisher(s) 19 | fresh(?:m[ae]n|wom[ae]n): first-year student(s) 20 | garbage(?:m[ae]n|wom[ae]n): waste collector(s) 21 | lady lawyer: lawyer 22 | ladylike: courteous 23 | mail(?:m[ae]n|wom[ae]n): mail carriers 24 | man and wife: husband and wife 25 | man enough: strong enough 26 | mankind: human kind|humanity 27 | manmade: manufactured 28 | manpower: personnel 29 | middle(?:m[ae]n|wom[ae]n): intermediary 30 | news(?:m[ae]n|wom[ae]n): journalist(s) 31 | ombuds(?:man|woman): ombuds 32 | oneupmanship: upstaging 33 | poetess: poet 34 | police(?:m[ae]n|wom[ae]n): police officer(s) 35 | repair(?:m[ae]n|wom[ae]n): technician(s) 36 | sales(?:m[ae]n|wom[ae]n): salesperson or sales people 37 | service(?:m[ae]n|wom[ae]n): soldier(s) 38 | steward(?:ess)?: flight attendant 39 | tribes(?:m[ae]n|wom[ae]n): tribe member(s) 40 | waitress: waiter 41 | woman doctor: doctor 42 | woman scientist[s]?: scientist(s) 43 | work(?:m[ae]n|wom[ae]n): worker(s) 44 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/HeadingPunctuation.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't put a period at the end of a heading." 3 | link: "https://developers.google.com/style/capitalization#capitalization-in-titles-and-headings" 4 | nonword: true 5 | level: warning 6 | scope: heading 7 | action: 8 | name: edit 9 | params: 10 | - trim_right 11 | - "." 12 | tokens: 13 | - '[a-z0-9][.]\s*$' 14 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Headings.yml: -------------------------------------------------------------------------------- 1 | extends: capitalization 2 | message: "'%s' should use sentence-style capitalization." 3 | link: "https://developers.google.com/style/capitalization#capitalization-in-titles-and-headings" 4 | level: warning 5 | scope: heading 6 | match: $sentence 7 | indicators: 8 | - ":" 9 | exceptions: 10 | - Azure 11 | - CLI 12 | - Cosmos 13 | - Docker 14 | - Emmet 15 | - gRPC 16 | - I 17 | - Kubernetes 18 | - Linux 19 | - macOS 20 | - Marketplace 21 | - MongoDB 22 | - REPL 23 | - Studio 24 | - TypeScript 25 | - URLs 26 | - Visual 27 | - VS 28 | - Windows 29 | - JSON 30 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Latin.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: "Use '%s' instead of '%s'." 
3 | link: 'https://developers.google.com/style/abbreviations' 4 | ignorecase: true 5 | level: error 6 | nonword: true 7 | action: 8 | name: replace 9 | swap: 10 | '\b(?:eg|e\.g\.)(?=[\s,;])': for example 11 | '\b(?:ie|i\.e\.)(?=[\s,;])': that is 12 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/LyHyphens.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' doesn't need a hyphen." 3 | link: "https://developers.google.com/style/hyphens" 4 | level: error 5 | ignorecase: false 6 | nonword: true 7 | action: 8 | name: edit 9 | params: 10 | - regex 11 | - "-" 12 | - " " 13 | tokens: 14 | - '\b[^\s-]+ly-\w+\b' 15 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/OptionalPlurals.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't use plurals in parentheses such as in '%s'." 3 | link: "https://developers.google.com/style/plurals-parentheses" 4 | level: error 5 | nonword: true 6 | action: 7 | name: edit 8 | params: 9 | - trim_right 10 | - "(s)" 11 | tokens: 12 | - '\b\w+\(s\)' 13 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Ordinal.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Spell out all ordinal numbers ('%s') in text." 3 | link: 'https://developers.google.com/style/numbers' 4 | level: error 5 | nonword: true 6 | tokens: 7 | - \d+(?:st|nd|rd|th) 8 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/OxfordComma.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Use the Oxford comma in '%s'." 3 | link: 'https://developers.google.com/style/commas' 4 | scope: sentence 5 | level: warning 6 | tokens: 7 | - '(?:[^,]+,){1,}\s\w+\s(?:and|or)' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Parens.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Use parentheses judiciously." 3 | link: 'https://developers.google.com/style/parentheses' 4 | nonword: true 5 | level: suggestion 6 | tokens: 7 | - '\(.+\)' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Passive.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | link: 'https://developers.google.com/style/voice' 3 | message: "In general, use active voice instead of passive voice ('%s')." 
4 | ignorecase: true 5 | level: suggestion 6 | raw: 7 | - \b(am|are|were|being|is|been|was|be)\b\s* 8 | tokens: 9 | - '[\w]+ed' 10 | - awoken 11 | - beat 12 | - become 13 | - been 14 | - begun 15 | - bent 16 | - beset 17 | - bet 18 | - bid 19 | - bidden 20 | - bitten 21 | - bled 22 | - blown 23 | - born 24 | - bought 25 | - bound 26 | - bred 27 | - broadcast 28 | - broken 29 | - brought 30 | - built 31 | - burnt 32 | - burst 33 | - cast 34 | - caught 35 | - chosen 36 | - clung 37 | - come 38 | - cost 39 | - crept 40 | - cut 41 | - dealt 42 | - dived 43 | - done 44 | - drawn 45 | - dreamt 46 | - driven 47 | - drunk 48 | - dug 49 | - eaten 50 | - fallen 51 | - fed 52 | - felt 53 | - fit 54 | - fled 55 | - flown 56 | - flung 57 | - forbidden 58 | - foregone 59 | - forgiven 60 | - forgotten 61 | - forsaken 62 | - fought 63 | - found 64 | - frozen 65 | - given 66 | - gone 67 | - gotten 68 | - ground 69 | - grown 70 | - heard 71 | - held 72 | - hidden 73 | - hit 74 | - hung 75 | - hurt 76 | - kept 77 | - knelt 78 | - knit 79 | - known 80 | - laid 81 | - lain 82 | - leapt 83 | - learnt 84 | - led 85 | - left 86 | - lent 87 | - let 88 | - lighted 89 | - lost 90 | - made 91 | - meant 92 | - met 93 | - misspelt 94 | - mistaken 95 | - mown 96 | - overcome 97 | - overdone 98 | - overtaken 99 | - overthrown 100 | - paid 101 | - pled 102 | - proven 103 | - put 104 | - quit 105 | - read 106 | - rid 107 | - ridden 108 | - risen 109 | - run 110 | - rung 111 | - said 112 | - sat 113 | - sawn 114 | - seen 115 | - sent 116 | - set 117 | - sewn 118 | - shaken 119 | - shaven 120 | - shed 121 | - shod 122 | - shone 123 | - shorn 124 | - shot 125 | - shown 126 | - shrunk 127 | - shut 128 | - slain 129 | - slept 130 | - slid 131 | - slit 132 | - slung 133 | - smitten 134 | - sold 135 | - sought 136 | - sown 137 | - sped 138 | - spent 139 | - spilt 140 | - spit 141 | - split 142 | - spoken 143 | - spread 144 | - sprung 145 | - spun 146 | - stolen 147 | - stood 148 | - stridden 149 | - striven 150 | - struck 151 | - strung 152 | - stuck 153 | - stung 154 | - stunk 155 | - sung 156 | - sunk 157 | - swept 158 | - swollen 159 | - sworn 160 | - swum 161 | - swung 162 | - taken 163 | - taught 164 | - thought 165 | - thrived 166 | - thrown 167 | - thrust 168 | - told 169 | - torn 170 | - trodden 171 | - understood 172 | - upheld 173 | - upset 174 | - wed 175 | - wept 176 | - withheld 177 | - withstood 178 | - woken 179 | - won 180 | - worn 181 | - wound 182 | - woven 183 | - written 184 | - wrung 185 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Periods.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't use periods with acronyms or initialisms such as '%s'." 3 | link: 'https://developers.google.com/style/abbreviations' 4 | level: error 5 | nonword: true 6 | tokens: 7 | - '\b(?:[A-Z]\.){3,}' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Quotes.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Commas and periods go inside quotation marks." 
3 | link: 'https://developers.google.com/style/quotation-marks' 4 | level: error 5 | nonword: true 6 | tokens: 7 | - '"[^"]+"[.,?]' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Ranges.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't add words such as 'from' or 'between' to describe a range of numbers." 3 | link: 'https://developers.google.com/style/hyphens' 4 | nonword: true 5 | level: warning 6 | tokens: 7 | - '(?:from|between)\s\d+\s?-\s?\d+' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Semicolons.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Use semicolons judiciously." 3 | link: 'https://developers.google.com/style/semicolons' 4 | nonword: true 5 | scope: sentence 6 | level: suggestion 7 | tokens: 8 | - ';' 9 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Slang.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Don't use internet slang abbreviations such as '%s'." 3 | link: 'https://developers.google.com/style/abbreviations' 4 | ignorecase: true 5 | level: error 6 | tokens: 7 | - 'tl;dr' 8 | - ymmv 9 | - rtfm 10 | - imo 11 | - fwiw 12 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Spacing.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' should have one space." 3 | link: 'https://developers.google.com/style/sentence-spacing' 4 | level: error 5 | nonword: true 6 | action: 7 | name: remove 8 | tokens: 9 | - '[a-z][.?!] {2,}[A-Z]' 10 | - '[a-z][.?!][A-Z]' 11 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Spelling.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "In general, use American spelling instead of '%s'." 3 | link: 'https://developers.google.com/style/spelling' 4 | ignorecase: true 5 | level: warning 6 | tokens: 7 | - '(?:\w+)nised?' 8 | - 'colour' 9 | - 'labour' 10 | - 'centre' 11 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Units.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Put a nonbreaking space between the number and the unit in '%s'." 3 | link: "https://developers.google.com/style/units-of-measure" 4 | nonword: true 5 | level: error 6 | tokens: 7 | - \b\d+(?:B|kB|MB|GB|TB) 8 | - \b\d+(?:ns|ms|s|min|h|d) 9 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/We.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Try to avoid using first-person plural like '%s'." 3 | link: 'https://developers.google.com/style/pronouns#personal-pronouns' 4 | level: warning 5 | ignorecase: true 6 | tokens: 7 | - we 8 | - we'(?:ve|re) 9 | - ours? 
10 | - us 11 | - let's 12 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/Will.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Avoid using '%s'." 3 | link: 'https://developers.google.com/style/tense' 4 | ignorecase: true 5 | level: warning 6 | tokens: 7 | - will 8 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/WordList.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: "Use '%s' instead of '%s'." 3 | link: "https://developers.google.com/style/word-list" 4 | level: warning 5 | ignorecase: false 6 | action: 7 | name: replace 8 | swap: 9 | "(?:API Console|dev|developer) key": API key 10 | "(?:cell ?phone|smart ?phone)": phone|mobile phone 11 | "(?:dev|developer|APIs) console": API console 12 | "(?:e-mail|Email|E-mail)": email 13 | "(?:file ?path|path ?name)": path 14 | "(?:kill|terminate|abort)": stop|exit|cancel|end 15 | "(?:OAuth ?2|Oauth)": OAuth 2.0 16 | "(?:ok|Okay)": OK|okay 17 | "(?:WiFi|wifi)": Wi-Fi 18 | '[\.]+apk': APK 19 | '3\-D': 3D 20 | 'Google (?:I\-O|IO)': Google I/O 21 | "tap (?:&|and) hold": touch & hold 22 | "un(?:check|select)": clear 23 | above: preceding 24 | account name: username 25 | action bar: app bar 26 | admin: administrator 27 | Ajax: AJAX 28 | a\.k\.a|aka: or|also known as 29 | Android device: Android-powered device 30 | android: Android 31 | API explorer: APIs Explorer 32 | application: app 33 | approx\.: approximately 34 | authN: authentication 35 | authZ: authorization 36 | autoupdate: automatically update 37 | cellular data: mobile data 38 | cellular network: mobile network 39 | chapter: documents|pages|sections 40 | check box: checkbox 41 | CLI: command-line tool 42 | click on: click|click in 43 | Cloud: Google Cloud Platform|GCP 44 | Container Engine: Kubernetes Engine 45 | content type: media type 46 | curated roles: predefined roles 47 | data are: data is 48 | Developers Console: Google API Console|API Console 49 | disabled?: turn off|off 50 | ephemeral IP address: ephemeral external IP address 51 | fewer data: less data 52 | file name: filename 53 | firewalls: firewall rules 54 | functionality: capability|feature 55 | Google account: Google Account 56 | Google accounts: Google Accounts 57 | Googling: search with Google 58 | grayed-out: unavailable 59 | HTTPs: HTTPS 60 | in order to: to 61 | ingest: import|load 62 | k8s: Kubernetes 63 | long press: touch & hold 64 | network IP address: internal IP address 65 | omnibox: address bar 66 | open-source: open source 67 | overview screen: recents screen 68 | regex: regular expression 69 | SHA1: SHA-1|HAS-SHA1 70 | sign into: sign in to 71 | sign-?on: single sign-on 72 | static IP address: static external IP address 73 | stylesheet: style sheet 74 | synch: sync 75 | tablename: table name 76 | tablet: device 77 | touch: tap 78 | url: URL 79 | vs\.: versus 80 | World Wide Web: web 81 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/meta.json: -------------------------------------------------------------------------------- 1 | { 2 | "feed": "https://github.com/errata-ai/Google/releases.atom", 3 | "vale_version": ">=1.0.0" 4 | } 5 | -------------------------------------------------------------------------------- /docs/vale/styles/Google/vocab.txt: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/vale/styles/Google/vocab.txt -------------------------------------------------------------------------------- /docs/vale/styles/config/vocabularies/LogCraft/accept.txt: -------------------------------------------------------------------------------- 1 | # LogCraft 2 | LogCraft 3 | lgc 4 | 5 | # Security Vendors 6 | CrowdStrike 7 | LimaCharlie 8 | Microsoft 9 | Palo 10 | Sekoia 11 | Splunk 12 | Tanium 13 | 14 | # Version Control Systems 15 | Bitbucket 16 | GitHub 17 | GitLab 18 | 19 | # MISC 20 | [tT]ada 21 | 22 | # Tech terms 23 | bool 24 | boolean 25 | dev 26 | enum 27 | https? 28 | (?i)jwt 29 | (?i)local 30 | [mM]onorepo 31 | moonrepo 32 | prod 33 | string 34 | Yara 35 | (?i)wasm 36 | Wolfi 37 | camelCase 38 | 39 | # Splunk 40 | savedsearches 41 | -------------------------------------------------------------------------------- /docs/vale/styles/config/vocabularies/LogCraft/reject.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LogCraftIO/logcraft-cli/f02ab1d1cc786c97676bb32803b95bb47bff0f37/docs/vale/styles/config/vocabularies/LogCraft/reject.txt -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Airlinese.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is airlinese." 3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - enplan(?:e|ed|ing|ement) 7 | - deplan(?:e|ed|ing|ement) 8 | - taking off momentarily 9 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/AnimalLabels.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: "Consider using '%s' instead of '%s'." 3 | level: error 4 | action: 5 | name: replace 6 | swap: 7 | (?:bull|ox)-like: taurine 8 | (?:calf|veal)-like: vituline 9 | (?:crow|raven)-like: corvine 10 | (?:leopard|panther)-like: pardine 11 | bird-like: avine 12 | centipede-like: scolopendrine 13 | crab-like: cancrine 14 | crocodile-like: crocodiline 15 | deer-like: damine 16 | eagle-like: aquiline 17 | earthworm-like: lumbricine 18 | falcon-like: falconine 19 | ferine: wild animal-like 20 | fish-like: piscine 21 | fox-like: vulpine 22 | frog-like: ranine 23 | goat-like: hircine 24 | goose-like: anserine 25 | gull-like: laridine 26 | hare-like: leporine 27 | hawk-like: accipitrine 28 | hippopotamus-like: hippopotamine 29 | lizard-like: lacertine 30 | mongoose-like: viverrine 31 | mouse-like: murine 32 | ostrich-like: struthionine 33 | peacock-like: pavonine 34 | porcupine-like: hystricine 35 | rattlesnake-like: crotaline 36 | sable-like: zibeline 37 | sheep-like: ovine 38 | shrew-like: soricine 39 | sparrow-like: passerine 40 | swallow-like: hirundine 41 | swine-like: suilline 42 | tiger-like: tigrine 43 | viper-like: viperine 44 | vulture-like: vulturine 45 | wasp-like: vespine 46 | wolf-like: lupine 47 | woodpecker-like: picine 48 | zebra-like: zebrine 49 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Annotations.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' left in text." 
3 | ignorecase: false 4 | level: error 5 | tokens: 6 | - XXX 7 | - FIXME 8 | - TODO 9 | - NOTE 10 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Apologizing.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Excessive apologizing: '%s'" 3 | ignorecase: true 4 | level: error 5 | action: 6 | name: remove 7 | tokens: 8 | - More research is needed 9 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Archaisms.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is archaic." 3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - alack 7 | - anent 8 | - begat 9 | - belike 10 | - betimes 11 | - boughten 12 | - brocage 13 | - brokage 14 | - camarade 15 | - chiefer 16 | - chiefest 17 | - Christiana 18 | - completely obsolescent 19 | - cozen 20 | - divers 21 | - deflexion 22 | - fain 23 | - forsooth 24 | - foreclose from 25 | - haply 26 | - howbeit 27 | - illumine 28 | - in sooth 29 | - maugre 30 | - meseems 31 | - methinks 32 | - nigh 33 | - peradventure 34 | - perchance 35 | - saith 36 | - shew 37 | - sistren 38 | - spake 39 | - to wit 40 | - verily 41 | - whilom 42 | - withal 43 | - wot 44 | - enclosed please find 45 | - please find enclosed 46 | - enclosed herewith 47 | - enclosed herein 48 | - inforce 49 | - ex postfacto 50 | - foreclose from 51 | - forewent 52 | - for ever 53 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/But.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Do not start a paragraph with a 'but'." 3 | level: error 4 | scope: paragraph 5 | action: 6 | name: remove 7 | tokens: 8 | - ^But 9 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/CorporateSpeak.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is corporate speak." 3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - at the end of the day 7 | - back to the drawing board 8 | - hit the ground running 9 | - get the ball rolling 10 | - low-hanging fruit 11 | - thrown under the bus 12 | - think outside the box 13 | - let's touch base 14 | - get my manager's blessing 15 | - it's on my radar 16 | - ping me 17 | - i don't have the bandwidth 18 | - no brainer 19 | - par for the course 20 | - bang for your buck 21 | - synergy 22 | - move the goal post 23 | - apples to apples 24 | - win-win 25 | - circle back around 26 | - all hands on deck 27 | - take this offline 28 | - drill-down 29 | - elephant in the room 30 | - on my plate 31 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Currency.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Incorrect use of symbols in '%s'." 3 | ignorecase: true 4 | raw: 5 | - \$[\d]* ?(?:dollars|usd|us dollars) 6 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Cursing.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Consider replacing '%s'." 
3 | level: error 4 | ignorecase: true 5 | tokens: 6 | - shit 7 | - piss 8 | - fuck 9 | - cunt 10 | - cocksucker 11 | - motherfucker 12 | - tits 13 | - fart 14 | - turd 15 | - twat 16 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/DateCase.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: With lowercase letters, the periods are standard. 3 | ignorecase: false 4 | level: error 5 | nonword: true 6 | tokens: 7 | - '\d{1,2} ?[ap]m\b' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/DateMidnight.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Use 'midnight' or 'noon'." 3 | ignorecase: true 4 | level: error 5 | nonword: true 6 | tokens: 7 | - '12 ?[ap]\.?m\.?' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/DateRedundancy.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'a.m.' is always morning; 'p.m.' is always night." 3 | ignorecase: true 4 | level: error 5 | nonword: true 6 | tokens: 7 | - '\d{1,2} ?a\.?m\.? in the morning' 8 | - '\d{1,2} ?p\.?m\.? in the evening' 9 | - '\d{1,2} ?p\.?m\.? at night' 10 | - '\d{1,2} ?p\.?m\.? in the afternoon' 11 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/DateSpacing.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "It's standard to put a space before '%s'" 3 | ignorecase: true 4 | level: error 5 | nonword: true 6 | tokens: 7 | - '\d{1,2}[ap]\.?m\.?' 8 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/DenizenLabels.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: Did you mean '%s'? 
3 | ignorecase: false 4 | action: 5 | name: replace 6 | swap: 7 | (?:Afrikaaner|Afrikander): Afrikaner 8 | (?:Hong Kongite|Hong Kongian): Hong Konger 9 | (?:Indianan|Indianian): Hoosier 10 | (?:Michiganite|Michiganian): Michigander 11 | (?:New Hampshireite|New Hampshireman): New Hampshirite 12 | (?:Newcastlite|Newcastleite): Novocastrian 13 | (?:Providencian|Providencer): Providentian 14 | (?:Trentian|Trentonian): Tridentine 15 | (?:Warsawer|Warsawian): Varsovian 16 | (?:Wolverhamptonite|Wolverhamptonian): Wulfrunian 17 | Alabaman: Alabamian 18 | Albuquerquian: Albuquerquean 19 | Anchoragite: Anchorageite 20 | Arizonian: Arizonan 21 | Arkansawyer: Arkansan 22 | Belarusan: Belarusian 23 | Cayman Islander: Caymanian 24 | Coloradoan: Coloradan 25 | Connecticuter: Nutmegger 26 | Fairbanksian: Fairbanksan 27 | Fort Worther: Fort Worthian 28 | Grenadian: Grenadan 29 | Halifaxer: Haligonian 30 | Hartlepoolian: Hartlepudlian 31 | Illinoisian: Illinoisan 32 | Iowegian: Iowan 33 | Leedsian: Leodenisian 34 | Liverpoolian: Liverpudlian 35 | Los Angelean: Angeleno 36 | Manchesterian: Mancunian 37 | Minneapolisian: Minneapolitan 38 | Missouran: Missourian 39 | Monacan: Monegasque 40 | Neopolitan: Neapolitan 41 | New Jerseyite: New Jerseyan 42 | New Orleansian: New Orleanian 43 | Oklahoma Citian: Oklahoma Cityan 44 | Oklahomian: Oklahoman 45 | Saudi Arabian: Saudi 46 | Seattlite: Seattleite 47 | Surinamer: Surinamese 48 | Tallahassean: Tallahasseean 49 | Tennesseean: Tennessean 50 | Trois-Rivièrester: Trifluvian 51 | Utahan: Utahn 52 | Valladolidian: Vallisoletano 53 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Diacritical.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: Consider using '%s' instead of '%s'. 
3 | ignorecase: true 4 | level: error 5 | action: 6 | name: replace 7 | swap: 8 | beau ideal: beau idéal 9 | boutonniere: boutonnière 10 | bric-a-brac: bric-à-brac 11 | cafe: café 12 | cause celebre: cause célèbre 13 | chevre: chèvre 14 | cliche: cliché 15 | consomme: consommé 16 | coup de grace: coup de grâce 17 | crudites: crudités 18 | creme brulee: crème brûlée 19 | creme de menthe: crème de menthe 20 | creme fraice: crème fraîche 21 | creme fresh: crème fraîche 22 | crepe: crêpe 23 | debutante: débutante 24 | decor: décor 25 | deja vu: déjà vu 26 | denouement: dénouement 27 | facade: façade 28 | fiance: fiancé 29 | fiancee: fiancée 30 | flambe: flambé 31 | garcon: garçon 32 | lycee: lycée 33 | maitre d: maître d 34 | menage a trois: ménage à trois 35 | negligee: négligée 36 | protege: protégé 37 | protegee: protégée 38 | puree: purée 39 | my resume: my résumé 40 | your resume: your résumé 41 | his resume: his résumé 42 | her resume: her résumé 43 | a resume: a résumé 44 | the resume: the résumé 45 | risque: risqué 46 | roue: roué 47 | soiree: soirée 48 | souffle: soufflé 49 | soupcon: soupçon 50 | touche: touché 51 | tete-a-tete: tête-à-tête 52 | voila: voilà 53 | a la carte: à la carte 54 | a la mode: à la mode 55 | emigre: émigré 56 | 57 | # Spanish loanwords 58 | El Nino: El Niño 59 | jalapeno: jalapeño 60 | La Nina: La Niña 61 | pina colada: piña colada 62 | senor: señor 63 | senora: señora 64 | senorita: señorita 65 | 66 | # Portuguese loanwords 67 | acai: açaí 68 | 69 | # German loanwords 70 | doppelganger: doppelgänger 71 | Fuhrer: Führer 72 | Gewurztraminer: Gewürztraminer 73 | vis-a-vis: vis-à-vis 74 | Ubermensch: Übermensch 75 | 76 | # Swedish loanwords 77 | filmjolk: filmjölk 78 | smorgasbord: smörgåsbord 79 | 80 | # Names, places, and companies 81 | Beyonce: Beyoncé 82 | Bronte: Brontë 83 | Champs-Elysees: Champs-Élysées 84 | Citroen: Citroën 85 | Curacao: Curaçao 86 | Lowenbrau: Löwenbräu 87 | Monegasque: Monégasque 88 | Motley Crue: Mötley Crüe 89 | Nescafe: Nescafé 90 | Queensryche: Queensrÿche 91 | Quebec: Québec 92 | Quebecois: Québécois 93 | Angstrom: Ångström 94 | angstrom: ångström 95 | Skoda: Škoda 96 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/GenderBias.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: Consider using '%s' instead of '%s'. 
3 | ignorecase: true 4 | level: error 5 | action: 6 | name: replace 7 | swap: 8 | (?:alumnae|alumni): graduates 9 | (?:alumna|alumnus): graduate 10 | air(?:m[ae]n|wom[ae]n): pilot(s) 11 | anchor(?:m[ae]n|wom[ae]n): anchor(s) 12 | authoress: author 13 | camera(?:m[ae]n|wom[ae]n): camera operator(s) 14 | chair(?:m[ae]n|wom[ae]n): chair(s) 15 | congress(?:m[ae]n|wom[ae]n): member(s) of congress 16 | door(?:m[ae]|wom[ae]n): concierge(s) 17 | draft(?:m[ae]n|wom[ae]n): drafter(s) 18 | fire(?:m[ae]n|wom[ae]n): firefighter(s) 19 | fisher(?:m[ae]n|wom[ae]n): fisher(s) 20 | fresh(?:m[ae]n|wom[ae]n): first-year student(s) 21 | garbage(?:m[ae]n|wom[ae]n): waste collector(s) 22 | lady lawyer: lawyer 23 | ladylike: courteous 24 | landlord: building manager 25 | mail(?:m[ae]n|wom[ae]n): mail carriers 26 | man and wife: husband and wife 27 | man enough: strong enough 28 | mankind: human kind 29 | manmade: manufactured 30 | men and girls: men and women 31 | middle(?:m[ae]n|wom[ae]n): intermediary 32 | news(?:m[ae]n|wom[ae]n): journalist(s) 33 | ombuds(?:man|woman): ombuds 34 | oneupmanship: upstaging 35 | poetess: poet 36 | police(?:m[ae]n|wom[ae]n): police officer(s) 37 | repair(?:m[ae]n|wom[ae]n): technician(s) 38 | sales(?:m[ae]n|wom[ae]n): salesperson or sales people 39 | service(?:m[ae]n|wom[ae]n): soldier(s) 40 | steward(?:ess)?: flight attendant 41 | tribes(?:m[ae]n|wom[ae]n): tribe member(s) 42 | waitress: waiter 43 | woman doctor: doctor 44 | woman scientist[s]?: scientist(s) 45 | work(?:m[ae]n|wom[ae]n): worker(s) 46 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/GroupTerms.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: Consider using '%s' instead of '%s'. 
3 | ignorecase: true 4 | action: 5 | name: replace 6 | swap: 7 | (?:bunch|group|pack|flock) of chickens: brood of chickens 8 | (?:bunch|group|pack|flock) of crows: murder of crows 9 | (?:bunch|group|pack|flock) of hawks: cast of hawks 10 | (?:bunch|group|pack|flock) of parrots: pandemonium of parrots 11 | (?:bunch|group|pack|flock) of peacocks: muster of peacocks 12 | (?:bunch|group|pack|flock) of penguins: muster of penguins 13 | (?:bunch|group|pack|flock) of sparrows: host of sparrows 14 | (?:bunch|group|pack|flock) of turkeys: rafter of turkeys 15 | (?:bunch|group|pack|flock) of woodpeckers: descent of woodpeckers 16 | (?:bunch|group|pack|herd) of apes: shrewdness of apes 17 | (?:bunch|group|pack|herd) of baboons: troop of baboons 18 | (?:bunch|group|pack|herd) of badgers: cete of badgers 19 | (?:bunch|group|pack|herd) of bears: sloth of bears 20 | (?:bunch|group|pack|herd) of bullfinches: bellowing of bullfinches 21 | (?:bunch|group|pack|herd) of bullocks: drove of bullocks 22 | (?:bunch|group|pack|herd) of caterpillars: army of caterpillars 23 | (?:bunch|group|pack|herd) of cats: clowder of cats 24 | (?:bunch|group|pack|herd) of colts: rag of colts 25 | (?:bunch|group|pack|herd) of crocodiles: bask of crocodiles 26 | (?:bunch|group|pack|herd) of dolphins: school of dolphins 27 | (?:bunch|group|pack|herd) of foxes: skulk of foxes 28 | (?:bunch|group|pack|herd) of gorillas: band of gorillas 29 | (?:bunch|group|pack|herd) of hippopotami: bloat of hippopotami 30 | (?:bunch|group|pack|herd) of horses: drove of horses 31 | (?:bunch|group|pack|herd) of jellyfish: fluther of jellyfish 32 | (?:bunch|group|pack|herd) of kangeroos: mob of kangeroos 33 | (?:bunch|group|pack|herd) of monkeys: troop of monkeys 34 | (?:bunch|group|pack|herd) of oxen: yoke of oxen 35 | (?:bunch|group|pack|herd) of rhinoceros: crash of rhinoceros 36 | (?:bunch|group|pack|herd) of wild boar: sounder of wild boar 37 | (?:bunch|group|pack|herd) of wild pigs: drift of wild pigs 38 | (?:bunch|group|pack|herd) of zebras: zeal of wild pigs 39 | (?:bunch|group|pack|school) of trout: hover of trout 40 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Hedging.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is hedging." 3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - I would argue that 7 | - ', so to speak' 8 | - to a certain degree 9 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Hyperbole.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is hyperbolic." 3 | level: error 4 | nonword: true 5 | tokens: 6 | - '[a-z]+[!?]{2,}' 7 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Jargon.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is jargon." 3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - in the affirmative 7 | - in the negative 8 | - agendize 9 | - per your order 10 | - per your request 11 | - disincentivize 12 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/LGBTOffensive.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is offensive. 
Remove it or consider the context." 3 | ignorecase: true 4 | tokens: 5 | - fag 6 | - faggot 7 | - dyke 8 | - sodomite 9 | - homosexual agenda 10 | - gay agenda 11 | - transvestite 12 | - homosexual lifestyle 13 | - gay lifestyle 14 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/LGBTTerms.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: "Consider using '%s' instead of '%s'." 3 | ignorecase: true 4 | action: 5 | name: replace 6 | swap: 7 | homosexual man: gay man 8 | homosexual men: gay men 9 | homosexual woman: lesbian 10 | homosexual women: lesbians 11 | homosexual people: gay people 12 | homosexual couple: gay couple 13 | sexual preference: sexual orientation 14 | (?:admitted homosexual|avowed homosexual): openly gay 15 | special rights: equal rights 16 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Malapropisms.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is a malapropism." 3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - the infinitesimal universe 7 | - a serial experience 8 | - attack my voracity 9 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Nonwords.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: "Consider using '%s' instead of '%s'." 3 | ignorecase: true 4 | level: error 5 | action: 6 | name: replace 7 | swap: 8 | affrontery: effrontery 9 | analyzation: analysis 10 | annoyment: annoyance 11 | confirmant: confirmand 12 | confirmants: confirmands 13 | conversate: converse 14 | crained: craned 15 | discomforture: discomfort|discomfiture 16 | dispersement: disbursement|dispersal 17 | doubtlessly: doubtless|undoubtedly 18 | forebearance: forbearance 19 | improprietous: improper 20 | inclimate: inclement 21 | inimicable: inimical 22 | irregardless: regardless 23 | minimalize: minimize 24 | minimalized: minimized 25 | minimalizes: minimizes 26 | minimalizing: minimizing 27 | optimalize: optimize 28 | paralyzation: paralysis 29 | pettifogger: pettifog 30 | proprietous: proper 31 | relative inexpense: relatively low price|affordability 32 | seldomly: seldom 33 | thusly: thus 34 | uncategorically: categorically 35 | undoubtably: undoubtedly|indubitably 36 | unequivocable: unequivocal 37 | unmercilessly: mercilessly 38 | unrelentlessly: unrelentingly|relentlessly 39 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Oxymorons.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is an oxymoron." 
3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - amateur expert 7 | - increasingly less 8 | - advancing backwards 9 | - alludes explicitly to 10 | - explicitly alludes to 11 | - totally obsolescent 12 | - completely obsolescent 13 | - generally always 14 | - usually always 15 | - increasingly less 16 | - build down 17 | - conspicuous absence 18 | - exact estimate 19 | - found missing 20 | - intense apathy 21 | - mandatory choice 22 | - organized mess 23 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/P-Value.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "You should use more decimal places, unless '%s' is really true." 3 | ignorecase: true 4 | level: suggestion 5 | tokens: 6 | - 'p = 0\.0{2,4}' 7 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/RASSyndrome.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is redundant." 3 | level: error 4 | action: 5 | name: edit 6 | params: 7 | - split 8 | - ' ' 9 | - '0' 10 | tokens: 11 | - ABM missile 12 | - ACT test 13 | - ABM missiles 14 | - ABS braking system 15 | - ATM machine 16 | - CD disc 17 | - CPI Index 18 | - GPS system 19 | - GUI interface 20 | - HIV virus 21 | - ISBN number 22 | - LCD display 23 | - PDF format 24 | - PIN number 25 | - RAS syndrome 26 | - RIP in peace 27 | - please RSVP 28 | - SALT talks 29 | - SAT test 30 | - UPC codes 31 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/README.md: -------------------------------------------------------------------------------- 1 | Copyright © 2014–2015, Jordan Suchow, Michael Pacer, and Lara A. Ross 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | 8 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 10 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 11 | 12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
13 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Skunked.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is a bit of a skunked term — impossible to use without issue." 3 | ignorecase: true 4 | level: error 5 | tokens: 6 | - bona fides 7 | - deceptively 8 | - decimate 9 | - effete 10 | - fulsome 11 | - hopefully 12 | - impassionate 13 | - Thankfully 14 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Spelling.yml: -------------------------------------------------------------------------------- 1 | extends: consistency 2 | message: "Inconsistent spelling of '%s'." 3 | level: error 4 | ignorecase: true 5 | either: 6 | advisor: adviser 7 | centre: center 8 | colour: color 9 | emphasise: emphasize 10 | finalise: finalize 11 | focussed: focused 12 | labour: labor 13 | learnt: learned 14 | organise: organize 15 | organised: organized 16 | organising: organizing 17 | recognise: recognize 18 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Typography.yml: -------------------------------------------------------------------------------- 1 | extends: substitution 2 | message: Consider using the '%s' symbol instead of '%s'. 3 | level: error 4 | nonword: true 5 | swap: 6 | '\.\.\.': … 7 | '\([cC]\)': © 8 | '\(TM\)': ™ 9 | '\(tm\)': ™ 10 | '\([rR]\)': ® 11 | '[0-9]+ ?x ?[0-9]+': × 12 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Uncomparables.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "'%s' is not comparable" 3 | ignorecase: true 4 | level: error 5 | action: 6 | name: edit 7 | params: 8 | - split 9 | - ' ' 10 | - '1' 11 | raw: 12 | - \b(?:absolutely|most|more|less|least|very|quite|largely|extremely|increasingly|kind of|mildy|hardly|greatly|sort of)\b\s* 13 | tokens: 14 | - absolute 15 | - adequate 16 | - complete 17 | - correct 18 | - certain 19 | - devoid 20 | - entire 21 | - 'false' 22 | - fatal 23 | - favorite 24 | - final 25 | - ideal 26 | - impossible 27 | - inevitable 28 | - infinite 29 | - irrevocable 30 | - main 31 | - manifest 32 | - only 33 | - paramount 34 | - perfect 35 | - perpetual 36 | - possible 37 | - preferable 38 | - principal 39 | - singular 40 | - stationary 41 | - sufficient 42 | - 'true' 43 | - unanimous 44 | - unavoidable 45 | - unbroken 46 | - uniform 47 | - unique 48 | - universal 49 | - void 50 | - whole 51 | -------------------------------------------------------------------------------- /docs/vale/styles/proselint/Very.yml: -------------------------------------------------------------------------------- 1 | extends: existence 2 | message: "Remove '%s'." 
3 | ignorecase: true
4 | level: error
5 | tokens:
6 | - very
7 |
--------------------------------------------------------------------------------
/docs/vale/styles/proselint/meta.json:
--------------------------------------------------------------------------------
1 | {
2 | "author": "jdkato",
3 | "description": "A Vale-compatible implementation of the proselint linter.",
4 | "email": "support@errata.ai",
5 | "lang": "en",
6 | "url": "https://github.com/errata-ai/proselint/releases/latest/download/proselint.zip",
7 | "feed": "https://github.com/errata-ai/proselint/releases.atom",
8 | "issues": "https://github.com/errata-ai/proselint/issues/new",
9 | "license": "BSD-3-Clause",
10 | "name": "proselint",
11 | "sources": [
12 | "https://github.com/amperser/proselint"
13 | ],
14 | "vale_version": ">=1.0.0",
15 | "coverage": 0.0,
16 | "version": "0.1.0"
17 | }
18 |
--------------------------------------------------------------------------------
/libs/bindings/plugin.wit:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2023 LogCraft.io.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | interface plugin {
5 | type bytes = list<u8>;
6 |
7 | /// The metadata for a plugin used for registration and setup
8 | record metadata {
9 | /// The friendly name of the plugin
10 | name: string,
11 | /// The version of the plugin
12 | version: string,
13 | }
14 |
15 | // Plugin actions
16 | load: func() -> metadata;
17 | settings: func() -> result;
18 | schema: func() -> result;
19 | validate: func(detection: bytes) -> result<_, string>;
20 |
21 | // Service actions
22 | // CRUD definition
23 | create: func(config: bytes, detection: bytes) -> result<_, string>;
24 | read: func(config: bytes, detection: bytes) -> result<option<bytes>, string>;
25 | update: func(config: bytes, detection: bytes) -> result<_, string>;
26 | delete: func(config: bytes, detection: bytes) -> result<_, string>;
27 |
28 | // Miscellaneous
29 | ping: func(config: bytes) -> result;
30 | }
--------------------------------------------------------------------------------
/libs/bindings/world.wit:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2023 LogCraft.io.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | package logcraft:lgc@0.1.0;
5 |
6 | /// The logcraft world for the component to target.
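/// Plugins are built as WebAssembly components that target this world and
/// therefore export the `plugin` interface defined in `plugin.wit`.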
7 | world plugins {
8 | // Imports
9 |
10 | // Exports
11 | export plugin;
12 | }
13 |
--------------------------------------------------------------------------------
/libs/lgc-common/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "lgc-common"
3 | version = "0.1.0"
4 | edition.workspace = true
5 | rust-version.workspace = true
6 | repository.workspace = true
7 |
8 | [dependencies]
9 | # Local dependencies
10 | lgc-runtime.workspace = true
11 | lgc-policies.workspace = true
12 |
13 | # Utils
14 | tracing.workspace = true
15 | tracing-subscriber.workspace = true
16 | dialoguer.workspace = true
17 | uuid = { version = "1.13", features = ["serde", "v4"] }
18 | similar = "2.7"
19 | console.workspace = true
20 | once_cell = "1.20"
21 | regex = "1.10"
22 | tempfile = "3.10"
23 | fs4 = { version = "0.13", features = ["tokio"] }
24 |
25 | # Async
26 | tokio.workspace = true
27 |
28 | # Ser / Deser
29 | anyhow.workspace = true
30 | serde.workspace = true
31 | serde_json.workspace = true
32 | serde_yaml_ng.workspace = true
33 | toml.workspace = true
34 | serde_with = "3.8"
35 | jsonschema.workspace = true
36 | schemars.workspace = true
37 |
38 | # HTTP related
39 | reqwest.workspace = true
40 | url.workspace = true
41 |
42 | # WASM Related
43 | wasmtime.workspace = true
44 |
--------------------------------------------------------------------------------
/libs/lgc-common/moon.yml:
--------------------------------------------------------------------------------
1 | language: 'rust'
2 | type: 'library'
3 |
4 | project:
5 |   name: 'lgc-common'
6 |   description: 'LogCraft CLI common library'
--------------------------------------------------------------------------------
/libs/lgc-common/src/detections.rs:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2023 LogCraft.io.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | use serde_json::Value;
5 | use std::collections;
6 |
7 | // Helper types to store detections per service
8 | pub type PluginsDetections = collections::HashMap<String, collections::HashMap<String, Value>>;
9 |
10 | /// Detection type alias for a detection path and its content.
11 | pub type Detection = (String, Vec<u8>);
12 |
--------------------------------------------------------------------------------
/libs/lgc-common/src/lib.rs:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2023 LogCraft.io.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | // LogCraft common library
5 | pub mod configuration;
6 | pub mod detections;
7 | pub mod diff;
8 | pub mod plugins;
9 | pub mod state;
10 | pub mod utils;
11 |
--------------------------------------------------------------------------------
/libs/lgc-common/src/plugins/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2023 LogCraft.io.
2 | // SPDX-License-Identifier: MPL-2.0
3 |
4 | // Local modules
5 | pub mod manager;
6 |
7 | pub const LGC_PLUGINS_PATH: &str = ".logcraft/plugins";
8 |
--------------------------------------------------------------------------------
/libs/lgc-common/src/state/backends/local.rs:
--------------------------------------------------------------------------------
1 | // Copyright (c) 2023 LogCraft.io.
2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use anyhow::{bail, Context, Result}; 5 | use fs4::tokio::AsyncFileExt; 6 | use serde::{Deserialize, Serialize}; 7 | use std::{path::PathBuf, sync::Arc}; 8 | use tokio::{fs, sync::Mutex}; 9 | use uuid::Uuid; 10 | 11 | use super::BackendActions; 12 | use crate::state::{State, LGC_DEFAULT_STATE_PATH}; 13 | 14 | // Define the ENOLCK error code (37 on Linux) 15 | const ENOLCK: i32 = 37; 16 | 17 | #[derive(Serialize, Deserialize, Clone)] 18 | /// Mimic OpenTofu HTTP state backend configuration variables. 19 | /// [Documentation](https://opentofu.org/docs/language/settings/backends/http) 20 | pub struct LocalBackend { 21 | /// REST endpoint. 22 | path: PathBuf, 23 | /// This field is skipped during serialization/deserialization. 24 | #[serde(skip)] 25 | lock_file: Arc>>, 26 | } 27 | 28 | impl Default for LocalBackend { 29 | fn default() -> Self { 30 | Self { 31 | path: PathBuf::from(LGC_DEFAULT_STATE_PATH), 32 | lock_file: Arc::new(Mutex::new(None)), 33 | } 34 | } 35 | } 36 | 37 | impl BackendActions for LocalBackend { 38 | /// Loads the state. 39 | /// If the file does not exist, returns (false, State::default()). 40 | async fn load(&self) -> Result<(bool, State)> { 41 | if fs::metadata(&self.path).await.is_err() { 42 | return Ok((false, State::default())); 43 | } 44 | let contents = fs::read_to_string(&self.path) 45 | .await 46 | .with_context(|| format!("unable to read state file: {}", self.path.display()))?; 47 | let state: State = serde_json::from_str(&contents) 48 | .with_context(|| format!("unable to parse state file: {}", self.path.display()))?; 49 | Ok((true, state)) 50 | } 51 | 52 | /// Saves the state. 53 | async fn save(&self, state: &mut State) -> Result<()> { 54 | state.serial += 1; 55 | state.lgc_version = env!("CARGO_PKG_VERSION").to_string(); 56 | let contents = serde_json::to_string_pretty(state).with_context(|| { 57 | format!( 58 | "unable to serialize state for file: {}", 59 | self.path.display() 60 | ) 61 | })?; 62 | 63 | // Create parent directories if they don't exist. 64 | if let Some(parent) = self.path.parent() { 65 | fs::create_dir_all(parent).await.with_context(|| { 66 | format!("unable to create directories for {}", parent.display()) 67 | })?; 68 | } 69 | 70 | // Write the state to disk. 71 | fs::write(&self.path, contents) 72 | .await 73 | .with_context(|| format!("unable to write state file: {}", self.path.display())) 74 | } 75 | 76 | /// Locks the state. 77 | /// The locked file handle is stored so that the lock remains active. 78 | async fn lock(&self) -> Result> { 79 | // Try to open the file. If it doesn't exist, just skip locking. 80 | let file = match fs::OpenOptions::new() 81 | .read(true) 82 | .write(true) 83 | .open(&self.path) 84 | .await 85 | { 86 | Ok(file) => file, 87 | Err(e) if e.kind() == std::io::ErrorKind::NotFound => { 88 | // File not found, so we ignore locking. 89 | return Ok(None); 90 | } 91 | Err(e) => { 92 | bail!("unable to open state file {}: {}", self.path.display(), e); 93 | } 94 | }; 95 | 96 | if let Err(e) = file.try_lock_exclusive() { 97 | if let Some(code) = e.raw_os_error() { 98 | if code == ENOLCK { 99 | // Proceed without locking. 
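// Some filesystems (for example certain network mounts) do not support POSIX
// advisory locks and report ENOLCK; rather than failing, degrade to lock-free
// operation and emit the warning below.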
100 | tracing::warn!( 101 | "filesystem does not support file locking on `{}`; proceeding without lock", 102 | self.path.display() 103 | ); 104 | } else { 105 | match e.kind() { 106 | std::io::ErrorKind::WouldBlock => { 107 | bail!("state file `{}` is locked", self.path.display()); 108 | } 109 | _ => { 110 | bail!( 111 | "unable to acquire lock on state file `{}`: {}", 112 | self.path.display(), 113 | e 114 | ) 115 | } 116 | } 117 | } 118 | } else { 119 | bail!( 120 | "unable to acquire lock on state file `{}`: {}", 121 | self.path.display(), 122 | e 123 | ); 124 | } 125 | } 126 | 127 | // Store the file handle in our async lock. 128 | let mut guard = self.lock_file.lock().await; 129 | *guard = Some(file); 130 | Ok(None) 131 | } 132 | 133 | /// Unlocks the state. 134 | async fn unlock(&self, _lock_token: Option) -> Result<()> { 135 | let mut guard = self.lock_file.lock().await; 136 | if let Some(file) = guard.as_mut() { 137 | // Try to unlock the file. 138 | match file.unlock_async().await { 139 | Ok(()) => {} 140 | // If the unlock fails because the file is gone, ignore that error. 141 | Err(e) if e.kind() == std::io::ErrorKind::NotFound => {} 142 | Err(e) => { 143 | return Err(e).with_context(|| { 144 | format!("unable to unlock state file {}", self.path.display()) 145 | }); 146 | } 147 | } 148 | } 149 | // Clear the stored file handle. 150 | *guard = None; 151 | Ok(()) 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /libs/lgc-common/src/state/backends/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | mod http; 5 | mod local; 6 | 7 | use anyhow::Result; 8 | use serde::{Deserialize, Serialize}; 9 | use uuid::Uuid; 10 | 11 | /// Represents the state backend configuration. 12 | #[derive(Serialize, Deserialize, Clone)] 13 | #[serde(tag = "type", rename_all = "lowercase")] 14 | pub enum StateBackend { 15 | /// Local state backend. 16 | Local(local::LocalBackend), 17 | /// HTTP state backend. 18 | Http(Box), 19 | } 20 | 21 | impl StateBackend { 22 | /// Loads the state. 23 | pub async fn load(&self) -> Result<(bool, super::State)> { 24 | match self { 25 | Self::Local(backend) => backend.load().await, 26 | Self::Http(backend) => backend.load().await, 27 | } 28 | } 29 | 30 | /// Saves the state. 31 | pub async fn save(&self, state: &mut super::State) -> Result<()> { 32 | match self { 33 | Self::Local(backend) => backend.save(state).await, 34 | Self::Http(backend) => backend.save(state).await, 35 | } 36 | } 37 | 38 | /// Locks the state. 39 | pub async fn lock(&self) -> Result> { 40 | match self { 41 | Self::Local(backend) => backend.lock().await, 42 | Self::Http(backend) => backend.lock().await, 43 | } 44 | } 45 | 46 | /// Unlocks the state. 47 | pub async fn unlock(&self, token: Option) -> Result<()> { 48 | match self { 49 | Self::Local(backend) => backend.unlock(token).await, 50 | Self::Http(backend) => backend.unlock(token).await, 51 | } 52 | } 53 | } 54 | 55 | impl Default for StateBackend { 56 | fn default() -> Self { 57 | Self::Local(local::LocalBackend::default()) 58 | } 59 | } 60 | 61 | /// State backends actions. 
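/// Backends implement asynchronous `load`/`save` plus advisory locking: `lock`
/// may return a lock token that is later handed back to `unlock`, while backends
/// that hold the lock in-process (such as the local file backend) return `None`.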
62 | pub trait BackendActions { 63 | fn load( 64 | &self, 65 | ) -> impl std::future::Future> + Send; 66 | fn save( 67 | &self, 68 | state: &mut super::State, 69 | ) -> impl std::future::Future> + Send; 70 | fn lock(&self) -> impl std::future::Future>> + Send; 71 | fn unlock( 72 | &self, 73 | token: Option, 74 | ) -> impl std::future::Future> + Send; 75 | } 76 | -------------------------------------------------------------------------------- /libs/lgc-common/src/state/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use std::collections; 5 | 6 | use crate::detections::PluginsDetections; 7 | use serde::{Deserialize, Serialize}; 8 | use uuid::Uuid; 9 | 10 | const LGC_DEFAULT_STATE_PATH: &str = ".logcraft/state.json"; 11 | const LGC_STATE_VERSION: usize = 1; 12 | 13 | pub mod backends; 14 | 15 | #[derive(Serialize, Deserialize, Clone, Debug)] 16 | pub struct State { 17 | /// State unique ID 18 | lineage: Uuid, 19 | /// Serial number of the state file. 20 | /// Increments every time the state file is written. 21 | serial: usize, 22 | /// Version of the state schema 23 | version: usize, 24 | /// Version of LogCraft CLI 25 | lgc_version: String, 26 | /// List of rules to track service_name => (rule_name, rule_settings) 27 | pub services: PluginsDetections, 28 | } 29 | 30 | impl Default for State { 31 | fn default() -> Self { 32 | Self { 33 | lineage: Uuid::new_v4(), 34 | serial: 0, 35 | version: LGC_STATE_VERSION, 36 | lgc_version: env!("CARGO_PKG_VERSION").to_string(), 37 | services: std::collections::HashMap::new(), 38 | } 39 | } 40 | } 41 | 42 | impl State { 43 | pub fn merge_synced(&mut self, detections: PluginsDetections) { 44 | for (service, plugin_rules) in detections { 45 | // If the service already exists, update or remove retrieved rules. 46 | if let Some(existing_rules) = self.services.get_mut(&service) { 47 | for (rule_key, rule_val) in plugin_rules { 48 | if rule_val.is_null() { 49 | existing_rules.remove(&rule_key); 50 | } else { 51 | existing_rules.insert(rule_key, rule_val); 52 | } 53 | } 54 | } else { 55 | // Remove null values retrieved rules. 56 | let plugin_rules = plugin_rules 57 | .into_iter() 58 | .filter(|(_, val)| !val.is_null()) 59 | .collect(); 60 | 61 | // Or insert the new service with its rules. 62 | self.services.insert(service, plugin_rules); 63 | } 64 | } 65 | } 66 | 67 | /// Consumes the detection data for the given service from the state 68 | /// and returns a mapping of rule keys to their JSON‐serialized values. 69 | /// 70 | /// If no detection data is found, an info message is logged and `Ok(None)` is returned. 71 | pub fn take_serialized_detections( 72 | &mut self, 73 | service_name: &str, 74 | ) -> Result>>, serde_json::Error> { 75 | if let Some(detections) = self.services.remove(service_name) { 76 | // Return the serialized detections. 77 | Ok(Some( 78 | detections 79 | .into_iter() 80 | .map(|(rule_key, rule_val)| { 81 | // If serialization fails, propagate the error. 82 | Ok((rule_key, serde_json::to_vec(&rule_val)?)) 83 | }) 84 | .collect::, _>>()?, 85 | )) 86 | } else { 87 | Ok(None) 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /libs/lgc-common/src/utils.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 
2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use std::{collections::HashMap, path::PathBuf}; 5 | 6 | use anyhow::{bail, Result}; 7 | 8 | use crate::configuration::{DetectionContext, LGC_BASE_DIR}; 9 | 10 | /// Ensure that a string is in kebab-case format 11 | pub fn ensure_kebab_case(name: String) -> Result { 12 | let mut chars = name.chars(); 13 | 14 | // Validate the first character must be alphanumeric (lowercase or digit). 15 | match chars.next() { 16 | Some(c) if c.is_ascii_lowercase() || c.is_ascii_digit() => (), 17 | _ => bail!("invalid format `{}`, must be kebab-case", name), 18 | } 19 | 20 | // Iterate over the remaining characters 21 | while let Some(current) = chars.next() { 22 | if current == '-' { 23 | // A hyphen cannot be the last character 24 | // and must be followed by a valid alphanumeric character. 25 | match chars.next() { 26 | Some(next) if next.is_ascii_lowercase() || next.is_ascii_digit() => (), 27 | // Either no character after the hyphen or it's invalid 28 | _ => bail!("invalid format `{}`, must be kebab-case", name), 29 | } 30 | } else if current.is_ascii_lowercase() || current.is_ascii_digit() { 31 | // Valid alphanumeric — continue checking 32 | } else { 33 | // Invalid character found 34 | bail!("invalid format `{}`, must be kebab-case", name); 35 | } 36 | } 37 | 38 | // If all checks pass, return the original String 39 | Ok(name) 40 | } 41 | 42 | /// Check if a string contains forbidden characters for environment variables 43 | pub fn env_forbidden_chars(s: &str) -> bool { 44 | for c in s.chars() { 45 | if c == '$' || c == '{' || c == '}' { 46 | return true; 47 | } 48 | } 49 | false 50 | } 51 | 52 | /// Convert a string to kebab-case 53 | pub fn to_kebab_case(input: &str) -> Result { 54 | // Check if input is empty 55 | if input.is_empty() { 56 | return Err("invalid input, must not be empty"); 57 | } 58 | 59 | // Create a new string with the same capacity as the input 60 | let mut kebab = String::with_capacity(input.len()); 61 | let mut prev_char_was_delimiter = true; // Avoids leading hyphen 62 | let chars = input.chars().peekable(); 63 | 64 | for c in chars { 65 | if c.is_ascii_alphabetic() || c.is_ascii_digit() { 66 | if c.is_uppercase() { 67 | kebab.push(c.to_ascii_lowercase()); 68 | } else { 69 | kebab.push(c); 70 | } 71 | prev_char_was_delimiter = false; 72 | } else if c == ' ' || c == '_' || c == '-' { 73 | // Replace delimiters with a single hyphen. 74 | if !prev_char_was_delimiter && !kebab.ends_with('-') { 75 | kebab.push('-'); 76 | prev_char_was_delimiter = true; 77 | } 78 | } else { 79 | // For any other characters, you can choose to skip or handle them. 80 | // Here, we'll skip them. 81 | // Alternatively, you could replace them with a hyphen or remove them. 82 | } 83 | } 84 | 85 | // Check if formatted string is empty 86 | if kebab.is_empty() { 87 | return Err("invalid input, must have at least one alphanumeric character"); 88 | } 89 | 90 | // Remove trailing hyphen if present. 
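// (A delimiter at the end of the input leaves one behind, e.g. "My Rule "
// becomes "my-rule-" before this step.)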
91 | if kebab.ends_with('-') { 92 | kebab.pop(); 93 | } 94 | 95 | Ok(kebab) 96 | } 97 | 98 | pub fn filter_missing_plugins( 99 | base_dir: Option, 100 | workspace: &str, 101 | context: &mut HashMap, 102 | ) -> PathBuf 103 | where 104 | T: AsRef, 105 | { 106 | let plugins_dir = PathBuf::from(base_dir.as_deref().unwrap_or(LGC_BASE_DIR)).join("plugins"); 107 | 108 | context.retain(|name, _| { 109 | let exists = plugins_dir.join(name).with_extension("wasm").exists(); 110 | if !exists { 111 | tracing::warn!("ignoring '{}/{}' (no matching plugin).", workspace, name); 112 | } 113 | exists 114 | }); 115 | 116 | plugins_dir 117 | } 118 | -------------------------------------------------------------------------------- /libs/lgc-policies/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "lgc-policies" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | repository.workspace = true 7 | 8 | [dependencies] 9 | serde.workspace = true 10 | serde_json.workspace = true 11 | 12 | [dev-dependencies] 13 | rstest = "0.25.0" 14 | jsonschema.workspace = true 15 | serde_yaml_ng.workspace = true 16 | -------------------------------------------------------------------------------- /libs/lgc-policies/moon.yml: -------------------------------------------------------------------------------- 1 | language: 'rust' 2 | type: 'library' 3 | 4 | project: 5 | name: 'lgc-policies' 6 | description: 'LogCraft CLI policies library' -------------------------------------------------------------------------------- /libs/lgc-policies/src/helpers.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use serde_json::{json, Value}; 5 | 6 | /// Parses the target field into a list of parts for path composition. 7 | pub(crate) fn parse_field(field: &str) -> Vec<&str> { 8 | if field.starts_with('/') { 9 | field.trim_start_matches('/').split('/').collect() 10 | } else { 11 | field.split('.').collect() 12 | } 13 | } 14 | 15 | /// Builds a nested JSON Schema. 16 | pub(crate) fn build_nested(parts: &[&str], leaf: Value) -> Value { 17 | parts.iter().rev().fold(leaf, |acc, &part| { 18 | json!({ 19 | "type": "object", 20 | "properties": { part: acc }, 21 | "required": [part] 22 | }) 23 | }) 24 | } 25 | -------------------------------------------------------------------------------- /libs/lgc-policies/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | pub(crate) mod helpers; 5 | pub mod policy; 6 | pub(crate) mod schema; 7 | -------------------------------------------------------------------------------- /libs/lgc-policies/src/policy.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use serde::Deserialize; 5 | 6 | /// Policy defining a rule for a given field. 7 | #[derive(Debug, Deserialize)] 8 | pub struct Policy { 9 | /// Field in JSON Pointer style (e.g. "/parameters/disabled"). 10 | pub field: String, 11 | /// Type of check. 12 | pub check: CheckKind, 13 | /// Severity of the policy: warning or error. 14 | pub severity: Severity, 15 | /// Custom error message. May contain the placeholder `${fieldName}`. 
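/// For example, "${fieldName} must not be empty" is rendered with the placeholder
/// replaced by this policy's `field` value (such as "/parameters/disabled").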
16 | pub message: Option<String>, 17 | /// Whether matching is case-insensitive (default is false). 18 | pub ignorecase: Option<bool>, 19 | /// Pattern checks. 20 | /// jsonschema uses ECMA 262 regex. 21 | /// [information](https://json-schema.org/understanding-json-schema/reference/regular_expressions) 22 | pub regex: Option<String>, 23 | /// For constraint checks: additional parameters. 24 | pub validations: Option<Constraint>, 25 | } 26 | 27 | /// Type of check to perform. 28 | #[derive(Debug, Deserialize)] 29 | #[serde(rename_all = "lowercase")] 30 | pub enum CheckKind { 31 | Existence, 32 | Absence, 33 | Pattern, 34 | Constraint, 35 | } 36 | 37 | /// Constraint parameters for the "constraint" check. 38 | #[derive(Debug, Deserialize)] 39 | pub struct Constraint { 40 | #[serde(rename = "minLength")] 41 | pub min_length: Option<u64>, 42 | #[serde(rename = "maxLength")] 43 | pub max_length: Option<u64>, 44 | /// Optional list of allowed values. 45 | pub values: Option<Vec<String>>, 46 | } 47 | 48 | /// Severity output level. 49 | #[derive(Debug, Deserialize)] 50 | #[serde(rename_all = "lowercase")] 51 | pub enum Severity { 52 | Warning, 53 | Error, 54 | } 55 | 56 | impl std::fmt::Display for Severity { 57 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 58 | match self { 59 | Severity::Warning => write!(f, "warning"), 60 | Severity::Error => write!(f, "error"), 61 | } 62 | } 63 | } 64 | 65 | /// Default error messages for policies. 66 | impl Policy { 67 | /// Returns the default error message for this policy, based on its check kind and severity. 68 | pub fn default_message(&self) -> String { 69 | match (&self.check, &self.severity) { 70 | (CheckKind::Existence, Severity::Warning) => { 71 | format!("field '{}' should be present", self.field) 72 | } 73 | (CheckKind::Existence, Severity::Error) => { 74 | format!("field '{}' must be present", self.field) 75 | } 76 | (CheckKind::Absence, Severity::Warning) => { 77 | format!("field '{}' shouldn't be present", self.field) 78 | } 79 | (CheckKind::Absence, Severity::Error) => { 80 | format!("field '{}' must not be present", self.field) 81 | } 82 | (CheckKind::Constraint, Severity::Warning) => { 83 | format!("field '{}' doesn't respect constraint", self.field) 84 | } 85 | (CheckKind::Constraint, Severity::Error) => { 86 | format!("field '{}' doesn't respect constraint", self.field) 87 | } 88 | (CheckKind::Pattern, Severity::Warning) => { 89 | format!("field '{}' doesn't match pattern", self.field) 90 | } 91 | (CheckKind::Pattern, Severity::Error) => { 92 | format!("field '{}' doesn't match pattern", self.field) 93 | } 94 | } 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /libs/lgc-policies/src/schema.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use super::{ 5 | helpers, 6 | policy::{CheckKind, Policy}, 7 | }; 8 | use serde_json::{json, Value}; 9 | 10 | const FIELD_PARAM: &str = "${fieldName}"; 11 | 12 | impl Policy { 13 | /// Generates a JSON Schema for a given policy. 14 | pub fn to_schema(&self) -> Result<Value, &'static str> { 15 | // Use default message if no custom message is provided. 16 | let msg = if let Some(ref m) = self.message { 17 | m.replace(FIELD_PARAM, &self.field) 18 | } else { 19 | self.default_message() 20 | }; 21 | 22 | // Prepare the schema with the custom message.
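// Illustrative example (an assumption for clarity, not present in the original source): a policy with
//   field: "/parameters/disabled", check: absence, severity: error
// produces a schema whose "not" clause requires properties.parameters.properties.disabled,
// so validation fails (reporting the "x-message" text) whenever that field is present.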
23 | let mut schema = json!({ 24 | "$schema": "http://json-schema.org/draft-07/schema#", 25 | "type": "object", 26 | "x-message": msg, 27 | }); 28 | 29 | let parts = helpers::parse_field(&self.field); 30 | 31 | // Enforce string type for Pattern and Constraint checks. 32 | let enforced_type = match self.check { 33 | CheckKind::Pattern | CheckKind::Constraint => Some("string"), 34 | _ => None, 35 | }; 36 | 37 | // Build the schema based on the check kind. 38 | let leaf_schema = self.build_leaf_schema(enforced_type)?; 39 | match parts.as_slice() { 40 | [] => schema["properties"] = json!({}), 41 | [field] => match self.check { 42 | CheckKind::Absence => schema["not"] = json!({ "required": [field] }), 43 | _ => { 44 | schema["properties"] = json!({ *field: leaf_schema }); 45 | schema["required"] = json!([*field]); 46 | } 47 | }, 48 | _ => { 49 | let nested_schema = helpers::build_nested(&parts, leaf_schema); 50 | match self.check { 51 | CheckKind::Absence => schema["not"] = nested_schema, 52 | _ => { 53 | schema["properties"] = nested_schema["properties"].clone(); 54 | schema["required"] = nested_schema["required"].clone(); 55 | } 56 | } 57 | } 58 | } 59 | Ok(schema) 60 | } 61 | 62 | /// Builds the leaf schema for a given policy. 63 | fn build_leaf_schema(&self, enforced_type: Option<&str>) -> Result { 64 | let ignore = self.ignorecase.unwrap_or(false); 65 | let mut leaf_schema = if let Some(t) = enforced_type { 66 | json!({ "type": t }) 67 | } else { 68 | json!({}) 69 | }; 70 | 71 | match self.check { 72 | CheckKind::Pattern => { 73 | if let Some(ref regex) = self.regex { 74 | let pattern = if ignore && !regex.starts_with("(?i)") { 75 | format!("(?i){}", regex) 76 | } else { 77 | regex.clone() 78 | }; 79 | leaf_schema["pattern"] = json!(pattern); 80 | } else { 81 | return Err("pattern check requires a regex."); 82 | } 83 | } 84 | CheckKind::Constraint => { 85 | if let Some(ref cons) = self.validations { 86 | match (cons.min_length, cons.max_length) { 87 | (Some(min), Some(max)) => { 88 | if min > max { 89 | return Err("minLength must be less than or equal to maxLength."); 90 | } else { 91 | leaf_schema["minLength"] = json!(min); 92 | leaf_schema["maxLength"] = json!(max); 93 | } 94 | } 95 | (Some(min), None) => { 96 | leaf_schema["minLength"] = json!(min); 97 | } 98 | (None, Some(max)) => { 99 | leaf_schema["maxLength"] = json!(max); 100 | } 101 | _ => {} 102 | } 103 | if let Some(ref vals) = cons.values { 104 | if ignore { 105 | let pattern = format!("^(?i:({}))$", vals.join("|")); 106 | leaf_schema["pattern"] = json!(pattern); 107 | } else { 108 | leaf_schema["enum"] = json!(vals); 109 | } 110 | } 111 | } else { 112 | return Err("constraint check requires validations to be defined."); 113 | } 114 | } 115 | _ => {} 116 | } 117 | Ok(leaf_schema) 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /libs/lgc-runtime/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "lgc-runtime" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | repository.workspace = true 7 | 8 | [dependencies] 9 | # Utils 10 | anyhow.workspace = true 11 | tracing.workspace = true 12 | tracing-subscriber.workspace = true 13 | 14 | # Async 15 | tokio.workspace = true 16 | crossbeam-channel = "0.5" 17 | 18 | # HTTP related 19 | http.workspace = true 20 | hyper = { version = "1.0.1", features = ["full"] } 21 | tokio-native-tls = "0.3.1" 22 | http-body-util = "0.1.1" 23 | 24 | 
# WASM Related 25 | wasmtime.workspace = true 26 | wasmtime-wasi.workspace = true 27 | wasmtime-wasi-http.workspace = true 28 | -------------------------------------------------------------------------------- /libs/lgc-runtime/moon.yml: -------------------------------------------------------------------------------- 1 | language: 'rust' 2 | type: 'library' 3 | 4 | project: 5 | name: 'lgc-runtime' 6 | description: 'LogCraft CLI runtime library' 7 | 8 | dependsOn: 9 | - 'bindings' -------------------------------------------------------------------------------- /libs/lgc-runtime/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use std::time::Duration; 5 | 6 | mod engine; 7 | pub mod state; 8 | pub use engine::{Config, Engine}; 9 | 10 | /// The default [`EngineBuilder::epoch_tick_interval`]. 11 | pub const DEFAULT_EPOCH_TICK_INTERVAL: Duration = Duration::from_millis(10); 12 | 13 | // Plugins wit definition macro builder 14 | #[cfg(debug_assertions)] 15 | wasmtime::component::bindgen!({ 16 | path: "../bindings", 17 | world: "logcraft:lgc/plugins", 18 | async: true, 19 | ownership: Borrowing { 20 | duplicate_if_necessary: true 21 | }, 22 | tracing: true, // Enable tracing in debug mode 23 | }); 24 | 25 | #[cfg(not(debug_assertions))] 26 | wasmtime::component::bindgen!({ 27 | path: "../bindings", 28 | world: "logcraft:lgc/plugins", 29 | async: true, 30 | ownership: Borrowing { 31 | duplicate_if_necessary: true 32 | } 33 | }); 34 | 35 | /// Plugin component bindings created by the wasm-bindgen macro 36 | pub mod plugin_component { 37 | pub use crate::exports::logcraft::lgc::plugin; 38 | 39 | pub use crate::Plugins; 40 | } 41 | -------------------------------------------------------------------------------- /plugins/sample/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sample" 3 | description = "LogCraft CLI testing plugin" 4 | version = "0.1.0" 5 | edition.workspace = true 6 | repository.workspace = true 7 | rust-version.workspace = true 8 | 9 | [lib] 10 | crate-type = ["cdylib"] 11 | 12 | [dependencies] 13 | # Utils 14 | serde.workspace = true 15 | serde_json.workspace = true 16 | serde_yaml_ng = "0.10" 17 | once_cell = "1.20" 18 | 19 | # Schema related 20 | schemars.workspace = true 21 | jsonschema.workspace = true 22 | 23 | # WASM Related 24 | wit-bindgen.workspace = true 25 | -------------------------------------------------------------------------------- /plugins/sample/moon.yml: -------------------------------------------------------------------------------- 1 | language: 'rust' 2 | type: 'library' 3 | 4 | project: 5 | name: 'sample' 6 | description: 'LogCraft CLI demonstration plugin' 7 | 8 | dependsOn: 9 | - 'bindings' 10 | 11 | tags: 12 | - 'plugin' 13 | -------------------------------------------------------------------------------- /plugins/sample/src/backend.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 
2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use serde::{Deserialize, Serialize}; 5 | 6 | const RE_TOKEN: &str = r#"^([A-Za-z0-9+/=]+|\w+\.\w+\.\w+)$"#; 7 | 8 | #[derive(Serialize, Deserialize, schemars::JsonSchema)] 9 | #[serde(default)] 10 | pub(super) struct SampleBackend { 11 | // Custom definitions 12 | /// Authorization scheme 13 | authorization_scheme: AuthorizationScheme, 14 | 15 | // Common types 16 | #[validate(length(min = 1, max = 10))] 17 | /// Backend name 18 | name: String, 19 | 20 | #[validate(url)] 21 | /// Backend URL 22 | url: String, 23 | 24 | #[validate(email)] 25 | /// Contact email 26 | email: String, 27 | 28 | #[validate(regex = "RE_TOKEN")] 29 | /// Authorization token 30 | token: String, 31 | 32 | #[validate(range(min = 0, max = 30))] 33 | /// Timeout in seconds 34 | timeout: u64, 35 | 36 | /// Custom type 37 | custom_type: CustomType, 38 | } 39 | 40 | impl Default for SampleBackend { 41 | fn default() -> Self { 42 | Self { 43 | authorization_scheme: AuthorizationScheme::Bearer, 44 | name: "dev".to_string(), 45 | url: "https://example.com".to_string(), 46 | email: "john.doe@foo.bar".to_string(), 47 | token: "someToken".to_string(), 48 | timeout: 10, 49 | custom_type: CustomType::default(), 50 | } 51 | } 52 | } 53 | 54 | #[derive(Serialize, Deserialize, schemars::JsonSchema)] 55 | struct CustomType { 56 | custom_field: String, 57 | } 58 | 59 | impl Default for CustomType { 60 | fn default() -> Self { 61 | Self { 62 | custom_field: "custom_field_value".to_string(), 63 | } 64 | } 65 | } 66 | 67 | #[derive(Default, Serialize, Deserialize, schemars::JsonSchema)] 68 | enum AuthorizationScheme { 69 | #[default] 70 | Bearer, 71 | Basic, 72 | } 73 | -------------------------------------------------------------------------------- /plugins/sample/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 
2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use bindings::{ 5 | export, 6 | exports::logcraft::lgc::plugin::{Bytes, Guest, Metadata}, 7 | }; 8 | use once_cell::sync::Lazy; 9 | 10 | mod bindings { 11 | wit_bindgen::generate!({ 12 | path: "../../libs/bindings", 13 | world: "logcraft:lgc/plugins" 14 | }); 15 | } 16 | 17 | // Local modules 18 | mod backend; 19 | mod schema; 20 | use backend::SampleBackend; 21 | use schema::SampleRule; 22 | 23 | // static BACKEND_SCHEMA: Lazy<serde_json::Value> = Lazy::new(|| { 24 | // serde_json::to_value( 25 | // schemars::schema_for!(SampleBackend) 26 | // ) 27 | // .expect("Failed to generate schema") 28 | // }); 29 | 30 | static RULE_SCHEMA: Lazy<serde_json::Value> = Lazy::new(|| { 31 | serde_json::to_value(schemars::schema_for!(SampleRule)).expect("Failed to generate schema") 32 | }); 33 | 34 | static SCHEMA_VALIDATOR: Lazy<jsonschema::Validator> = Lazy::new(|| { 35 | jsonschema::validator_for(&RULE_SCHEMA).expect("Failed to create schema validator") 36 | }); 37 | 38 | impl Guest for SampleBackend { 39 | /// Retrieve plugin metadata 40 | fn load() -> Metadata { 41 | Metadata { 42 | name: env!("CARGO_PKG_NAME").to_string(), 43 | version: env!("CARGO_PKG_VERSION").to_string(), 44 | } 45 | } 46 | 47 | /// Retrieve plugin settings schema 48 | fn settings() -> Result<Bytes, String> { 49 | match serde_json::to_vec(&schemars::schema_for!(SampleBackend)) { 50 | Ok(schema) => Ok(schema), 51 | Err(e) => Err(e.to_string()), 52 | } 53 | } 54 | 55 | /// Retrieve plugin detection schema 56 | fn schema() -> Result<Bytes, String> { 57 | match serde_json::to_vec(&schemars::schema_for!(SampleRule)) { 58 | Ok(schema) => Ok(schema), 59 | Err(e) => Err(e.to_string()), 60 | } 61 | } 62 | 63 | /// Validate detection rule 64 | fn validate(detection: Bytes) -> Result<(), String> { 65 | let detection: SampleRule = serde_yaml_ng::from_slice(&detection) 66 | .map_err(|e| format!("Failed to deserialize detection content: {e}"))?; 67 | 68 | if let Err(e) = SCHEMA_VALIDATOR.validate( 69 | &serde_json::to_value(&detection) 70 | .map_err(|e| format!("Failed to serialize detection content: {e}"))?, 71 | ) { 72 | return Err(format!( 73 | "Detection content failed schema validation at {}: {}", 74 | e.instance, e.instance_path 75 | )); 76 | } 77 | 78 | Ok(()) 79 | } 80 | 81 | /// Create SavedSearch 82 | fn create(_config: Bytes, _detection: Bytes) -> Result<(), String> { 83 | unimplemented!() 84 | } 85 | 86 | /// Get SavedSearch 87 | fn read(_config: Bytes, _detection: Bytes) -> Result<Option<Bytes>, String> { 88 | unimplemented!() 89 | } 90 | 91 | /// Update SavedSearch 92 | fn update(_config: Bytes, _detection: Bytes) -> Result<(), String> { 93 | unimplemented!() 94 | } 95 | 96 | /// Delete SavedSearch 97 | fn delete(_config: Bytes, _detection: Bytes) -> Result<(), String> { 98 | unimplemented!() 99 | } 100 | 101 | /// Ping service 102 | fn ping(_config: Bytes) -> Result<bool, String> { 103 | unimplemented!() 104 | } 105 | } 106 | 107 | export!(SampleBackend with_types_in bindings); 108 | -------------------------------------------------------------------------------- /plugins/sample/src/schema.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io.
2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use serde::{de, Deserialize, Deserializer, Serialize}; 5 | use std::collections::HashMap; 6 | 7 | const RE_CRON: &str = r#"^(\*|[0-5]?[0-9]|\*\/[0-9]+)\s+(\*|1?[0-9]|2[0-3]|\*\/[0-9]+)\s+(\*|[1-2]?[0-9]|3[0-1]|\*\/[0-9]+)\s+(\*|[0-9]|1[0-2]|\*\/[0-9]+|jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)\s+(\*\/[0-9]+|\*|[0-7]|sun|mon|tue|wed|thu|fri|sat)\s*(\*\/[0-9]+|\*|[0-9]+)?"#; 8 | 9 | // Custom deserializer to handle boolean values provided as strings or numbers (e.g. "1", "true") 10 | fn deserialize_opt_boolean<'de, D>(deserializer: D) -> Result<Option<bool>, D::Error> 11 | where 12 | D: Deserializer<'de>, 13 | { 14 | // First, try to deserialize an Option<String> (the field could be missing or null). 15 | let opt_str = Option::<String>::deserialize(deserializer)?; 16 | match opt_str { 17 | None => Ok(None), // Key wasn't present 18 | Some(s) => match s.trim() { 19 | "1" | "true" => Ok(Some(true)), 20 | "0" | "false" => Ok(Some(false)), 21 | other => Err(de::Error::custom(format!("Invalid bool '{}'", other))), 22 | }, 23 | } 24 | } 25 | 26 | #[derive(Serialize, Deserialize, schemars::JsonSchema)] 27 | pub struct SampleRule { 28 | pub title: String, 29 | pub search: String, 30 | pub parameters: Parameters, 31 | } 32 | 33 | #[derive(Serialize, Deserialize, schemars::JsonSchema)] 34 | pub struct Parameters { 35 | #[serde(flatten)] 36 | pub unknown_fields: HashMap<String, serde_json::Value>, // Capture unknown fields here 37 | 38 | #[serde(default, deserialize_with = "deserialize_opt_boolean")] 39 | pub disabled: Option<bool>, 40 | 41 | #[serde(default)] 42 | #[schemars(regex = "RE_CRON")] 43 | pub cron_schedule: Option<String>, 44 | 45 | #[serde(default, deserialize_with = "deserialize_opt_boolean")] 46 | pub is_visible: Option<bool>, 47 | 48 | #[serde(default)] 49 | pub description: Option<String>, 50 | } 51 | -------------------------------------------------------------------------------- /plugins/sentinel/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sentinel" 3 | description = "LogCraft CLI Sentinel plugin" 4 | version = "0.2.0" 5 | edition.workspace = true 6 | repository.workspace = true 7 | rust-version.workspace = true 8 | 9 | [lib] 10 | crate-type = ["cdylib"] 11 | 12 | [dependencies] 13 | # Utils 14 | serde.workspace = true 15 | serde_json.workspace = true 16 | serde_with_macros.workspace = true 17 | serde_path_to_error.workspace = true 18 | once_cell.workspace = true 19 | uuid = { version = "1.14", features = ["v4", "serde"] } 20 | 21 | # Schema related 22 | schemars = { workspace = true, features = ["uuid1"] } 23 | jsonschema.workspace = true 24 | 25 | # WASM Related 26 | wit-bindgen.workspace = true 27 | 28 | # HTTP 29 | waki = { workspace = true, features = ["json"] } 30 | url.workspace = true 31 | http.workspace = true 32 | -------------------------------------------------------------------------------- /plugins/sentinel/README.md: -------------------------------------------------------------------------------- 1 | # Sentinel 2 | 3 | The documentation is available at https://docs.logcraft.io -------------------------------------------------------------------------------- /plugins/sentinel/moon.yml: -------------------------------------------------------------------------------- 1 | language: 'rust' 2 | type: 'library' 3 | 4 | project: 5 | name: 'sentinel' 6 | description: 'LogCraft CLI Sentinel plugin' 7 | 8 | dependsOn: 9 | - 'bindings' 10 | 11 | tags: 12 | - 'plugin' 13 | --------------------------------------------------------------------------------
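A minimal usage sketch for the sample plugin above (hypothetical, not a repository file; it assumes it sits alongside plugins/sample/src/schema.rs so that SampleRule is in scope) showing how deserialize_opt_boolean coerces string booleans:

#[test]
fn parses_sample_rule() {
    // Hypothetical detection content for the sample plugin.
    let yaml = br#"
title: Suspicious PowerShell
search: index=main sourcetype=powershell EventCode=4104
parameters:
  disabled: "1"
  is_visible: "false"
  cron_schedule: "*/5 * * * *"
  description: Detects encoded PowerShell commands
"#;
    let rule: SampleRule = serde_yaml_ng::from_slice(yaml).expect("valid sample rule");
    assert_eq!(rule.parameters.disabled, Some(true)); // "1" is coerced to true
    assert_eq!(rule.parameters.is_visible, Some(false)); // "false" is coerced to false
}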
/plugins/sentinel/src/helpers.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | pub fn filter_response( 5 | detection: &serde_json::Value, 6 | mut response: serde_json::Value, 7 | ) -> serde_json::Value { 8 | match (detection, &mut response) { 9 | // If both detection and response are objects, iterate and filter in place. 10 | (serde_json::Value::Object(det_obj), serde_json::Value::Object(resp_obj)) => { 11 | // Collect keys from response as we may remove some. 12 | let keys: Vec = resp_obj.keys().cloned().collect(); 13 | for key in keys { 14 | if let Some(det_val) = det_obj.get(&key) { 15 | // Recursively filter the value if the key exists in detection. 16 | if let Some(entry) = resp_obj.get_mut(&key) { 17 | let filtered = filter_response(det_val, std::mem::take(entry)); 18 | *entry = filtered; 19 | } 20 | } else { 21 | // Remove keys that are not present in detection. 22 | resp_obj.remove(&key); 23 | } 24 | } 25 | response 26 | } 27 | // If both detection and response are arrays, use the first element of detection as a template. 28 | (serde_json::Value::Array(det_arr), serde_json::Value::Array(resp_arr)) => { 29 | if let Some(template) = det_arr.first() { 30 | for item in resp_arr.iter_mut() { 31 | let filtered = filter_response(template, std::mem::take(item)); 32 | *item = filtered; 33 | } 34 | } 35 | response 36 | } 37 | // For non-object types, just return the response as is. 38 | _ => response, 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /plugins/sentinel/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use bindings::{ 5 | export, 6 | exports::logcraft::lgc::plugin::{Bytes, Guest, Metadata}, 7 | }; 8 | 9 | mod bindings { 10 | wit_bindgen::generate!({ 11 | path: "../../libs/bindings", 12 | world: "logcraft:lgc/plugins" 13 | }); 14 | } 15 | 16 | // Local modules 17 | mod helpers; 18 | mod schemas; 19 | use schemas::{ 20 | rule::SentinelRule, 21 | settings::{AzureError, Sentinel}, 22 | }; 23 | 24 | impl Guest for Sentinel { 25 | /// Retrieve plugin metadata 26 | fn load() -> Metadata { 27 | Metadata { 28 | name: env!("CARGO_PKG_NAME").to_string(), 29 | version: env!("CARGO_PKG_VERSION").to_string(), 30 | } 31 | } 32 | 33 | /// Retrieve plugin settings 34 | fn settings() -> Result { 35 | let generator = schemars::gen::SchemaSettings::default() 36 | .with(|s| { 37 | s.option_add_null_type = false; 38 | }) 39 | .into_generator(); 40 | 41 | match serde_json::to_vec(&generator.into_root_schema_for::()) { 42 | Ok(schema) => Ok(schema), 43 | Err(e) => Err(e.to_string()), 44 | } 45 | } 46 | 47 | /// Retrieve plugin detection schema 48 | fn schema() -> Result { 49 | match serde_json::to_vec(&schemars::schema_for!(SentinelRule)) { 50 | Ok(schema) => Ok(schema), 51 | Err(e) => Err(e.to_string()), 52 | } 53 | } 54 | 55 | /// Validate detection rule 56 | fn validate(detection: Bytes) -> Result<(), String> { 57 | SentinelRule::deserialize(&detection)? 
58 | .validate() 59 | .map(|_| ()) 60 | } 61 | 62 | /// Create SavedSearch 63 | fn create(config: Bytes, detection: Bytes) -> Result<(), String> { 64 | // Parse settings 65 | let settings = Sentinel::deserialize(&config)?; 66 | 67 | // Convert JSON to SentinelRule 68 | let rule = SentinelRule::deserialize(&detection)?; 69 | 70 | // Prepare the request 71 | let request = settings 72 | .client(waki::Method::Put, &rule.name)? 73 | .header("Content-Type", "application/json") 74 | .json(&rule); 75 | 76 | let res = request.send().map_err(|e| e.to_string())?; 77 | match res.status_code() { 78 | // 200 (Update) and 201 (Create). 79 | // Update uses this method but the only change is the response code. 80 | 200 | 201 => Ok(()), 81 | 400 => Err(AzureError::from_slices( 82 | res.body().map_err(|e| e.to_string())?, 83 | )), 84 | code => Err(http::StatusCode::from_u16(code) 85 | .map(|status| status.to_string()) 86 | .unwrap_or_else(|_| format!("HTTP/{} Invalid status code", code))), 87 | } 88 | } 89 | 90 | /// Get SavedSearch 91 | fn read(config: Bytes, detection: Bytes) -> Result, String> { 92 | // Parse settings 93 | let settings = Sentinel::deserialize(&config)?; 94 | 95 | // Convert JSON to SentinelRule 96 | let rule = SentinelRule::deserialize(&detection)?; 97 | 98 | // Validate the detection rule and retrieve the detection as serde_json::Value. 99 | let detection_value = rule.validate()?; 100 | 101 | // Prepare the request 102 | let request = settings.client(waki::Method::Get, &rule.name)?; 103 | 104 | let res = request.send().map_err(|e| e.to_string())?; 105 | match res.status_code() { 106 | // Returned string isn't used for now. 107 | 200 => { 108 | let body: serde_json::Value = res.json().map_err(|e| e.to_string())?; 109 | // Recursively filter the response using the detection as a template. 110 | let filtered = helpers::filter_response(&detection_value, body); 111 | Ok(Some( 112 | serde_json::to_vec(&filtered).map_err(|e| e.to_string())?, 113 | )) 114 | } 115 | 404 => Ok(None), 116 | 400 => Err(AzureError::from_slices( 117 | res.body().map_err(|e| e.to_string())?, 118 | )), 119 | code => Err(http::StatusCode::from_u16(code) 120 | .map(|status| status.to_string()) 121 | .unwrap_or_else(|_| format!("HTTP/{} Invalid status code", code))), 122 | } 123 | } 124 | 125 | /// Update SavedSearch 126 | fn update(config: Bytes, detection: Bytes) -> Result<(), String> { 127 | // Azure Sentinel uses the same method for creating and updating rules. 128 | Self::create(config, detection) 129 | } 130 | 131 | /// Delete SavedSearch 132 | fn delete(config: Bytes, detection: Bytes) -> Result<(), String> { 133 | // Parse settings 134 | let settings = Sentinel::deserialize(&config)?; 135 | 136 | // Convert JSON to SentinelRule 137 | let rule = SentinelRule::deserialize(&detection)?; 138 | 139 | // Prepare the request 140 | let request = settings.client(waki::Method::Delete, &rule.name)?; 141 | 142 | let res = request.send().map_err(|e| e.to_string())?; 143 | match res.status_code() { 144 | // Returned string isn't used for now. 
145 | 200 | 404 => Ok(()), 146 | 400 => Err(AzureError::from_slices( 147 | res.body().map_err(|e| e.to_string())?, 148 | )), 149 | code => Err(http::StatusCode::from_u16(code) 150 | .map(|status| status.to_string()) 151 | .unwrap_or_else(|_| format!("HTTP/{} Invalid status code", code))), 152 | } 153 | } 154 | 155 | /// Ping service 156 | fn ping(config: Bytes) -> Result { 157 | // Parse settings 158 | let settings = Sentinel::deserialize(&config)?; 159 | // Check workspace connection 160 | settings.check_workspace().map(|_| true) 161 | } 162 | } 163 | 164 | export!(Sentinel with_types_in bindings); 165 | -------------------------------------------------------------------------------- /plugins/sentinel/src/schemas/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | pub(super) mod rule; 5 | pub(super) mod settings; 6 | mod types; 7 | -------------------------------------------------------------------------------- /plugins/splunk/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "splunk" 3 | description = "LogCraft CLI Splunk plugin" 4 | version = "0.2.0" 5 | edition.workspace = true 6 | repository.workspace = true 7 | rust-version.workspace = true 8 | 9 | [lib] 10 | crate-type = ["cdylib"] 11 | 12 | [dependencies] 13 | # Utils 14 | serde.workspace = true 15 | serde_json.workspace = true 16 | serde_with_macros.workspace = true 17 | serde_path_to_error.workspace = true 18 | once_cell.workspace = true 19 | 20 | # Schema related 21 | schemars.workspace = true 22 | jsonschema.workspace = true 23 | 24 | # WASM Related 25 | wit-bindgen.workspace = true 26 | 27 | # HTTP 28 | waki = { workspace = true, features = ["json"] } 29 | url.workspace = true 30 | http.workspace = true 31 | -------------------------------------------------------------------------------- /plugins/splunk/README.md: -------------------------------------------------------------------------------- 1 | # Splunk 2 | 3 | The documentation is available at https://docs.logcraft.io -------------------------------------------------------------------------------- /plugins/splunk/moon.yml: -------------------------------------------------------------------------------- 1 | language: 'rust' 2 | type: 'library' 3 | 4 | project: 5 | name: 'splunk' 6 | description: 'LogCraft CLI Splunk plugin' 7 | 8 | dependsOn: 9 | - 'bindings' 10 | 11 | tags: 12 | - 'plugin' 13 | -------------------------------------------------------------------------------- /plugins/splunk/src/schemas/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | pub(super) mod rule; 5 | pub(super) mod settings; 6 | mod types; 7 | -------------------------------------------------------------------------------- /plugins/splunk/src/schemas/settings.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 
2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use serde::{Deserialize, Serialize}; 5 | use std::{fmt::Display, str::FromStr, time::Duration}; 6 | use url::{ParseError, Url}; 7 | 8 | use crate::bindings::exports::logcraft::lgc::plugin::Bytes; 9 | 10 | const DEFAULT_USER: &str = "nobody"; 11 | const DEFAULT_APP: &str = "search"; 12 | 13 | // Regular expressions used for token validation 14 | const RE_TOKEN: &str = r#"^(?:[A-Za-z0-9+/=]+|[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+)$"#; 15 | 16 | #[derive(Serialize, Deserialize, schemars::JsonSchema)] 17 | #[serde(default)] 18 | /// Splunk backend configuration 19 | pub struct Splunk { 20 | #[validate(url)] 21 | /// Splunk URL 22 | pub url: String, 23 | 24 | /// Authorization type 25 | auth_type: AuthorizationType, 26 | 27 | /// Authorization token 28 | #[validate(regex = "RE_TOKEN")] 29 | token: String, 30 | 31 | #[validate(range(min = 1, max = 60))] 32 | /// Timeout (seconds) 33 | timeout: u64, 34 | 35 | /// Application context 36 | app: Option, 37 | 38 | /// User context 39 | user: Option, 40 | } 41 | 42 | impl Default for Splunk { 43 | fn default() -> Self { 44 | Self { 45 | url: "https://splunk-server:8089".to_string(), 46 | auth_type: AuthorizationType::Bearer, 47 | token: "myToken==".to_string(), 48 | timeout: 30, 49 | app: Some(DEFAULT_APP.to_string()), 50 | user: Some(DEFAULT_USER.to_string()), 51 | } 52 | } 53 | } 54 | 55 | #[derive(Default, Serialize, Deserialize, schemars::JsonSchema)] 56 | enum AuthorizationType { 57 | #[default] 58 | Bearer, 59 | Basic, 60 | } 61 | 62 | impl Display for AuthorizationType { 63 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 64 | match self { 65 | AuthorizationType::Bearer => write!(f, "Bearer"), 66 | AuthorizationType::Basic => write!(f, "Basic"), 67 | } 68 | } 69 | } 70 | 71 | impl Splunk { 72 | pub fn client( 73 | &self, 74 | method: waki::Method, 75 | path: &str, 76 | ) -> Result { 77 | // Prepare the URI 78 | let uri = Url::from_str(&format!( 79 | "{}/servicesNS/{}/{}/saved/searches/", 80 | &self.url, 81 | self.user.as_deref().unwrap_or(DEFAULT_USER), 82 | self.app.as_deref().unwrap_or(DEFAULT_APP) 83 | ))? 
84 | .join(path)?; 85 | 86 | // Build and return the client 87 | Ok(waki::Client::new() 88 | .request(method, uri.as_str()) 89 | .connect_timeout(Duration::from_secs(self.timeout)) 90 | .header(waki::header::AUTHORIZATION, self.format_token())) 91 | } 92 | 93 | pub fn deserialize(detection: &Bytes) -> Result { 94 | let mut de = serde_json::Deserializer::from_slice(detection); 95 | 96 | serde_path_to_error::deserialize(&mut de).map_err(|e| { 97 | format!( 98 | "field: {}, error: {}", 99 | e.path(), 100 | e.inner() 101 | .to_string() 102 | .split_once(" at") 103 | .map(|(msg, _)| msg) 104 | .unwrap_or(&e.inner().to_string()) 105 | ) 106 | }) 107 | } 108 | 109 | pub fn check_app(&self) -> Result<(), String> { 110 | // Prepare the URI 111 | let uri = Url::from_str(&format!( 112 | "{}/services/apps/local/{}", 113 | &self.url, 114 | self.app.as_deref().unwrap_or(DEFAULT_APP) 115 | )) 116 | .map_err(|e| e.to_string())?; 117 | 118 | match waki::Client::new() 119 | .get(uri.as_str()) 120 | .header(waki::header::AUTHORIZATION, self.format_token()) 121 | .connect_timeout(std::time::Duration::from_secs(self.timeout)) 122 | .send() 123 | { 124 | Ok(response) => match response.status_code() { 125 | 200 => Ok(()), 126 | 404 => Err(format!( 127 | "target app '{}' not found", 128 | self.app.as_deref().unwrap_or(DEFAULT_APP) 129 | )), 130 | code => Err(format!( 131 | "unable to check target app '{}': {}", 132 | self.app.as_deref().unwrap_or(DEFAULT_APP), 133 | http::StatusCode::from_u16(code) 134 | .map(|status| status.to_string()) 135 | .unwrap_or_else(|_| format!("HTTP/{} Invalid status code", code)) 136 | )), 137 | }, 138 | Err(e) => Err(e.to_string()), 139 | } 140 | } 141 | 142 | fn format_token(&self) -> String { 143 | format!("{} {}", self.auth_type, self.token) 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /plugins/splunk/src/schemas/types.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2023 LogCraft.io. 2 | // SPDX-License-Identifier: MPL-2.0 3 | 4 | use schemars::JsonSchema; 5 | use serde::{Deserialize, Serialize}; 6 | 7 | /// The user context under which the saved search runs. 8 | #[derive(Default, Serialize, Deserialize, JsonSchema)] 9 | #[serde(rename_all = "lowercase")] 10 | pub enum DispatchAs { 11 | #[default] 12 | Owner, 13 | User, 14 | } 15 | 16 | /// Saved search scheduling priority. 17 | #[derive(Default, Serialize, Deserialize, JsonSchema)] 18 | #[serde(rename_all = "lowercase")] 19 | pub enum SchedulePriority { 20 | #[default] 21 | Default, 22 | Higher, 23 | Highest, 24 | } 25 | 26 | /// Saved search schedule window. 27 | #[derive(Serialize, Deserialize, JsonSchema)] 28 | #[serde(rename_all = "lowercase")] 29 | pub enum ScheduleWindow { 30 | Auto, 31 | // A fixed schedule window specified in minutes. 32 | Minutes(u32), 33 | } 34 | 35 | impl Default for ScheduleWindow { 36 | fn default() -> Self { 37 | ScheduleWindow::Minutes(0) 38 | } 39 | } 40 | 41 | /// Specifies whether to use parallel reduce processing. 42 | #[derive(Default, Serialize, Deserialize, JsonSchema)] 43 | #[serde(rename_all = "lowercase")] 44 | pub enum ScheduleAs { 45 | #[default] 46 | Auto, 47 | Classic, 48 | Prjob, 49 | } 50 | 51 | /// Alerting count type. 
52 | #[derive(Serialize, Deserialize, JsonSchema)] 53 | pub enum CountType { 54 | #[serde(rename = "number of events")] 55 | NumberOfEvents, 56 | 57 | #[serde(rename = "number of hosts")] 58 | NumberOfHosts, 59 | 60 | #[serde(rename = "number of sources")] 61 | NumberOfSources, 62 | 63 | #[serde(rename = "custom")] 64 | Custom, 65 | 66 | #[serde(rename = "always")] 67 | Always, 68 | } 69 | 70 | /// Alerting relation. 71 | #[derive(Serialize, Deserialize, JsonSchema)] 72 | pub enum Relation { 73 | #[serde(rename = "greater than")] 74 | GreaterThan, 75 | 76 | #[serde(rename = "less than")] 77 | LessThan, 78 | 79 | #[serde(rename = "equal to")] 80 | EqualTo, 81 | 82 | #[serde(rename = "not equal to")] 83 | NotEqualTo, 84 | 85 | #[serde(rename = "drops by")] 86 | DropsBy, 87 | 88 | #[serde(rename = "rises by")] 89 | RisesBy, 90 | } 91 | 92 | /// Summary Index types 93 | #[derive(Default, Serialize, Deserialize, JsonSchema)] 94 | #[serde(rename_all = "lowercase")] 95 | pub enum SummaryIndex { 96 | #[default] 97 | Event, 98 | Metric, 99 | } 100 | 101 | /// Durable track time types 102 | #[derive(Default, Serialize, Deserialize, JsonSchema)] 103 | #[serde(rename_all = "lowercase")] 104 | pub enum DurableTrackTime { 105 | #[default] 106 | None, 107 | _Time, 108 | _IndexTime, 109 | } 110 | 111 | /// Backfill types 112 | #[derive(Default, Serialize, Deserialize, JsonSchema)] 113 | #[serde(rename_all = "snake_case")] 114 | pub enum Backfill { 115 | #[default] 116 | Auto, 117 | TimeInterval, 118 | TimeWhole, 119 | } 120 | 121 | /// Alert track types 122 | #[derive(Default, Serialize, Deserialize, JsonSchema)] 123 | #[serde(rename_all = "lowercase")] 124 | pub enum AlertTrack { 125 | #[default] 126 | Auto, 127 | True, 128 | False, 129 | } 130 | -------------------------------------------------------------------------------- /scripts/package-plugins.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | ############################################################# 5 | # This script creates a tarball (`plugins.tar.gz`) # 6 | # containing all plugins and their checksums. # 7 | # The resulting files are moved to a `releases/` directory. # 8 | ############################################################# 9 | 10 | # Create a `releases` directory 11 | mkdir -p releases 12 | 13 | # Create a tarball with the plugins, placing them under a `plugins/` folder in the tarball 14 | find target/wasm32-wasip2/release/ \ 15 | -type f \( -name "*.wasm" \) -print0 \ 16 | | tar --null --transform 's|.*/|plugins/|' -czvf plugins.tar.gz --files-from - 17 | 18 | # Move the tarball to the releases directory and generate its checksum 19 | sha256sum plugins.tar.gz > releases/plugins.tar.gz.sha256 20 | mv plugins.tar.gz releases/ 21 | 22 | echo "Tarball created successfully." -------------------------------------------------------------------------------- /scripts/package.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | ######################################################################################### 5 | # This script creates two tarballs for Linux and macOS: # 6 | # 1. `lgc-minimal-{os}-{arch}.tar.gz`: Contains `target/release/lgc` and `README.md`. # 7 | # 2. `lgc-{os}-{arch}.tar.gz`: Extends the above by including `.wasm` files. 
# 8 | ######################################################################################### 9 | 10 | # Ensure RUNNER_OS and RUNNER_ARCH are set 11 | if [ -z "$RUNNER_OS" ]; then 12 | echo "Error: RUNNER_OS is not set. Exiting." 13 | exit 1 14 | fi 15 | if [ -z "$RUNNER_ARCH" ]; then 16 | echo "Error: RUNNER_ARCH is not set. Exiting." 17 | exit 1 18 | fi 19 | 20 | # Convert RUNNER_OS and RUNNER_ARCH to lowercase 21 | os=$(echo "$RUNNER_OS" | tr '[:upper:]' '[:lower:]') 22 | arch=$(echo "$RUNNER_ARCH" | tr '[:upper:]' '[:lower:]') 23 | 24 | # Define package names 25 | minimal_package="lgc-minimal-${os}-${arch}.tar.gz" 26 | full_package="lgc-${os}-${arch}.tar.gz" 27 | 28 | # Create a `releases` directory 29 | mkdir -p releases 30 | 31 | # Create tarball with CLI and Readme 32 | if [ "$os" == "linux" ]; then 33 | tar --null --transform 's|target/release/||' -czvf "$minimal_package" target/release/lgc README.md 34 | else 35 | # For macOS, manually adjust file paths 36 | tar -czvf "$minimal_package" -C target/release lgc -C ../../ README.md 37 | fi 38 | 39 | # Check for .wasm files and create tarball with CLI, plugins, and Readme 40 | wasm_files=$(find target/wasm32-wasip2/release/ -type f -name "*.wasm") 41 | if [ -n "$wasm_files" ]; then 42 | if [ "$os" == "linux" ]; then 43 | find target/wasm32-wasip2/release/ \ 44 | -type f \( -name "*.wasm" \) -print0 \ 45 | | tar --null --transform 's|target/release/||' --transform 's|.*/|plugins/|' \ 46 | -czvf "$full_package" target/release/lgc README.md --files-from - 47 | else 48 | # Move wasm plugins in the correct directory 49 | mkdir -p temp/plugins 50 | cp target/wasm32-wasip2/release/*.wasm temp/plugins/ 51 | cp target/release/lgc README.md temp/ 52 | # For macOS, manually adjust file paths 53 | tar -czvf "$full_package" -C temp . 54 | rm -rf temp 55 | fi 56 | else 57 | echo "No .wasm files found. Skipping creation of $full_package." 58 | fi 59 | 60 | # Move the tarballs to the releases directory and generate checksums 61 | # Only handle the full package if it was actually created (set -e would otherwise abort here) 62 | if [ -f "$full_package" ]; then 63 | sha256sum "$full_package" > "releases/$full_package.sha256" 64 | mv "$full_package" releases/ 65 | fi 66 | sha256sum "$minimal_package" > "releases/$minimal_package.sha256" 67 | mv "$minimal_package" releases/ 68 | 69 | echo "Tarballs created successfully." --------------------------------------------------------------------------------
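For a local dry run outside CI, the script above can be exercised by exporting the runner variables it checks for, e.g. `RUNNER_OS=Linux RUNNER_ARCH=X64 ./scripts/package.sh` after building `target/release/lgc` and the `wasm32-wasip2` plugins (a hypothetical invocation; in this repository the variables are normally provided by the GitHub Actions release workflow).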