├── .github └── workflows │ ├── generate_builds.yml │ └── lint_python.yml ├── .gitignore ├── Dockerfile ├── Makefile ├── README.md ├── Taskfile.yml ├── bin ├── create_universal_bin_macos.sh ├── evtx_dump_lin ├── evtx_dump_lin_arm ├── evtx_dump_mac ├── evtx_dump_win.exe └── package_evtx_binaries.sh ├── config └── fieldMappings.json ├── docs ├── .nojekyll ├── Advanced.md ├── Internals.md ├── README.md ├── Usage.md ├── Zircolite_manual.pdf ├── _sidebar.md ├── index.html └── pics │ ├── Zircolite.png │ └── gui.jpg ├── gui └── zircogui.zip ├── pics ├── Zircolite.gif ├── Zircolite.png ├── Zircolite.svg ├── Zircolite_v2.9.gif ├── gui-matrix.webp ├── gui-timeline.webp ├── gui.jpg ├── gui.webp ├── zircolite_200.png ├── zircolite_400.png ├── zircolite_600.png └── zircolite_800.png ├── requirements.full.txt ├── requirements.txt ├── rules ├── README.md ├── rules_linux.json ├── rules_windows_generic.json ├── rules_windows_generic_full.json ├── rules_windows_generic_high.json ├── rules_windows_generic_medium.json ├── rules_windows_generic_pysigma.json ├── rules_windows_sysmon.json ├── rules_windows_sysmon_full.json ├── rules_windows_sysmon_high.json ├── rules_windows_sysmon_medium.json └── rules_windows_sysmon_pysigma.json ├── templates ├── exportForELK.tmpl ├── exportForSplunk.tmpl ├── exportForSplunkWithRuleID.tmpl ├── exportForTimesketch.tmpl ├── exportForZinc.tmpl └── exportForZircoGui.tmpl └── zircolite.py /.github/workflows/generate_builds.yml: -------------------------------------------------------------------------------- 1 | name: Generate build 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | build-and-upload: 8 | name: Build and upload 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | matrix: 12 | include: 13 | - build: linux 14 | os: ubuntu-latest 15 | 16 | - build: windows-gnu 17 | os: windows-latest 18 | 19 | steps: 20 | - name: Clone repository 21 | uses: actions/checkout@v4 22 | - name: Install python 3.9 23 | uses: actions/setup-python@v4 24 | with: 25 | python-version: '3.9' 26 | - name: Install dependencies 27 | run: | 28 | pip install -r requirements.txt 29 | pip install nuitka zstandard orderedset 30 | 31 | - name: Build with Nuitka 32 | uses: Nuitka/Nuitka-Action@main 33 | with: 34 | nuitka-version: main 35 | script-name: zircolite.py 36 | onefile: true 37 | 38 | -------------------------------------------------------------------------------- /.github/workflows/lint_python.yml: -------------------------------------------------------------------------------- 1 | name: lint_python 2 | on: [pull_request, push] 3 | jobs: 4 | lint_python: 5 | runs-on: ubuntu-latest 6 | steps: 7 | - uses: actions/checkout@v2 8 | - uses: actions/setup-python@v2 9 | - run: pip install bandit black codespell flake8 isort mypy pytest pyupgrade 10 | - run: bandit --recursive --skip B101 . || true # B101 is assert statements 11 | - run: black --check . || true 12 | - run: codespell || true # --ignore-words-list="" --skip=" 13 | - run: flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 14 | - run: flake8 . --count --exit-zero --max-complexity=10 --max-line-length=88 --show-source --statistics 15 | - run: isort --check-only --profile black . || true 16 | - run: pip install -r requirements.txt || true 17 | - run: mypy --install-types --non-interactive . || true 18 | - run: pytest . || true 19 | - run: pytest --doctest-modules . 
|| true 20 | - run: shopt -s globstar && pyupgrade --py36-plus **/*.py || true 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # General 2 | .DS_Store 3 | .AppleDouble 4 | .LSOverride 5 | 6 | # Icon must end with two \r 7 | Icon 8 | 9 | 10 | # Thumbnails 11 | ._* 12 | 13 | # Files that might appear in the root of a volume 14 | .DocumentRevisions-V100 15 | .fseventsd 16 | .Spotlight-V100 17 | .TemporaryItems 18 | .Trashes 19 | .VolumeIcon.icns 20 | .com.apple.timemachine.donotpresent 21 | 22 | # Directories potentially created on remote AFP share 23 | .AppleDB 24 | .AppleDesktop 25 | Network Trash Folder 26 | Temporary Items 27 | .apdisk 28 | 29 | TODO 30 | detected_events.json 31 | tmp 32 | *.log 33 | *.evtx 34 | *.evtx_data 35 | *.tar 36 | *.tar.sha256 37 | .pdm-python 38 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION="3.13-slim" 2 | 3 | FROM python:${PYTHON_VERSION} 4 | 5 | ARG ZIRCOLITE_INSTALL_PREFIX="/opt" 6 | ARG ZIRCOLITE_REQUIREMENTS_FILE="requirements.full.txt" 7 | 8 | WORKDIR ${ZIRCOLITE_INSTALL_PREFIX}/zircolite 9 | 10 | # Copy requirements first to leverage Docker cache 11 | COPY ${ZIRCOLITE_REQUIREMENTS_FILE} . 12 | RUN pip install --no-cache-dir -r ${ZIRCOLITE_REQUIREMENTS_FILE} 13 | 14 | # Install git only when needed for rule updates 15 | RUN apt-get update && \ 16 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends git && \ 17 | apt-get clean && \ 18 | rm -rf /var/lib/apt/lists/* 19 | 20 | # Copy files in order of change frequency (least to most) 21 | COPY README.md . 22 | COPY docs/ docs/ 23 | COPY pics/ pics/ 24 | COPY templates/ templates/ 25 | COPY config/ config/ 26 | COPY bin/ bin/ 27 | COPY gui/ gui/ 28 | COPY rules/ rules/ 29 | COPY zircolite.py . 30 | 31 | LABEL author="wagga40" \ 32 | description="A standalone SIGMA-based detection tool for EVTX, Auditd and Sysmon for Linux logs." \ 33 | maintainer="wagga40" 34 | 35 | RUN chmod 0755 zircolite.py && \ 36 | python3 zircolite.py -U 37 | 38 | ENTRYPOINT ["python3", "zircolite.py"] 39 | CMD ["--help"] 40 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | #!make 2 | 3 | DOCKER?=docker 4 | DOCKER_BUILD_FLAGS?= 5 | DOCKER_REGISTRY?=docker.io 6 | DOCKER_TAG?=2.40.0 7 | GIT?=git 8 | PY3?=python3 9 | DATE=$(shell date +%s) 10 | TMP_GIT=tmp-$(shell date +%s) 11 | 12 | define HELP_MENU 13 | Usage: make [] [ ...] 14 | 15 | Main targets: 16 | all (default) call the default target(s) 17 | build build the Docker image 18 | clean remove all default artifacts 19 | help show this help 20 | save save the Docker image to an archive 21 | rulesets update default rulesets (files will be created in current directory) 22 | 23 | Refer to the documentation for use cases and examples. 24 | endef 25 | 26 | .PHONY: all build clean help save 27 | 28 | all: clean 29 | 30 | build: 31 | ifndef DOCKER 32 | $(error Docker (https://docs.docker.com/install/) is required. Please install it first) 33 | endif 34 | $(DOCKER) image build \ 35 | --rm \ 36 | --tag $(DOCKER_REGISTRY)/wagga40/zircolite:$(DOCKER_TAG) \ 37 | $(DOCKER_BUILD_FLAGS) \ 38 | . 
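# Illustrative usage (not part of the original Makefile): the variables above are
# declared with "?=", so they can be overridden on the command line, e.g.:
#   make build DOCKER_TAG=2.40.0 DOCKER_BUILD_FLAGS="--no-cache"
#   make save DOCKER_TAG=2.40.0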
39 | 40 | help: 41 | $(info $(HELP_MENU)) 42 | 43 | clean: 44 | rm -rf "detected_events.json" 45 | rm -rf ./tmp-* 46 | rm -f zircolite.log 47 | rm -f fields.json 48 | rm -f zircolite.tar 49 | 50 | save: 51 | ifndef DOCKER 52 | $(error Docker (https://docs.docker.com/install/) is required. Please install it first) 53 | endif 54 | $(DOCKER) image save \ 55 | --output zircolite.tar \ 56 | $(DOCKER_REGISTRY)/wagga40/zircolite:$(DOCKER_TAG) 57 | 58 | rulesets: 59 | $(info Please check docs to generate rulesets) 60 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | #

![](pics/zircolite_400.png)

2 | 3 | ## Standalone SIGMA-based detection tool for EVTX, Auditd, Sysmon for Linux, XML, CSV or JSONL/NDJSON Logs 4 | ![](pics/Zircolite_v2.9.gif) 5 | 6 | [![python](https://img.shields.io/badge/python-3.10-blue)](https://www.python.org/) 7 | ![version](https://img.shields.io/badge/Architecture-64bit-red) 8 | 9 | **Zircolite** is a standalone tool written in Python 3 that allows you to use SIGMA rules on: 10 | 11 | - MS Windows EVTX (EVTX, XML, and JSONL formats) 12 | - Auditd logs 13 | - Sysmon for Linux 14 | - EVTXtract 15 | - CSV and XML logs 16 | - JSON Array logs 17 | 18 | ### Key Features 19 | 20 | - **Fast Processing**: Zircolite is relatively fast and can parse large datasets in just seconds. 21 | - **SIGMA Backend**: It is based on a SIGMA backend (SQLite) and does not use internal SIGMA-to-something conversion. 22 | - **Advanced Log Manipulation**: It can manipulate input logs by splitting fields and applying transformations, allowing for more flexible and powerful log analysis. 23 | - **Flexible Export**: Zircolite can export results to multiple formats using Jinja [templates](templates), including JSON, CSV, JSONL, Splunk, Elastic, Zinc, Timesketch, and more. 24 | - **Multiple Input Formats**: Support for various log formats including EVTX, JSON Lines, JSON Arrays, CSV, XML, and more. 25 | 26 | **You can use Zircolite directly in Python or use the binaries provided in the [releases](https://github.com/wagga40/Zircolite/releases).** 27 | 28 | **Documentation is available [here](https://wagga40.github.io/Zircolite/) (dedicated site) or [here](docs) (repo directory).** 29 | 30 | ## Requirements / Installation 31 | 32 | The project has only been tested with Python 3.10. If you only want to use the base functionality of Zircolite, you can install the dependencies with: `pip3 install -r requirements.txt`. However, `pip3 install -r requirements.full.txt` is strongly recommended. 33 | 34 | The use of [evtx_dump](https://github.com/omerbenamram/evtx) is **optional but enabled by default (because it is, for now, much faster)**. If you do not want to use it, use the `--noexternal` option (a minimal example is shown below). The tool is provided if you clone the Zircolite repository (the official repository is [here](https://github.com/omerbenamram/evtx)). 35 | 36 | :warning: On some systems (Mac, ARM, ...) the `evtx` Python library may need Rust and Cargo to be installed. 37 | 38 | ## Quick start 39 | 40 | Check tutorials made by others (EN, ES, and FR) [here](#tutorials).
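If you cannot or do not want to rely on the bundled `evtx_dump` binary, a minimal sketch using the `--noexternal` option mentioned in the Requirements section could look like this (the file and ruleset names are just the placeholders used elsewhere in this README):

```shell
# Sketch only: parse the EVTX file with the evtx Python library instead of the external evtx_dump binary
python3 zircolite.py --evtx sysmon.evtx \
    --ruleset rules/rules_windows_sysmon_pysigma.json \
    --noexternal
```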
41 | 42 | ### EVTX files: 43 | 44 | Help is available with: 45 | 46 | ```shell 47 | python3 zircolite.py -h 48 | ``` 49 | 50 | If your EVTX files have the extension ".evtx": 51 | 52 | ```shell 53 | # python3 zircolite.py --evtx <EVTX file or folder> --ruleset <ruleset.json> [--ruleset <ruleset.json>] 54 | python3 zircolite.py --evtx sysmon.evtx --ruleset rules/rules_windows_sysmon_pysigma.json 55 | ``` 56 | 57 | ### Other Log Formats: 58 | 59 | ```shell 60 | # For Auditd logs 61 | python3 zircolite.py --events auditd.log --ruleset rules/rules_linux.json --auditd 62 | 63 | # For Sysmon for Linux logs 64 | python3 zircolite.py --events sysmon.log --ruleset rules/rules_linux.json --sysmon4linux 65 | 66 | # For JSONL or NDJSON logs 67 | python3 zircolite.py --events <logs.jsonl> --ruleset rules/rules_windows_sysmon_pysigma.json --jsononly 68 | 69 | # For JSON Array logs 70 | python3 zircolite.py --events <logs.json> --ruleset rules/rules_windows_sysmon_pysigma.json --json-array 71 | 72 | # For CSV logs 73 | python3 zircolite.py --events <logs.csv> --ruleset rules/rules_windows_sysmon_pysigma.json --csv-input 74 | 75 | # For XML logs 76 | python3 zircolite.py --events <logs.xml> --ruleset rules/rules_windows_sysmon_pysigma.json --xml-input 77 | ``` 78 | 79 | - The `--events` argument can be a file or a folder. If it is a folder, all EVTX files in the current folder and subfolders will be selected. 80 | - The SYSMON ruleset used is a default one, intended for analyzing logs from endpoints with SYSMON installed. 81 | 82 | > [!TIP] 83 | > If you want to try the tool, you can test it with [EVTX-ATTACK-SAMPLES](https://github.com/sbousseaden/EVTX-ATTACK-SAMPLES) (EVTX Files). 84 | 85 | ### Running with Docker 86 | 87 | ```bash 88 | # Pull docker image 89 | docker pull wagga40/zircolite:latest 90 | # If your logs and rules are in a specific directory 91 | docker run --rm --tty \ 92 | -v $PWD:/case/input:ro \ 93 | -v $PWD:/case/output \ 94 | wagga40/zircolite:latest \ 95 | -e /case/input \ 96 | -o /case/output/detected_events.json \ 97 | -r /case/input/a_sigma_rule.yml 98 | ``` 99 | 100 | - Replace `$PWD` with the directory (absolute path only) where your logs and rules/rulesets are stored. 101 | 102 | ### Updating default rulesets 103 | 104 | ```shell 105 | python3 zircolite.py -U 106 | ``` 107 | 108 | > [!IMPORTANT] 109 | > Please note these rulesets are provided to use Zircolite out-of-the-box, but [you should generate your own rulesets](#why-you-should-build-your-own-rulesets) as they can be very noisy or slow. These auto-updated rulesets are available in the dedicated repository: [Zircolite-Rules](https://github.com/wagga40/Zircolite-Rules). 110 | 111 | ## Docs 112 | 113 | Complete documentation is available [here](docs). 114 | 115 | ## Mini-GUI 116 | 117 | The Mini-GUI can be used totally offline. It allows you to display and search results. You can automatically generate a Mini-GUI "package" with the `--package` option. To learn how to use the Mini-GUI, check the docs [here](docs/Advanced.md#mini-gui). 118 | 119 | ### Detected events by Mitre Att&ck (c) techniques and criticality levels 120 | 121 | ![](pics/gui.webp) 122 | 123 | ### Detected events Timeline 124 | 125 | ![](pics/gui-timeline.webp) 126 | 127 | ### Detected events by Mitre Att&ck (c) techniques displayed on the Matrix 128 | 129 | ![](pics/gui-matrix.webp) 130 | 131 | ## Tutorials, references and related projects 132 | 133 | ### Tutorials 134 | 135 | - **English**: [Russ McRee](https://holisticinfosec.io) has published a detailed [tutorial](https://holisticinfosec.io/post/2021-09-28-zircolite/) on SIGMA and Zircolite on his blog.
136 | 137 | - **Spanish**: **César Marín** has published a tutorial in Spanish [here](https://derechodelared.com/zircolite-ejecucion-de-reglas-sigma-en-ficheros-evtx/). 138 | 139 | - **French**: [IT-connect.fr](https://www.it-connect.fr/) has published [an extensive tutorial](https://www.it-connect.fr/) on Zircolite in French. 140 | 141 | - **French**: [IT-connect.fr](https://www.it-connect.fr/) has also published a [Hack the Box challenge Write-Up](https://www.it-connect.fr/hack-the-box-sherlocks-tracer-solution/) using Zircolite. 142 | 143 | ### References 144 | 145 | - [Florian Roth](https://github.com/Neo23x0/) cited Zircolite in his [**SIGMA Hall of Fame**](https://github.com/Neo23x0/Talks/blob/master/Sigma_Hall_of_Fame_20211022.pdf) during his talk at the October 2021 EU ATT&CK Workshop. 146 | - Zircolite has been cited and presented during [JSAC 2023](https://jsac.jpcert.or.jp/archive/2023/pdf/JSAC2023_workshop_sigma_jp.pdf). 147 | - Zircolite has been cited and used in multiple research papers: 148 | - **CIDRE Team**: 149 | - [PWNJUTSU - Website](https://pwnjutsu.irisa.fr) 150 | - [PWNJUTSU - Academic Paper](https://hal.inria.fr/hal-03694719/document) 151 | - [CERBERE: Cybersecurity Exercise for Red and Blue Team Entertainment, Reproducibility](https://centralesupelec.hal.science/hal-04285565/file/CERBERE_final.pdf) 152 | - **Universidad de la República**: 153 | - [A Process Mining-Based Method for Attacker Profiling Using the MITRE ATT&CK Taxonomy](https://journals-sol.sbc.org.br/index.php/jisa/article/view/3902/2840) 154 | 155 | --- 156 | 157 | ## License 158 | 159 | - All the **code** of the project is licensed under the [GNU Lesser General Public License](https://www.gnu.org/licenses/lgpl-3.0.en.html). 160 | - `evtx_dump` is under the MIT license. 161 | - The rules are released under the [Detection Rule License (DRL) 1.0](https://github.com/SigmaHQ/Detection-Rule-License/blob/main/LICENSE.Detection.Rules.md). 162 | 163 | --- 164 | -------------------------------------------------------------------------------- /Taskfile.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | 3 | vars: 4 | DOCKER: docker 5 | DOCKER_BUILD_FLAGS: '' 6 | DOCKER_REGISTRY: docker.io 7 | DOCKER_REPO: wagga40/zircolite 8 | RULES_URL: https://github.com/wagga40/Zircolite-Rules/archive/refs/heads/main.tar.gz 9 | PLATFORMS: linux/amd64,linux/arm64 10 | 11 | tasks: 12 | default: 13 | deps: [clean] 14 | 15 | get-version: 16 | desc: Get the version from zircolite.py 17 | cmds: 18 | - 'echo "Version: {{.VERSION}}"' 19 | vars: 20 | VERSION: 21 | sh: | 22 | cat zircolite.py | grep "version = \"" | cut -d'"' -f2 23 | 24 | docker-build: 25 | desc: Build the Docker image 26 | preconditions: 27 | - sh: command -v {{.DOCKER}} 28 | msg: "Docker (https://docs.docker.com/install/) is required. Please install it first" 29 | cmds: 30 | - '{{.DOCKER}} image build --rm --tag {{.DOCKER_REPO}}:dev {{.DOCKER_BUILD_FLAGS}} .' 31 | 32 | docker-build-multi-arch: 33 | desc: Build the Docker image for multiple architectures 34 | cmds: 35 | - '{{.DOCKER}} image build --rm --tag {{.DOCKER_REPO}}:{{.VERSION}} --platform {{.PLATFORMS}} {{.DOCKER_BUILD_FLAGS}} .' 36 | - '{{.DOCKER}} image build --rm --tag {{.DOCKER_REPO}}:latest --platform {{.PLATFORMS}} {{.DOCKER_BUILD_FLAGS}} .' 
37 | vars: 38 | VERSION: 39 | sh: | 40 | cat zircolite.py | grep "version = \"" | cut -d'"' -f2 41 | 42 | docker-push: 43 | desc: Push the Docker image to docker hub 44 | deps: [docker-build-multi-arch] 45 | cmds: 46 | - '{{.DOCKER}} image push {{.DOCKER_REGISTRY}}/{{.DOCKER_REPO}}:{{.VERSION}}' 47 | - '{{.DOCKER}} image push {{.DOCKER_REGISTRY}}/{{.DOCKER_REPO}}:latest' 48 | vars: 49 | VERSION: 50 | sh: | 51 | cat zircolite.py | grep "version = \"" | cut -d'"' -f2 52 | 53 | clean: 54 | desc: Remove all default artifacts 55 | cmds: 56 | - rm -rf detected_events.json 57 | - rm -rf ./tmp-* 58 | - rm -f zircolite.log 59 | - rm -f fields.json 60 | - rm -f zircolite.tar 61 | 62 | save: 63 | desc: Save the Docker image to an archive 64 | cmds: 65 | - | 66 | if ! command -v docker &> /dev/null; then 67 | echo "Docker (https://docs.docker.com/install/) is required. Please install it first" 68 | exit 1 69 | fi 70 | - '{{.DOCKER}} image save --output zircolite.tar {{.DOCKER_REGISTRY}}/{{.DOCKER_REPO}}:{{.DOCKER_TAG}}' 71 | 72 | update-rules: 73 | desc: Update default rulesets using Zircolite-Rules repository 74 | preconditions: 75 | - sh: command -v curl 76 | msg: "curl is required. Please install it first" 77 | prompt: "This task will overwrite the existing rulesets. Are you sure you want to continue?" 78 | cmds: 79 | - curl -sSL {{.RULES_URL}} | tar -xzf - 80 | - defer: rm -rf Zircolite-Rules-main 81 | - mv Zircolite-Rules-main/rules_*.json ./rules/ 82 | 83 | -------------------------------------------------------------------------------- /bin/create_universal_bin_macos.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # =================================================== 4 | # Universal Binary Creator for macOS 5 | # Creates a universal binary from x86_64 and arm64 binaries 6 | # =================================================== 7 | 8 | # Function for formatted message display 9 | print_msg() { 10 | local type=$1 11 | local message=$2 12 | 13 | case "$type" in 14 | "ERROR") echo -e "\033[1;31mERROR:\033[0m $message" ;; 15 | "INFO") echo -e "\033[1;32mINFO:\033[0m $message" ;; 16 | "WARNING") echo -e "\033[1;33mWARNING:\033[0m $message" ;; 17 | "USAGE") echo -e "\033[1;34mUSAGE:\033[0m $message" ;; 18 | "OPTIONS") echo -e "\033[1;34mOPTIONS:\033[0m $message" ;; 19 | "NOTE") echo -e "\033[1;34mNOTE:\033[0m $message" ;; 20 | "PROCESS") echo -e "\033[1;34mPROCESS:\033[0m $message" ;; 21 | "SUCCESS") echo -e "\033[1;32mSUCCESS:\033[0m $message" ;; 22 | "DRY_RUN") echo -e "\033[1;33m[DRY RUN]\033[0m $message" ;; 23 | *) echo "$message" ;; 24 | esac 25 | } 26 | 27 | # Display title 28 | echo "=====================================================" 29 | echo " Universal Binary Creator for macOS " 30 | echo "=====================================================" 31 | 32 | # Check for required tools 33 | if ! command -v lipo &> /dev/null; then 34 | print_msg "ERROR" "'lipo' command not found. Please install Xcode command line tools." 
35 | exit 1 36 | fi 37 | 38 | # Initialize dry-run flag 39 | DRY_RUN=false 40 | 41 | # Parse options 42 | while getopts "d" opt; do 43 | case $opt in 44 | d) DRY_RUN=true ;; 45 | *) ;; 46 | esac 47 | done 48 | shift $((OPTIND-1)) 49 | 50 | # Validate command line arguments 51 | if [ "$#" -lt 2 ] || [ "$#" -gt 3 ]; then 52 | print_msg "USAGE" "$0 [-d] []" 53 | print_msg "OPTIONS" "" 54 | echo " -d Dry run mode (show commands without executing them)" 55 | print_msg "NOTE" "The script will automatically detect the architecture of each binary." 56 | echo " If no output binary name is provided, a random name will be generated." 57 | exit 1 58 | fi 59 | 60 | # Store input and output paths 61 | BINARY1="$1" 62 | BINARY2="$2" 63 | OUTPUT_BINARY="$3" 64 | 65 | # Generate random output name if not provided 66 | if [ -z "$OUTPUT_BINARY" ]; then 67 | RANDOM_SUFFIX=$(cat /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 8 | head -n 1) 68 | OUTPUT_BINARY="universal_binary_${RANDOM_SUFFIX}" 69 | print_msg "WARNING" "No output name provided. Using random name: $OUTPUT_BINARY" 70 | fi 71 | 72 | # Validate input files exist 73 | for BINARY in "$BINARY1" "$BINARY2"; do 74 | if [ ! -f "$BINARY" ]; then 75 | print_msg "ERROR" "Binary '$BINARY' not found!" 76 | exit 1 77 | fi 78 | done 79 | 80 | # Detect architectures of input binaries 81 | ARCH1=$(lipo -archs "$BINARY1") 82 | ARCH2=$(lipo -archs "$BINARY2") 83 | 84 | # Check if we have both required architectures 85 | if [[ ! "$ARCH1 $ARCH2" =~ "x86_64" ]] || [[ ! "$ARCH1 $ARCH2" =~ "arm64" ]]; then 86 | print_msg "ERROR" "Missing required architecture!" 87 | echo "Binary 1 ($BINARY1): $ARCH1" 88 | echo "Binary 2 ($BINARY2): $ARCH2" 89 | echo "Both x86_64 and arm64 architectures are required." 90 | exit 1 91 | fi 92 | 93 | # Identify which binary is which architecture 94 | X86_BINARY="" 95 | ARM_BINARY="" 96 | 97 | if [[ "$ARCH1" =~ "x86_64" ]]; then X86_BINARY="$BINARY1"; fi 98 | if [[ "$ARCH1" =~ "arm64" ]]; then ARM_BINARY="$BINARY1"; fi 99 | if [[ "$ARCH2" =~ "x86_64" ]]; then X86_BINARY="$BINARY2"; fi 100 | if [[ "$ARCH2" =~ "arm64" ]]; then ARM_BINARY="$BINARY2"; fi 101 | 102 | print_msg "INFO" "Detected x86_64 binary: $X86_BINARY" 103 | print_msg "INFO" "Detected arm64 binary: $ARM_BINARY" 104 | 105 | # Ensure output directory exists 106 | OUTPUT_DIR=$(dirname "$OUTPUT_BINARY") 107 | if [ ! -d "$OUTPUT_DIR" ] && [ "$OUTPUT_DIR" != "." ]; then 108 | if [ "$DRY_RUN" = true ]; then 109 | print_msg "DRY_RUN" "Would create directory: $OUTPUT_DIR" 110 | else 111 | mkdir -p "$OUTPUT_DIR" 112 | fi 113 | fi 114 | 115 | # Create the universal binary 116 | print_msg "PROCESS" "Creating universal binary..." 117 | if [ "$DRY_RUN" = true ]; then 118 | print_msg "DRY_RUN" "Would execute: lipo -create -output \"$OUTPUT_BINARY\" \"$X86_BINARY\" \"$ARM_BINARY\"" 119 | print_msg "DRY_RUN" "Would execute: chmod +x \"$OUTPUT_BINARY\"" 120 | print_msg "DRY_RUN" "Universal binary would be created: $OUTPUT_BINARY" 121 | else 122 | if lipo -create -output "$OUTPUT_BINARY" "$X86_BINARY" "$ARM_BINARY"; then 123 | chmod +x "$OUTPUT_BINARY" 124 | print_msg "SUCCESS" "Universal binary created successfully: $OUTPUT_BINARY" 125 | lipo -info "$OUTPUT_BINARY" 126 | else 127 | print_msg "ERROR" "Failed to create universal binary." 
128 | exit 1 129 | fi 130 | fi -------------------------------------------------------------------------------- /bin/evtx_dump_lin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/bin/evtx_dump_lin -------------------------------------------------------------------------------- /bin/evtx_dump_lin_arm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/bin/evtx_dump_lin_arm -------------------------------------------------------------------------------- /bin/evtx_dump_mac: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/bin/evtx_dump_mac -------------------------------------------------------------------------------- /bin/evtx_dump_win.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/bin/evtx_dump_win.exe -------------------------------------------------------------------------------- /bin/package_evtx_binaries.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # Function for formatted message display 6 | print_msg() { 7 | local type=$1 8 | local message=$2 9 | 10 | case "$type" in 11 | "ERROR") echo -e "\033[1;31mERROR:\033[0m $message" ;; 12 | "INFO") echo -e "\033[1;32mINFO:\033[0m $message" ;; 13 | "WARNING") echo -e "\033[1;33mWARNING:\033[0m $message" ;; 14 | "USAGE") echo -e "\033[1;34mUSAGE:\033[0m $message" ;; 15 | "OPTIONS") echo -e "\033[1;34mOPTIONS:\033[0m $message" ;; 16 | "NOTE") echo -e "\033[1;34mNOTE:\033[0m $message" ;; 17 | "PROCESS") echo -e "\033[1;34mPROCESS:\033[0m $message" ;; 18 | "SUCCESS") echo -e "\033[1;32mSUCCESS:\033[0m $message" ;; 19 | "DRY_RUN") echo -e "\033[1;33m[DRY RUN]\033[0m $message" ;; 20 | *) echo "$message" ;; 21 | esac 22 | } 23 | 24 | # Function to determine latest release 25 | get_latest_release() { 26 | curl --silent "https://api.github.com/repos/omerbenamram/evtx/releases/latest" | 27 | grep '"tag_name":' | 28 | sed -E 's/.*"([^"]+)".*/\1/' 29 | } 30 | 31 | # Final binary names 32 | MACOS_FINAL_BINARY_NAME="evtx_dump_mac" 33 | LINUX_FINAL_BINARY_NAME="evtx_dump_lin" 34 | LINUX_ARM_FINAL_BINARY_NAME="evtx_dump_lin_arm" 35 | WINDOWS_FINAL_BINARY_NAME="evtx_dump_win.exe" 36 | 37 | # Check if linux cli tools are installed 38 | if ! command -v curl &> /dev/null; then 39 | print_msg "ERROR" "curl could not be found" 40 | exit 1 41 | fi 42 | # Initialize force flag 43 | FORCE_REMOVE=false 44 | 45 | # Parse options '-f' to force remove existing files 46 | while getopts "f" opt; do 47 | case $opt in 48 | f) FORCE_REMOVE=true ;; 49 | *) ;; 50 | esac 51 | done 52 | shift $((OPTIND-1)) 53 | 54 | # Check if destination files already exist 55 | if [ -f "$LINUX_FINAL_BINARY_NAME" ] || [ -f "$LINUX_ARM_FINAL_BINARY_NAME" ] || [ -f "$WINDOWS_FINAL_BINARY_NAME" ] || [ -f "$MACOS_FINAL_BINARY_NAME" ]; then 56 | if [ "$FORCE_REMOVE" = true ]; then 57 | print_msg "PROCESS" "Force removing existing files..." 
58 | rm -f "$LINUX_FINAL_BINARY_NAME" "$LINUX_ARM_FINAL_BINARY_NAME" "$WINDOWS_FINAL_BINARY_NAME" "$MACOS_FINAL_BINARY_NAME" 59 | else 60 | print_msg "WARNING" "Destination files already exist. Do you want to remove them? (y/n)" 61 | read -r response 62 | if [[ "$response" =~ ^([yY][eE][sS]|[yY])$ ]]; then 63 | print_msg "PROCESS" "Removing existing files..." 64 | rm -f "$LINUX_FINAL_BINARY_NAME" "$LINUX_ARM_FINAL_BINARY_NAME" "$WINDOWS_FINAL_BINARY_NAME" "$MACOS_FINAL_BINARY_NAME" 65 | else 66 | print_msg "ERROR" "Operation cancelled by user" 67 | exit 1 68 | fi 69 | fi 70 | fi 71 | 72 | # Check if destination directories already exist, if so remove them 73 | if [ -d "$INSTALL_DIR" ]; then 74 | print_msg "PROCESS" "Removing existing destination directory: $INSTALL_DIR" 75 | rm -rf $INSTALL_DIR || print_msg "ERROR" "Failed to remove install directory: $INSTALL_DIR" 76 | exit 1 77 | fi 78 | 79 | # Get the latest release version 80 | LATEST_VERSION=$(get_latest_release) 81 | print_msg "INFO" "Latest release: $LATEST_VERSION" 82 | 83 | # Create a temporary directory 84 | TEMP_DIR=$(mktemp -d) 85 | print_msg "INFO" "Created temporary directory: $TEMP_DIR" 86 | 87 | # Download all assets from the latest release 88 | print_msg "PROCESS" "Downloading binaries from $LATEST_VERSION..." 89 | ASSETS_URL="https://api.github.com/repos/omerbenamram/evtx/releases/latest" 90 | DOWNLOAD_URLS=$(curl -s $ASSETS_URL | grep "browser_download_url" | cut -d '"' -f 4) 91 | 92 | # Download each binary 93 | for url in $DOWNLOAD_URLS; do 94 | filename=$(basename $url) 95 | # Skip musl binaries 96 | if [[ "$filename" == *"musl"* ]]; then 97 | print_msg "INFO" "Skipping musl binary: $filename" 98 | continue 99 | fi 100 | print_msg "PROCESS" "Downloading $filename..." 101 | curl -s -L -o "$TEMP_DIR/$filename" "$url" 102 | done 103 | 104 | # Create a directory for the binaries if it doesn't exist 105 | INSTALL_DIR="./evtx_binaries" 106 | mkdir -p $INSTALL_DIR 107 | 108 | # Move the binaries to the install directory 109 | print_msg "PROCESS" "Moving binaries to $INSTALL_DIR..." 110 | mv $TEMP_DIR/* $INSTALL_DIR/ 111 | 112 | # Clean up 113 | rmdir $TEMP_DIR 114 | print_msg "INFO" "Temporary directory removed" 115 | 116 | # Make binaries executable 117 | chmod +x $INSTALL_DIR/* 118 | print_msg "SUCCESS" "All binaries downloaded and made executable in $INSTALL_DIR" 119 | 120 | # Detect and handle macOS binaries 121 | print_msg "PROCESS" "Detecting macOS binaries for universal binary creation..." 122 | 123 | # Find macOS binaries and rename all binaries in one pass 124 | MACOS_X86_BINARY="" 125 | MACOS_ARM_BINARY="" 126 | UNIVERSAL_BINARY="$INSTALL_DIR/$MACOS_FINAL_BINARY_NAME" 127 | 128 | 129 | print_msg "PROCESS" "Renaming binaries with platform-specific names..." 
130 | 131 | for binary in $INSTALL_DIR/*; do 132 | # Skip if not a file 133 | [ -f "$binary" ] || continue 134 | 135 | # Use file command to detect binary type 136 | FILE_INFO=$(file "$binary") 137 | 138 | # Process based on binary type 139 | if [[ "$FILE_INFO" == *"ELF"*"x86-64"* ]]; then 140 | # Linux x86_64 141 | mv "$binary" "$INSTALL_DIR/$LINUX_FINAL_BINARY_NAME" 142 | print_msg "INFO" "Renamed $(basename "$binary") to $LINUX_FINAL_BINARY_NAME" 143 | elif [[ "$FILE_INFO" == *"ELF"*"aarch64"* ]]; then 144 | # Linux ARM64 145 | mv "$binary" "$INSTALL_DIR/$LINUX_ARM_FINAL_BINARY_NAME" 146 | print_msg "INFO" "Renamed $(basename "$binary") to $LINUX_ARM_FINAL_BINARY_NAME" 147 | elif [[ "$FILE_INFO" == *"PE"*"executable"* ]] || [[ "$binary" == *".exe" ]]; then 148 | # Windows 149 | mv "$binary" "$INSTALL_DIR/$WINDOWS_FINAL_BINARY_NAME" 150 | print_msg "INFO" "Renamed $(basename "$binary") to $WINDOWS_FINAL_BINARY_NAME" 151 | elif [[ "$FILE_INFO" == *"Mach-O"* ]]; then 152 | if [[ "$FILE_INFO" == *"x86_64"* ]]; then 153 | print_msg "INFO" "Found macOS x86_64 binary: $(basename "$binary")" 154 | MACOS_X86_BINARY="$binary" 155 | elif [[ "$FILE_INFO" == *"arm64"* ]]; then 156 | print_msg "INFO" "Found macOS arm64 binary: $(basename "$binary")" 157 | MACOS_ARM_BINARY="$binary" 158 | fi 159 | fi 160 | done 161 | 162 | # Create universal binary if both architectures are found 163 | if [ -n "$MACOS_X86_BINARY" ] && [ -n "$MACOS_ARM_BINARY" ]; then 164 | print_msg "PROCESS" "Creating universal binary from x86_64 and arm64 binaries..." 165 | ./create_universal_bin_macos.sh "$MACOS_X86_BINARY" "$MACOS_ARM_BINARY" "$UNIVERSAL_BINARY" 166 | # Remove the architecture-specific binaries 167 | rm -f "$MACOS_X86_BINARY" "$MACOS_ARM_BINARY" 168 | print_msg "INFO" "Created universal macOS binary: evtx_dump_mac" 169 | elif [ -n "$MACOS_X86_BINARY" ]; then 170 | # If only x86_64 is available 171 | mv "$MACOS_X86_BINARY" "$UNIVERSAL_BINARY" 172 | print_msg "INFO" "Only x86_64 macOS binary available, renamed to evtx_dump_mac" 173 | elif [ -n "$MACOS_ARM_BINARY" ]; then 174 | # If only arm64 is available 175 | mv "$MACOS_ARM_BINARY" "$UNIVERSAL_BINARY" 176 | print_msg "INFO" "Only arm64 macOS binary available, renamed to evtx_dump_mac" 177 | fi 178 | 179 | # Put all the binaries in the root directory 180 | mv $INSTALL_DIR/* . 
181 | 182 | # Remove the install directory 183 | rm -rf $INSTALL_DIR || print_msg "ERROR" "Failed to remove install directory: $INSTALL_DIR" 184 | 185 | print_msg "SUCCESS" "All binaries downloaded and made executable in $INSTALL_DIR" 186 | -------------------------------------------------------------------------------- /config/fieldMappings.json: -------------------------------------------------------------------------------- 1 | { 2 | "exclusions" : ["xmlns"], 3 | "useless" : [null, ""], 4 | "mappings" : 5 | { 6 | "Event.EventData.UserData" : "UserData", 7 | "Event.System.Provider.#attributes.Guid" : "Guid", 8 | "Event.EventData.ContextInfo": "ContextInfo", 9 | "Event.System.Execution.#attributes.ProcessID": "ProcessID", 10 | "Event.System.Execution.#attributes.ThreadID": "ThreadID", 11 | "Event.System.EventID" : "EventID", 12 | "Event.System.EventID.#text" : "EventID", 13 | "Event.System.Channel":"Channel", 14 | "Event.System.Computer":"Computer", 15 | "Event.System.Correlation":"Correlation", 16 | "Event.System.Correlation.#attributes.ActivityID":"ActivityID", 17 | "Event.System.EventID.#attributes.Qualifiers":"Qualifiers", 18 | "Event.System.EventRecordID":"EventRecordID", 19 | "Event.System.Keywords":"Keywords", 20 | "Event.System.Level":"Level", 21 | "Event.System.Opcode":"Opcode", 22 | "Event.System.Provider.#attributes.EventSourceName":"EventSourceName", 23 | "Event.System.Provider.#attributes.Name" : "Provider_Name", 24 | "Event.System.Security":"Security", 25 | "Event.System.Security.#attributes.UserID":"UserID", 26 | "Event.System.Task":"Task", 27 | "Event.System.TimeCreated.#attributes.SystemTime":"SystemTime", 28 | "Event.System.Version":"Version", 29 | "Event.EventData.AccessList":"AccessList", 30 | "Event.EventData.AccessMask":"AccessMask", 31 | "Event.EventData.Accesses":"Accesses", 32 | "Event.EventData.AccountDomain":"AccountDomain", 33 | "Event.EventData.AccountExpires":"AccountExpires", 34 | "Event.EventData.AccountName":"AccountName", 35 | "Event.EventData.AddonName":"AddonName", 36 | "Event.EventData.Address":"Address", 37 | "Event.EventData.AddressLength":"AddressLength", 38 | "Event.EventData.AllowedToDelegateTo":"AllowedToDelegateTo", 39 | "Event.EventData.Application":"Application", 40 | "Event.EventData.AttributeLDAPDisplayName":"AttributeLDAPDisplayName", 41 | "Event.EventData.AttributeValue":"AttributeValue", 42 | "Event.EventData.AuditPolicyChanges":"AuditPolicyChanges", 43 | "Event.EventData.AuditSourceName":"AuditSourceName", 44 | "Event.EventData.AuthenticationPackageName":"AuthenticationPackageName", 45 | "Event.EventData.Binary":"Binary", 46 | "Event.EventData.BootMode":"BootMode", 47 | "Event.EventData.BuildVersion":"BuildVersion", 48 | "Event.EventData.CallingProcessName":"CallingProcessName", 49 | "Event.EventData.CallTrace":"CallTrace", 50 | "Event.EventData.CommandLine":"CommandLine", 51 | "Event.EventData.Company":"Company", 52 | "Event.EventData.Context":"Context", 53 | "Event.EventData.CreationUtcTime":"CreationUtcTime", 54 | "Event.EventData.CurrentDirectory":"CurrentDirectory", 55 | "Event.EventData.DCName":"DCName", 56 | "Event.EventData.Description":"Description", 57 | "Event.EventData.DestinationAddress":"DestinationAddress", 58 | "Event.EventData.DestinationHostname":"DestinationHostname", 59 | "Event.EventData.DestinationIp":"DestinationIp", 60 | "Event.EventData.DestinationIsIpv6":"DestinationIsIpv6", 61 | "Event.EventData.DestinationPort":"DestinationPort", 62 | "Event.EventData.DestinationPortName":"DestinationPortName", 63 | 
"Event.EventData.DestPort":"DestPort", 64 | "Event.EventData.Detail":"Detail", 65 | "Event.EventData.Details":"Details", 66 | "Event.EventData.DetectionSource":"DetectionSource", 67 | "Event.EventData.DeviceClassName":"DeviceClassName", 68 | "Event.EventData.DeviceDescription":"DeviceDescription", 69 | "Event.EventData.DeviceName":"DeviceName", 70 | "Event.EventData.DeviceNameLength":"DeviceNameLength", 71 | "Event.EventData.DeviceTime":"DeviceTime", 72 | "Event.EventData.DeviceVersionMajor":"DeviceVersionMajor", 73 | "Event.EventData.DeviceVersionMinor":"DeviceVersionMinor", 74 | "Event.EventData.DisplayName":"DisplayName", 75 | "Event.EventData.EngineVersion":"EngineVersion", 76 | "Event.EventData.ErrorCode":"ErrorCode", 77 | "Event.EventData.ErrorDescription":"ErrorDescription", 78 | "Event.EventData.ErrorMessage":"ErrorMessage", 79 | "Event.EventData.EventSourceId":"EventSourceId", 80 | "Event.EventData.EventType":"EventType", 81 | "Event.EventData.ExtensionId":"ExtensionId", 82 | "Event.EventData.ExtensionName":"ExtensionName", 83 | "Event.EventData.ExtraInfo":"ExtraInfo", 84 | "Event.EventData.FailureCode":"FailureCode", 85 | "Event.EventData.FailureReason":"FailureReason", 86 | "Event.EventData.FileVersion":"FileVersion", 87 | "Event.EventData.FilterHostProcessID":"FilterHostProcessID", 88 | "Event.EventData.FinalStatus":"FinalStatus", 89 | "Event.EventData.GrantedAccess":"GrantedAccess", 90 | "Event.EventData.Group":"Group", 91 | "Event.EventData.GroupDomain":"GroupDomain", 92 | "Event.EventData.GroupName":"GroupName", 93 | "Event.EventData.GroupSid":"GroupSid", 94 | "Event.EventData.HandleId":"HandleId", 95 | "Event.EventData.Hash":"Hash", 96 | "Event.EventData.Hashes":"Hashes", 97 | "Event.EventData.HiveName":"HiveName", 98 | "Event.EventData.HomeDirectory":"HomeDirectory", 99 | "Event.EventData.HomePath":"HomePath", 100 | "Event.EventData.HostApplication":"HostApplication", 101 | "Event.EventData.HostName":"HostName", 102 | "Event.EventData.HostVersion":"HostVersion", 103 | "Event.EventData.IdleStateCount":"IdleStateCount", 104 | "Event.EventData.Image":"Image", 105 | "Event.EventData.ImageLoaded":"ImageLoaded", 106 | "Event.EventData.ImagePath":"ImagePath", 107 | "Event.EventData.Initiated":"Initiated", 108 | "Event.EventData.IntegrityLevel":"IntegrityLevel", 109 | "Event.EventData.IpAddress":"IpAddress", 110 | "Event.EventData.IpPort":"IpPort", 111 | "Event.EventData.KeyLength":"KeyLength", 112 | "Event.EventData.LayerRTID":"LayerRTID", 113 | "Event.EventData.LDAPDisplayName":"LDAPDisplayName", 114 | "Event.EventData.LmPackageName":"LmPackageName", 115 | "Event.EventData.LogonGuid":"LogonGuid", 116 | "Event.EventData.LogonHours":"LogonHours", 117 | "Event.EventData.LogonId":"LogonId", 118 | "Event.EventData.LogonProcessName":"LogonProcessName", 119 | "Event.EventData.LogonType":"LogonType", 120 | "Event.EventData.MajorVersion":"MajorVersion", 121 | "Event.EventData.Data.#text":"Message", 122 | "Event.EventData.MinorVersion":"MinorVersion", 123 | "Event.EventData.NewName":"NewName", 124 | "Event.EventData.NewProcessId":"NewProcessId", 125 | "Event.EventData.NewProcessName":"NewProcessName", 126 | "Event.EventData.NewState":"NewState", 127 | "Event.EventData.NewThreadId":"NewThreadId", 128 | "Event.EventData.NewTime":"NewTime", 129 | "Event.EventData.NewUacValue":"NewUacValue", 130 | "Event.EventData.NewValue":"NewValue", 131 | "Event.EventData.NotificationPackageName":"NotificationPackageName", 132 | "Event.EventData.Number":"Number", 133 | 
"Event.EventData.NumberOfGroupPolicyObjects":"NumberOfGroupPolicyObjects", 134 | "Event.EventData.ObjectClass":"ObjectClass", 135 | "Event.EventData.ObjectName":"ObjectName", 136 | "Event.EventData.ObjectServer":"ObjectServer", 137 | "Event.EventData.ObjectType":"ObjectType", 138 | "Event.EventData.ObjectValueName":"ObjectValueName", 139 | "Event.EventData.OldTime":"OldTime", 140 | "Event.EventData.OldUacValue":"OldUacValue", 141 | "Event.EventData.OperationType":"OperationType", 142 | "Event.EventData.OriginalFileName":"OriginalFileName", 143 | "Event.EventData.PackageName":"PackageName", 144 | "Event.EventData.ParentCommandLine":"ParentCommandLine", 145 | "Event.EventData.ParentImage":"ParentImage", 146 | "Event.EventData.ParentProcessGuid":"ParentProcessGuid", 147 | "Event.EventData.ParentProcessId":"ParentProcessId", 148 | "Event.EventData.PasswordLastSet":"PasswordLastSet", 149 | "Event.EventData.Payload":"Payload", 150 | "Event.EventData.PerfStateCount":"PerfStateCount", 151 | "Event.EventData.PipeName":"PipeName", 152 | "Event.EventData.PreviousTime":"PreviousTime", 153 | "Event.EventData.PrimaryGroupId":"PrimaryGroupId", 154 | "Event.EventData.PrivilegeList":"PrivilegeList", 155 | "Event.EventData.ProcessCommandLine":"ProcessCommandLine", 156 | "Event.EventData.ProcessGuid":"ProcessGuid", 157 | "Event.EventData.ProcessId":"ProcessId", 158 | "Event.EventData.ProcessName":"ProcessName", 159 | "Event.EventData.ProcessingMode":"ProcessingMode", 160 | "Event.EventData.ProcessingTimeInMilliseconds":"ProcessingTimeInMilliseconds", 161 | "Event.EventData.Product":"Product", 162 | "Event.EventData.ProfilePath":"ProfilePath", 163 | "Event.EventData.Properties":"Properties", 164 | "Event.EventData.Protocol":"Protocol", 165 | "Event.EventData.ProtocolHostProcessID":"ProtocolHostProcessID", 166 | "Event.EventData.PuaCount":"PuaCount", 167 | "Event.EventData.PuaPolicyId":"PuaPolicyId", 168 | "Event.EventData.Publisher":"Publisher", 169 | "Event.EventData.QfeVersion":"QfeVersion", 170 | "Event.EventData.QueryName":"QueryName", 171 | "Event.EventData.QueryResults":"QueryResults", 172 | "Event.EventData.QueryStatus":"QueryStatus", 173 | "Event.EventData.RelativeTargetName":"RelativeTargetName", 174 | "Event.EventData.ResourceManager":"ResourceManager", 175 | "Event.EventData.RetryMinutes":"RetryMinutes", 176 | "Event.EventData.RuleName":"RuleName", 177 | "Event.EventData.SamAccountName":"SAMAccountName", 178 | "Event.EventData.SchemaVersion":"SchemaVersion", 179 | "Event.EventData.ScriptPath":"ScriptPath", 180 | "Event.EventData.ScriptBlockText":"ScriptBlockText", 181 | "Event.EventData.SecurityPackageName":"SecurityPackageName", 182 | "Event.EventData.ServerID":"ServerID", 183 | "Event.EventData.ServerURL":"ServerURL", 184 | "Event.EventData.Service":"Service", 185 | "Event.EventData.ServiceName":"ServiceName", 186 | "Event.EventData.ServicePrincipalNames":"ServicePrincipalNames", 187 | "Event.EventData.ServiceType":"ServiceType", 188 | "Event.EventData.ServiceVersion":"ServiceVersion", 189 | "Event.EventData.ShareLocalPath":"ShareLocalPath", 190 | "Event.EventData.ShareName":"ShareName", 191 | "Event.EventData.ShutdownActionType":"ShutdownActionType", 192 | "Event.EventData.ShutdownEventCode":"ShutdownEventCode", 193 | "Event.EventData.ShutdownReason":"ShutdownReason", 194 | "Event.EventData.SidHistory":"SidHistory", 195 | "Event.EventData.Signature":"Signature", 196 | "Event.EventData.SignatureStatus":"SignatureStatus", 197 | "Event.EventData.Signed":"Signed", 198 | 
"Event.EventData.SourceAddress":"SourceAddress", 199 | "Event.EventData.SourceHostname":"SourceHostname", 200 | "Event.EventData.SourceImage":"SourceImage", 201 | "Event.EventData.SourceIp":"SourceIp", 202 | "Event.EventData.SourceNetworkAddress":"SourceNetworkAddress", 203 | "Event.EventData.SourceIsIpv6":"SourceIsIpv6", 204 | "Event.EventData.SourcePort":"SourcePort", 205 | "Event.EventData.SourcePortName":"SourcePortName", 206 | "Event.EventData.SourceProcessGuid":"SourceProcessGuid", 207 | "Event.EventData.SourceProcessId":"SourceProcessId", 208 | "Event.EventData.StartAddress":"StartAddress", 209 | "Event.EventData.StartFunction":"StartFunction", 210 | "Event.EventData.StartModule":"StartModule", 211 | "Event.EventData.StartTime":"StartTime", 212 | "Event.EventData.StartType":"StartType", 213 | "Event.EventData.State":"State", 214 | "Event.EventData.Status":"Status", 215 | "Event.EventData.StopTime":"StopTime", 216 | "Event.EventData.SubStatus":"SubStatus", 217 | "Event.EventData.SubjectDomainName":"SubjectDomainName", 218 | "Event.EventData.SubjectLogonId":"SubjectLogonId", 219 | "Event.EventData.SubjectUserName":"SubjectUserName", 220 | "Event.EventData.SubjectUserSid":"SubjectUserSid", 221 | "Event.EventData.TSId":"TSId", 222 | "Event.EventData.TargetDomainName":"TargetDomainName", 223 | "Event.EventData.TargetFilename":"TargetFileName", 224 | "Event.EventData.TargetImage":"TargetImage", 225 | "Event.EventData.TargetInfo":"TargetInfo", 226 | "Event.EventData.TargetLogonGuid":"TargetLogonGuid", 227 | "Event.EventData.TargetLogonId":"TargetLogonId", 228 | "Event.EventData.TargetObject":"TargetObject", 229 | "Event.EventData.TargetProcessAddress":"TargetProcessAddress", 230 | "Event.EventData.TargetProcessGuid":"TargetProcessGuid", 231 | "Event.EventData.TargetProcessId":"TargetProcessId", 232 | "Event.EventData.TargetServerName":"TargetServerName", 233 | "Event.EventData.TargetSid":"TargetSid", 234 | "Event.EventData.TargetUserName":"TargetUserName", 235 | "Event.EventData.TargetUserSid":"TargetUserSid", 236 | "Event.EventData.TaskContent":"TaskContent", 237 | "Event.EventData.TaskContentNew":"TaskContentNew", 238 | "Event.EventData.TaskName":"TaskName", 239 | "Event.EventData.TerminalSessionId":"TerminalSessionId", 240 | "Event.EventData.ThrottleStateCount":"ThrottleStateCount", 241 | "Event.EventData.TicketEncryptionType":"TicketEncryptionType", 242 | "Event.EventData.TicketOptions":"TicketOptions", 243 | "Event.EventData.TimeSource":"TimeSource", 244 | "Event.EventData.TokenElevationType":"TokenElevationType", 245 | "Event.EventData.TransactionId":"TransactionId", 246 | "Event.EventData.TransmittedServices":"TransmittedServices", 247 | "Event.EventData.User":"User", 248 | "Event.EventData.UserAccountControl":"UserAccountControl", 249 | "Event.EventData.UserParameters":"UserParameters", 250 | "Event.EventData.UserPrincipalName":"UserPrincipalName", 251 | "Event.EventData.UserSid":"UserSid", 252 | "Event.EventData.UserWorkstations":"UserWorkstations", 253 | "Event.EventData.UtcTime":"UtcTime", 254 | "Event.EventData.Version":"Version", 255 | "Event.EventData.Workstation":"Workstation", 256 | "Event.EventData.WorkstationName":"WorkstationName", 257 | "Event.EventData.updateGuid":"updateGuid", 258 | "Event.EventData.updateRevisionNumber":"updateRevisionNumber", 259 | "Event.EventData.updateTitle":"updateTitle", 260 | "Event.EventData.ParentIntegrityLevel":"ParentIntegrityLevel", 261 | "Event.EventData.ParentUser":"ParentUser" 262 | }, 263 | "alias": 264 | { 265 | }, 266 | "split": 267 
| { 268 | "Hash": {"separator":",", "equal":"="}, 269 | "Hashes": {"separator":",", "equal":"="}, 270 | "ConfigurationFileHash": {"separator":",", "equal":"="} 271 | }, 272 | "transforms_enabled": true, 273 | "transforms":{ 274 | "proctitle": [{ 275 | "info": "Proctitle HEX to ASCII", 276 | "type": "python", 277 | "code": "def transform(param):\n\treturn bytes.fromhex(param).decode('ascii').replace('\\x00',' ')", 278 | "alias": false, 279 | "alias_name": "", 280 | "source_condition": ["auditd_input"], 281 | "enabled": true 282 | }], 283 | "cmd": [{ 284 | "info": "Cmd HEX to ASCII", 285 | "type": "python", 286 | "code": "def transform(param):\n\treturn bytes.fromhex(param).decode('ascii').replace('\\x00',' ')", 287 | "alias": false, 288 | "alias_name": "", 289 | "source_condition": ["auditd_input"], 290 | "enabled": true 291 | }], 292 | "CommandLine": [ 293 | { 294 | "info": "Base64 decoded CommandLine", 295 | "type": "python", 296 | "code": "\ndef transform(param):\n decoded_values = []\n concatenated_result = ''\n data = param\n\n base64_pattern = r'(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?'\n matches = re.findall(base64_pattern, data)\n for match in matches:\n decoded = base64.b64decode(match)\n encoding = chardet.detect(decoded)['encoding']\n if encoding and encoding in ['utf-8', 'ascii', 'utf-16le', 'ISO-8859-1']:\n decoded = decoded.decode(encoding)\n decoded = decoded.strip()\n if decoded.isprintable() and len(decoded) > 10 :\n decoded_values.append(decoded)\n \n concatenated_result = '|'.join(decoded_values)\n return concatenated_result\n", 297 | "alias": true, 298 | "alias_name": "CommandLine_b64decoded", 299 | "source_condition": ["evtx_input", "json_array_input", "json_input", "evtxtract_input", "db_input"], 300 | "enabled": false 301 | }, 302 | { 303 | "info": "CommandLine credentials extraction - Regex by Practical Security Analytics - https://practicalsecurityanalytics.com/extracting-credentials-from-windows-logs/", 304 | "type": "python", 305 | "code": "\ndef transform(param):\n import re\n regex_patterns = [\n r'net.+user\\s+(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+))\\s+(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+))',\n r'net.+use\\s+(?P\\\\\\\\\\S+)\\s+/USER:(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+))\\s+(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+))',\n r'schtasks.+/U\\s+(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+)).+/P\\s+(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+))',\n r'wmic.+/user:\\s*(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+)).+/password:\\s*(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+))',\n r'psexec.+-u\\s+(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+)).+-p\\s+(?P(?:\"((?:\\\\.|[^\"\\\\])*)\")|(?:[^\\s\"]+))'\n ]\n\n matches = []\n \n for pattern in regex_patterns:\n found = re.findall(pattern, param)\n if len(found) > 0:\n for match in list(found[0]):\n if len(match) > 0: \n matches.append(match) \n\n concatenated_result = '|'.join(matches)\n if concatenated_result == None:\n return ''\n return concatenated_result\n", 306 | "alias": true, 307 | "alias_name": "CommandLine_Extracted_Creds", 308 | "source_condition": ["evtx_input", "json_array_input", "json_input", "evtxtract_input", "db_input"], 309 | "enabled": false 310 | } 311 | ], 312 | "Payload": [ 313 | { 314 | "info": "Base64 decoded Payload", 315 | "type": "python", 316 | "code": "\ndef transform(param):\n decoded_values = []\n concatenated_result = ''\n data = param\n\n base64_pattern = r'(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?'\n 
matches = re.findall(base64_pattern, data)\n for match in matches:\n decoded = base64.b64decode(match)\n encoding = chardet.detect(decoded)['encoding']\n if encoding and encoding in ['utf-8', 'ascii', 'utf-16le', 'ISO-8859-1']:\n decoded = decoded.decode(encoding)\n decoded = decoded.strip()\n if decoded.isprintable() and len(decoded) > 10 :\n decoded_values.append(decoded)\n \n concatenated_result = '|'.join(decoded_values)\n return concatenated_result\n", 317 | "alias": true, 318 | "alias_name": "Payload_b64decoded", 319 | "source_condition": ["evtx_input", "json_array_input", "json_input", "evtxtract_input", "db_input"], 320 | "enabled": false 321 | } 322 | ], 323 | "ServiceFileName":[ 324 | { 325 | "info": "Base64 decoded ServiceFileName", 326 | "type": "python", 327 | "code": "\ndef transform(param):\n decoded_values = []\n concatenated_result = ''\n data = param\n\n base64_pattern = r'(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?'\n matches = re.findall(base64_pattern, data)\n for match in matches:\n decoded = base64.b64decode(match)\n encoding = chardet.detect(decoded)['encoding']\n if encoding and encoding in ['utf-8', 'ascii', 'utf-16le', 'ISO-8859-1']:\n decoded = decoded.decode(encoding)\n decoded = decoded.strip()\n if decoded.isprintable() and len(decoded) > 10 :\n decoded_values.append(decoded)\n \n concatenated_result = '|'.join(decoded_values)\n return concatenated_result\n", 328 | "alias": true, 329 | "alias_name": "ServiceFileName_b64decoded", 330 | "source_condition": ["evtx_input", "json_array_input", "json_input", "evtxtract_input", "db_input"], 331 | "enabled": false 332 | } 333 | ] 334 | } 335 | } -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/docs/.nojekyll -------------------------------------------------------------------------------- /docs/Advanced.md: -------------------------------------------------------------------------------- 1 | # Advanced use 2 | 3 | ## Working with large datasets 4 | 5 | Zircolite tries to be as fast as possible so a lot of data is stored in memory. So : 6 | 7 | - **Zircolite memory use oscillate between 2 or 3 times the size of the logs** 8 | - It is not a good idea to use it on very big EVTX files or a large number of EVTX **as is** 9 | 10 | There are a lot of ways to speed up Zircolite : 11 | 12 | - Using as much CPU core as possible : see below "[Using GNU Parallel](using-gnu-parallel)" 13 | - Using [Filtering](#filtering) 14 | 15 | > [!NOTE] 16 | > There is an option to heavily limit the memory usage of Zircolite by using the `--ondiskdb ` argument. This is only usefull to avoid errors when dealing with very large datasets and if you have have a lot of time... **This should be used with caution and the below alternatives are far better choices**. 17 | 18 | ### Using GNU Parallel 19 | 20 | Except when `evtx_dump` is used, Zircolite only use one core. So if you have a lot of EVTX files and their total size is big, it is recommended that you use a script to launch multiple Zircolite instances. On Linux or MacOS The easiest way is to use **GNU Parallel**. 
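Before the parallelization examples below, here is a minimal sketch of the `--ondiskdb` option mentioned in the note above (the database file name is an arbitrary placeholder):

```shell
# Sketch only: trade speed for memory by keeping the working database on disk
python3 zircolite.py --evtx very_large_logs/ \
    --ruleset rules/rules_windows_sysmon_pysigma.json \
    --ondiskdb zircolite_ondisk.db
```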
21 | 22 | > [!NOTE] 23 | > On macOS, please use GNU find (`brew install findutils` will install `gfind`) 24 | 25 | - **"DFIR Case mode": One directory per computer/endpoint** 26 | 27 | This mode is very useful when you have a case where all your evidence is stored per computer (one directory per computer containing all EVTX for this computer). It will create one result file per computer in the current directory. 28 | 29 | ```shell 30 | find -maxdepth 1 -mindepth 1 -type d | \ 31 | parallel --bar python3 zircolite.py -e {} \ 32 | -r rules/rules_windows_sysmon_pysigma.json --outfile {/.}.json 33 | ``` 34 | 35 | One downside of this mode is that if you have fewer computer directories than CPU cores, not all cores will be used. 36 | 37 | - **"WEF/WEC mode": One Zircolite instance per EVTX** 38 | 39 | You can use this mode when you have a lot of aggregated EVTX files coming from multiple computers. It is generally the case when you use WEF/WEC and you recover the EVTX files from the collector. This mode will create one result file per EVTX file. 40 | 41 | ```shell 42 | find -type f -name "*.evtx" | \ 43 | parallel -j -1 --progress python3 zircolite.py -e {} \ 44 | -r rules/rules_windows_sysmon_pysigma.json --outfile {/.}.json 45 | ``` 46 | 47 | In this example, `-j -1` uses all cores but one. You can adjust the number of cores used with this argument. 48 | 49 | ## Keep data used by Zircolite 50 | 51 | **Zircolite** has a lot of arguments that can be used to keep the data used to perform SIGMA detections: 52 | 53 | - `--dbfile <db_path>` allows you to export all the logs to a SQLite 3 database file. You can query the logs with SQL statements to find more than what the SIGMA rules could have found 54 | - `--keeptmp` allows you to keep the source logs (EVTX/Auditd/Evtxtract/XML...) converted to JSON format 55 | - `--keepflat` allows you to keep the source logs (EVTX/Auditd/Evtxtract/XML...) converted to a flattened JSON format 56 | 57 | ## Filtering 58 | 59 | Zircolite has a lot of filtering options to speed up the detection process. Don't overlook these options because they can save you a lot of time. 60 | 61 | ### File filters 62 | 63 | Some EVTX files are not used by SIGMA rules but can become quite large (a good example is `Microsoft-Windows-SystemDataArchiver%4Diagnostic.evtx`). If you use Zircolite with a directory as the input argument, all EVTX files will be converted, saved and matched against the SIGMA rules. 64 | 65 | To speed up the detection process, you may want to use Zircolite only on files matching (or not matching) a specific pattern. For that you can use the **filters** provided by these two command line arguments: 66 | 67 | - `-s` or `--select`: select files partly matching the provided string (case insensitive) 68 | - `-a` or `--avoid`: exclude files partly matching the provided string (case insensitive) 69 | 70 | > [!NOTE] 71 | > When using the two arguments, the "select" argument is always applied first and then the "avoid" argument is applied. So, it is possible to exclude files from included files but not the opposite.
72 | 73 | - Only use EVTX files that contain "sysmon" in their names 74 | 75 | ```shell 76 | python3 zircolite.py --evtx logs/ --ruleset rules/rules_windows_sysmon_pysigma.json \ 77 | --select sysmon 78 | ``` 79 | - Exclude "Microsoft-Windows-SystemDataArchiver%4Diagnostic.evtx" 80 | 81 | ```shell 82 | python3 zircolite.py --evtx logs/ --ruleset rules/rules_windows_sysmon_pysigma.json \ 83 | --avoid systemdataarchiver 84 | ``` 85 | 86 | - Only use EVTX files with "operational" in their names but exclude "defender" related logs 87 | 88 | ```shell 89 | python3 zircolite.py --evtx logs/ --ruleset rules/rules_windows_sysmon_pysigma.json \ 90 | --select operational --avoid defender 91 | ``` 92 | 93 | For example, the **Sysmon** ruleset available in the `rules` directory only uses the following channels (names have been shortened): *Sysmon, Security, System, Powershell, Defender, AppLocker, DriverFrameworks, Application, NTLM, DNS, MSexchange, WMI-activity, TaskScheduler*. 94 | 95 | So if you use the Sysmon ruleset with the following selection filters, it should speed up `Zircolite` execution: 96 | 97 | ```shell 98 | python3 zircolite.py --evtx logs/ --ruleset rules/rules_windows_sysmon_pysigma.json \ 99 | --select sysmon --select security.evtx --select system.evtx \ 100 | --select application.evtx --select Windows-NTLM --select DNS \ 101 | --select powershell --select defender --select applocker \ 102 | --select driverframeworks --select "msexchange management" \ 103 | --select TaskScheduler --select WMI-activity 104 | ``` 105 | 106 | ### Time filters 107 | 108 | Sometimes you only want to work on a specific time range to speed up analysis. With Zircolite, it is possible to filter on a specific time range just by using the `--after` and `--before` arguments and their respective shorter versions `-A` and `-B`. Please note that: 109 | 110 | - The filter applies to the `SystemTime` field of each event 111 | - The `--after` and `--before` arguments can be used independently 112 | - The timestamps provided must have the following format: YYYY-MM-DD**T**HH:MM:SS (hours are in 24h format) 113 | 114 | Examples: 115 | 116 | - Select all events between 2021-06-02 22:40:00 and 2021-06-02 23:00:00: 117 | 118 | ```shell 119 | python3 zircolite.py --evtx logs/ --ruleset rules/rules_windows_sysmon_pysigma.json \ 120 | -A 2021-06-02T22:40:00 -B 2021-06-02T23:00:00 121 | ``` 122 | 123 | - Select all events after 2021-06-01 12:00:00: 124 | 125 | ```shell 126 | python3 zircolite.py --evtx logs/ --ruleset rules/rules_windows_sysmon_pysigma.json \ 127 | -A 2021-06-01T12:00:00 128 | ``` 129 | 130 | ### Rule filters 131 | 132 | Some rules can be noisy or slow on specific datasets (check [here](https://github.com/wagga40/Zircolite/tree/master/rules/README.md)), so it is possible to skip them by using the `-R` or `--rulefilter` argument. This argument can be used multiple times. 133 | 134 | The filter applies to the rule title. To avoid unexpected side effects, **comparison is case-sensitive**. For example, if you do not want to use any MSHTA-related rules: 135 | 136 | ```shell 137 | python3 zircolite.py --evtx logs/ \ 138 | --ruleset rules/rules_windows_sysmon_pysigma.json \ 139 | -R MSHTA 140 | ``` 141 | 142 | ### Limit the number of detected events 143 | 144 | Sometimes, SIGMA rules can be very noisy (and generate a lot of false positives) but you still want to keep them in your rulesets. It is possible to filter out rules that return too many detected events with the option `--limit <number>`.
**Please note that when using this option, the rules are not skipped, the extra results are just ignored**, which is useful when forwarding events to Splunk. 145 | 146 | ## Templating and Formatting 147 | 148 | Zircolite provides a templating system based on Jinja 2. It allows you to change the output format to suit your needs (Splunk or ELK integration, grep-able output...). There are some templates available in the [Templates directory](https://github.com/wagga40/Zircolite/tree/master/templates) of the repository : Splunk, Timesketch, ... To use the template system, use these arguments : 149 | 150 | - `--template <template_filename>` 151 | - `--templateOutput <output_filename>` 152 | 153 | ```shell 154 | python3 zircolite.py --evtx sample.evtx --ruleset rules/rules_windows_sysmon_pysigma.json \ 155 | --template templates/exportForSplunk.tmpl --templateOutput exportForSplunk.json 156 | ``` 157 | 158 | It is possible to use multiple templates, provided that each `--template` argument has an associated `--templateOutput` argument. 159 | 160 | ## Mini-GUI 161 | 162 | ![](pics/gui.jpg) 163 | 164 | 165 | The Mini-GUI can be used totally offline; it allows the user to display and search results. It uses [datatables](https://datatables.net/) and the [SB Admin 2 theme](https://github.com/StartBootstrap/startbootstrap-sb-admin-2). 166 | 167 | ### Automatic generation 168 | 169 | As of Zircolite 2.1.0, the easiest way to use the Mini-GUI is to generate a package with the `--package` option. A zip file containing all the necessary data will be generated at the root of the repository. 170 | 171 | ### Manual generation 172 | 173 | You need to generate a `data.js` file with the `exportForZircoGui.tmpl` template, decompress the zircogui.zip file in the [gui](https://github.com/wagga40/Zircolite/tree/master/gui/) directory and replace the `data.js` file in it with yours : 174 | 175 | ```shell 176 | python3 zircolite.py --evtx sample.evtx \ 177 | --ruleset rules/rules_windows_sysmon_pysigma.json \ 178 | --template templates/exportForZircoGui.tmpl --templateOutput data.js 179 | 7z x gui/zircogui.zip 180 | mv data.js zircogui/ 181 | ``` 182 | 183 | Then you just have to open `index.html` in your favorite browser and click on a MITRE ATT&CK category or an alert level. 184 | 185 | > [!WARNING] 186 | > **The mini-GUI was not built to handle big datasets**. 187 | 188 | ## Packaging Zircolite 189 | 190 | ### PyInstaller 191 | 192 | * Install Python 3.8 (or above) on the same OS as the one you want to use Zircolite on 193 | * Install all dependencies : `pip3 install -r requirements.txt` 194 | * After the Python install, you will need PyInstaller : `pip3 install pyinstaller` 195 | * In the root folder of Zircolite type : `pyinstaller -c --onefile zircolite.py` 196 | * The `dist` folder will contain the packaged app 197 | 198 | ### Nuitka 199 | 200 | * Install Python 3.8 (or above) on the same OS as the one you want to use Zircolite on 201 | * Install all dependencies : `pip3 install -r requirements.txt` 202 | * After the Python install, you will need Nuitka : `pip3 install nuitka` 203 | * In the root folder of Zircolite type : `python3 -m nuitka --onefile zircolite.py` 204 | 205 | > [!WARNING] 206 | > When packaging with PyInstaller or Nuitka, some AV engines may not like your package.
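Once packaged, the binary is used exactly like the script. As a hedged sketch (the binary name and path below are illustrative and depend on your OS and packager version), a PyInstaller build could be run like this, from the Zircolite root directory since the packager does not like being run from elsewhere :

```shell
# Hypothetical run of a PyInstaller-packaged build (binary name may differ on your system)
cd Zircolite
./dist/zircolite -e sample.evtx -r rules/rules_windows_sysmon_pysigma.json
```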
207 | 208 | ## Other tools 209 | 210 | Some other tools (mostly untested) have included a way to run Zircolite : 211 | 212 | - [Kape](https://www.kroll.com/en/services/cyber-risk/incident-response-litigation-support/kroll-artifact-parser-extractor-kape) has a module for Zircolite : [here](https://github.com/EricZimmerman/KapeFiles/tree/master/Modules/Apps/GitHub) 213 | - [Velociraptor](https://github.com/Velocidex/velociraptor) has an artifact for Zircolite : [here](https://docs.velociraptor.app/exchange/artifacts/pages/windows.eventlogs.zircolite/) 214 | -------------------------------------------------------------------------------- /docs/Internals.md: -------------------------------------------------------------------------------- 1 | # Internals 2 | 3 | ## Zircolite architecture 4 | 5 | **Zircolite is more a workflow than a real detection engine**. To put it simply, it leverages the ability of the sigma converter to output rules in SQLite format. Zircolite simply applies SQLite-converted rules to EVTX stored in an in-memory SQLite DB. 6 | 7 | ![](pics/Zircolite.png) 8 | 9 | ## Project structure 10 | 11 | ```text 12 | ├── Makefile # Very basic Makefile 13 | ├── Readme.md # Do I need to explain ? 14 | ├── bin # Directory containing all external binaries (evtx_dump) 15 | ├── config # Directory containing the config files 16 | ├── docs # Directory containing the documentation 17 | ├── pics # Pictures directory - not really relevant 18 | ├── rules # Sample rules you can use 19 | ├── templates # Jinja2 templates 20 | └── zircolite.py # Zircolite ! 21 | ``` 22 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Zircolite documentation 2 | 3 | **Zircolite** is a standalone tool written in Python 3 that allows you to use SIGMA rules on: 4 | 5 | - MS Windows EVTX (EVTX, XML, and JSONL formats) 6 | - Auditd logs 7 | - Sysmon for Linux 8 | - EVTXtract 9 | - CSV and XML logs 10 | 11 | ### Key Features 12 | 13 | - **Fast Processing**: Zircolite is relatively fast and can parse large datasets in just seconds. 14 | - **SIGMA Backend**: It is based on a SIGMA backend (SQLite) and does not use internal SIGMA-to-something conversion. 15 | - **Advanced Log Manipulation**: It can manipulate input logs by splitting fields and applying transformations, allowing for more flexible and powerful log analysis. 16 | - **Flexible Export**: Zircolite can export results to multiple formats using Jinja [templates](templates), including JSON, CSV, JSONL, Splunk, Elastic, Zinc, Timesketch, and more. 17 | 18 | **You can use Zircolite directly in Python or use the binaries provided in the [releases](https://github.com/wagga40/Zircolite/releases).** 19 | -------------------------------------------------------------------------------- /docs/Usage.md: -------------------------------------------------------------------------------- 1 | # Usage 2 | 3 | > [!NOTE] 4 | > If you use the packaged version of Zircolite, remember to replace `python3 zircolite.py` in the examples with the packaged binary name. 5 | 6 | ## Requirements and Installation 7 | 8 | - [Release versions](https://github.com/wagga40/Zircolite/releases) are standalone, they are easier to use and deploy. Be careful, **the packager (nuitka) does not like Zircolite being run in from another directory**. 
9 | - If you have an **ARM CPU, it is strongly recommended to use the release versions** 10 | - The repository version of Zircolite works with **Python 3.8** and above 11 | - The repository version can run on Linux, Mac OS and Windows 12 | - The use of [evtx_dump](https://github.com/omerbenamram/evtx) is **optional but used by default (because it is, for now, much faster)**. If you do not want to use it, you have to use the `--noexternal` option. The tool is provided if you clone the Zircolite repository (the official repository is [here](https://github.com/omerbenamram/evtx)). 13 | 14 | ### Installation from releases 15 | 16 | - Get the appropriate version [here](https://github.com/wagga40/Zircolite/releases) 17 | 18 | ```bash 19 | # DECOMPRESS 20 | 7z x zircolite_lin_amd64_glibc_2.20.0.zip 21 | cd zircolite_lin_amd64_glibc/ 22 | 23 | # EXAMPLE RUN 24 | git clone https://github.com/sbousseaden/EVTX-ATTACK-SAMPLES.git 25 | ./zircolite_lin_amd64_glibc.bin -e EVTX-ATTACK-SAMPLES/Execution/ \ 26 | -r rules/rules_windows_sysmon_pysigma.json 27 | 28 | ``` 29 | 30 | ### Installation from repository 31 | 32 | #### Using [venv](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/) on Linux/MacOS 33 | 34 | ```shell 35 | # INSTALL 36 | git clone https://github.com/wagga40/Zircolite.git 37 | cd Zircolite 38 | python3 -m venv .venv 39 | source .venv/bin/activate 40 | pip3 install -r requirements.txt 41 | 42 | # EXAMPLE RUN 43 | git clone https://github.com/sbousseaden/EVTX-ATTACK-SAMPLES.git 44 | python3 zircolite.py -e EVTX-ATTACK-SAMPLES/ -r rules/rules_windows_sysmon_pysigma.json 45 | deactivate # Quit Python3 venv 46 | ``` 47 | 48 | #### Using [Pdm](https://pdm-project.org/latest/) or [Poetry](https://python-poetry.org) 49 | 50 | ```shell 51 | # INSTALL 52 | git clone https://github.com/wagga40/Zircolite.git 53 | cd Zircolite 54 | pdm init -n 55 | cat requirements.txt | xargs pdm add 56 | 57 | # EXAMPLE RUN 58 | git clone https://github.com/sbousseaden/EVTX-ATTACK-SAMPLES.git 59 | pdm run python3 zircolite.py -e EVTX-ATTACK-SAMPLES/ \ 60 | -r rules/rules_windows_sysmon_pysigma.json 61 | ``` 62 | 63 | If you want to use *poetry*, just replace the "pdm" command in the above example by "poetry". 64 | 65 | ### Known issues 66 | 67 | - Sometimes `evtx_dump` hangs under MS Windows; this is not related to Zircolite. If it happens to you, the use of `--noexternal` usually solves the problem. If you can share the EVTX files on which the blocking happened, feel free to post an issue in the [evtx_dump](https://github.com/omerbenamram/evtx/issues) repository. 68 | - If you use the packaged/release version, please note that the packager (Nuitka) does not like Zircolite being run from another directory (e.g. : `c:\SOMEDIR\Zircolite\Zircolite.exe -e sample.evtx -r rules.json`). 69 | 70 | 71 | ## Basic usage 72 | 73 | Help is available with `zircolite.py -h`. 74 | 75 | Basically, the simplest way to use Zircolite is something like this: 76 | 77 | ```shell 78 | python3 zircolite.py --events <logs file or folder> --ruleset <ruleset> 79 | ``` 80 | 81 | Where : 82 | 83 | - `--events` is a filename or a directory containing the logs you want to analyse (`--evtx` and `-e` can be used instead of `--events`). Zircolite supports the following formats : EVTX, XML, JSON (one event per line), JSON Array (one big array), EVTXTRACT, CSV, Auditd, Sysmon for Linux 84 | - `--ruleset` is a file or directory containing the Sigma rules to use for detection.
Zircolite has its own format called the "Zircolite ruleset" where all the rules are in one JSON file. However, as of version *2.20.0*, Zircolite can directly use Sigma rules in YAML format (a YAML file or a directory containing YAML files). 85 | 86 | Multiple rulesets can be specified, and results can be per-ruleset or combined (with `--combine-rulesets` or `-cr`) : 87 | 88 | ```shell 89 | # Example with a Zircolite ruleset and a Sigma rule. Results will be displayed per-ruleset 90 | python3 zircolite.py --events sample.evtx --ruleset rules/rules_windows_sysmon_pysigma.json \ 91 | --ruleset schtasks.yml 92 | # Example with a Zircolite ruleset and a Sigma rule. Results will be displayed combined 93 | python3 zircolite.py --events sample.evtx --ruleset rules/rules_windows_sysmon_pysigma.json \ 94 | --ruleset schtasks.yml --combine-rulesets 95 | ``` 96 | 97 | By default : 98 | 99 | - `--ruleset` is not mandatory; the default ruleset is `rules/rules_windows_generic_pysigma.json` 100 | - Results are written in `detected_events.json` in the same directory as Zircolite; you can choose a CSV formatted output with `--csv` 101 | - A `zircolite.log` file will be created in the current working directory; it can be disabled with `--nolog` 102 | - When providing a directory for the event logs, `Zircolite` will automatically use a default file extension; you can change it with `--fileext`. This option can be used with wildcards or [Python Glob syntax](https://docs.python.org/3/library/glob.html) but `*.` will automatically be added before the given parameter value : `*.<fileext>`. For example `--fileext log` will search for `*.log` files in the given path and `--fileext log.*` will search for `*.log.*`, which can be useful when handling Linux log files (auditd.log.1...) 103 | 104 | ### EVTX files 105 | 106 | If your EVTX files have the extension ".evtx" : 107 | 108 | ```shell 109 | python3 zircolite.py --evtx <EVTX file or folder> \ 110 | --ruleset <ruleset> 111 | python3 zircolite.py --evtx ../Logs --ruleset rules/rules_windows_sysmon_pysigma.json 112 | ``` 113 | 114 | ### XML logs 115 | 116 | `evtx_dump` or services like **VirusTotal** sometimes output logs in text format with XML logs inside.
117 | 118 | To do that with `evtx_dump`, you have to use the following command line : 119 | ```shell 120 | ./evtx_dump -o xml -f <output_file> --no-indent --dont-show-record-number <evtx_file> 121 | ``` 122 | 123 | And it produces something like this (1 event per line): 124 | 125 | ```xml 126 | 15410XXXXXXXXMicrosoft-Windows-Sysmon/OperationalXXXXXXXXXXXXXXX-XX-XX XX:XX:XX.XXXXXXXXXXXXXXXXXXXXXXXXXXXXMicrosoft® Windows® Operating SystemMicrosoft CorporationXXXXXXXXXXXXXXXXXXXXXXXXXXXX0HighXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX 127 | 128 | ``` 129 | 130 | **VirusTotal** : if you have an enterprise account, it will allow you to get logs in a pretty similar format : 131 | 132 | ```xml 133 | 134 | 135 | 13241300x8000000000000000749827Microsoft-Windows-Sysmon/OperationalXXXXXX-SetValueXXXX-XX-XX XX:XX:XX.XXXXXXXXXXXXXXXC:\Windows\Explorer.EXEXXXXXXXXBinary Data 136 | 137 | ``` 138 | 139 | **Zircolite** will handle both formats with the following command line : 140 | 141 | ```shell 142 | python3 zircolite.py --events <XML file or folder> --ruleset <ruleset> --xml 143 | python3 zircolite.py --events Microsoft-Windows-SysmonOperational.xml \ 144 | --ruleset rules/rules_windows_sysmon_pysigma.json --xml 145 | ``` 146 | 147 | ### EVTXtract logs 148 | 149 | Willi Ballenthin has built a tool called [EVTXtract](https://github.com/williballenthin/EVTXtract) that recovers and reconstructs fragments of EVTX log files from raw binary data, including unallocated space and memory images. 150 | 151 | **Zircolite** can work with the output of EVTXtract with the following command line : 152 | 153 | ```shell 154 | python3 zircolite.py --events <EVTXtract output file> --ruleset <ruleset> --evtxtract 155 | ``` 156 | 157 | ### Auditd logs 158 | 159 | ```shell 160 | python3 zircolite.py --events auditd.log --ruleset rules/rules_linux.json --auditd 161 | ``` 162 | 163 | > [!NOTE] 164 | > `--events` and `--evtx` are strictly equivalent but `--events` makes more sense with non-EVTX logs. 165 | 166 | ### Sysmon for Linux logs 167 | 168 | Sysmon for Linux was released in October 2021. It outputs XML in text format with one event per line. As of version 2.6.0, **Zircolite** supports Sysmon for Linux log files. You just have to add `-S`, `--sysmon4linux`, `--sysmon-linux` or `--sysmon-linux-input` to your command line : 169 | 170 | ```shell 171 | python3 zircolite.py --events sysmon.log --ruleset rules/rules_linux.json --sysmon-linux 172 | ``` 173 | 174 | > [!NOTE] 175 | > Since the logs come from Linux, the default file extension when using `-S` is `.log` 176 | 177 | ### JSONL/NDJSON logs 178 | 179 | JSONL/NDJSON logs have one event log per line; they look like this : 180 | 181 | ```json 182 | {"EventID": "4688", "EventRecordID": "1", ...} 183 | {"EventID": "4688", "EventRecordID": "2", ...} 184 | ... 185 | ``` 186 | 187 | It is possible to use Zircolite directly on JSONL/NDJSON files (NXLog files) with the `-j`, `--jsonl`, `--jsononly` or `--json-input` options : 188 | 189 | ```shell 190 | python3 zircolite.py --events <JSONL file or folder> --ruleset <ruleset> --jsonl 191 | ``` 192 | 193 | A simple use case is when you have already run Zircolite with the `--keeptmp` option. Since it keeps all the converted EVTX in a temp directory, if you need to re-execute Zircolite, you can do it directly using this directory as the EVTX source (with `--evtx <tmp directory>` and `--jsononly`) and avoid converting the EVTX again.
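As a sketch of this workflow (the temporary directory name is generated at run time, `tmp-XXXXXXXX` below is only an illustration) :

```shell
# First run : keep the JSON-converted EVTX files
python3 zircolite.py --evtx logs/ --ruleset rules/rules_windows_sysmon_pysigma.json --keeptmp
# Later run : reuse the converted files directly and skip the EVTX conversion step
python3 zircolite.py --evtx tmp-XXXXXXXX/ \
    --ruleset rules/rules_windows_generic_pysigma.json --jsononly
```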
194 | 195 | ### JSON Array / Full JSON object 196 | 197 | Some logs will be provided in JSON format as an array : 198 | 199 | ```json 200 | [ 201 | {"EventID": "4688", "EventRecordID": "1", ...}, 202 | {"EventID": "4688", "EventRecordID": "2", ...}, 203 | ... ] 204 | ``` 205 | 206 | To handle these logs you will need to use the `--jsonarray`, `--json-array` or `--json-array-input` options : 207 | 208 | ```shell 209 | python3 zircolite.py --events <JSON array file> --ruleset <ruleset> --json-array-input 210 | ``` 211 | 212 | ### CSV 213 | 214 | It is possible to use Zircolite directly on CSV logs **if the CSV files are correctly formatted**. The field names must appear on the first line : 215 | 216 | ```csv 217 | EventID,EventRecordID,Computer,SubjectUserSid,... 218 | 4624,32421,xxxx.DOMAIN.local,S-1-5-18,xxxx,DOMAIN,... 219 | ... 220 | ``` 221 | 222 | To handle these logs you will need to use the `--csv-input` option (**Do not use `--csv`** !): 223 | 224 | ```shell 225 | python3 zircolite.py --events <CSV file> --ruleset <ruleset> --csv-input 226 | ``` 227 | 228 | ### SQLite database files 229 | 230 | Since everything in Zircolite is stored in an in-memory SQLite database, you can choose to save the database on disk for later use. It is possible with the option `--dbfile <db_filename>`. 231 | 232 | ```shell 233 | python3 zircolite.py --evtx <EVTX file or folder> --ruleset <ruleset> \ 234 | --dbfile output.db 235 | ``` 236 | 237 | If you need to re-execute Zircolite, you can do it directly using the SQLite database as the EVTX source (with `--evtx <db_filename>` and `--dbonly`) and avoid converting the EVTX, post-processing them and inserting the data into the database. **Using this technique can save a lot of time.** 238 | 239 | ## Rulesets / Rules 240 | 241 | Zircolite has its own ruleset format (JSON). Default rulesets are available in the [rules](https://github.com/wagga40/Zircolite/tree/master/rules/) directory or in the [Zircolite-Rules](https://github.com/wagga40/Zircolite-Rules) repository. 242 | 243 | Since version 2.20.0, Zircolite can directly use native Sigma rules by converting them with [pySigma](https://github.com/SigmaHQ/pySigma). Zircolite will detect whether the provided rules are in JSON or YAML format and will automatically convert the rules in the latter case : 244 | 245 | ```bash 246 | # Simple rule 247 | python3 zircolite.py -e sample.evtx -r schtasks.yml 248 | 249 | # Directory 250 | python3 zircolite.py -e sample.evtx -r ./sigma/rules/windows/process_creation 251 | 252 | ``` 253 | ### Using multiple rules/rulesets 254 | 255 | It is possible to use multiple rulesets by chaining or repeating the `-r` or `--ruleset` arguments : 256 | 257 | ```bash 258 | # A single rule and a rule directory 259 | python3 zircolite.py -e sample.evtx -r schtasks.yml -r ./sigma/rules/windows/process_creation 260 | 261 | ``` 262 | 263 | By default, the detection results are displayed per ruleset; it is possible to group the results with `-cr` or `--combine-rulesets`. In this case only one list will be displayed. 264 | 265 | ## Pipelines 266 | 267 | By default, Zircolite does not use any pySigma pipelines, which can be somewhat limiting. However, it is possible to use the default pySigma pipelines.
268 | 269 | ### Install and list pipelines 270 | 271 | However, pipelines must be installed first. Check the [pySigma docs](https://github.com/SigmaHQ) for that, but it is generally as simple as : 272 | 273 | - `pip3 install pysigma-pipeline-nameofpipeline` 274 | - `poetry add pysigma-pipeline-nameofpipeline` 275 | 276 | Installed pipelines can be listed with : 277 | 278 | - `python3 zircolite_dev.py -pl` 279 | - `python3 zircolite_dev.py --pipeline-list` 280 | 281 | ### Use pipelines 282 | 283 | To use pipelines, employ the `-p` or `--pipelines` arguments; multiple pipelines are supported. The usage closely mirrors that of **Sigma-cli**. 284 | 285 | Example : 286 | 287 | ```bash 288 | python3 zircolite.py -e sample.evtx -r schtasks.yml -p sysmon -p windows-logsources 289 | ``` 290 | 291 | The converted rules/rulesets can be saved by using the `-sr` or the `--save-ruleset` arguments. 292 | 293 | > [!NOTE] 294 | > When using multiple native Sigma rules/rulesets, you cannot differentiate pipelines. All the pipelines will be used in the conversion process. 295 | 296 | ## Field mappings, field exclusions, value exclusions, field aliases and field splitting 297 | 298 | If your logs require transformations to align with your rules, Zircolite offers several mechanisms for this purpose. You can configure these mechanisms using a file located in the [config](https://github.com/wagga40/Zircolite/tree/master/config/) directory of the repository. Additionally, you have the option to use your own configuration by utilizing the `--config` or `-c` options. 299 | 300 | The configuration file has the following structure : 301 | 302 | ```json 303 | { 304 | "exclusions" : [], 305 | "useless" : [], 306 | "mappings" : 307 | { 308 | "field_name_1": "new_field_name_1", 309 | "field_name_2": "new_field_name_2" 310 | }, 311 | "alias": 312 | { 313 | "field_alias_1": "alias_1" 314 | }, 315 | "split": 316 | { 317 | "field_name_split": {"separator":",", "equal":"="} 318 | } 319 | } 320 | ``` 321 | 322 | ### Field mappings 323 | 324 | **Field mappings** enable you to rename a field from your logs. Zircolite leverages this mechanism extensively to rename nested JSON fields. You can view all the built-in field mappings [here](https://github.com/wagga40/Zircolite/blob/master/config/fieldMappings.json). 325 | 326 | For instance, to rename the "CommandLine" field in **your raw logs** to "cmdline", you can add the following entry to the [fieldMappings.json](https://github.com/wagga40/Zircolite/blob/master/config/fieldMappings.json) file: 327 | 328 | ```json 329 | { 330 | "exclusions" : [], 331 | "useless" : [], 332 | "mappings" : 333 | { 334 | "CommandLine": "cmdline" 335 | }, 336 | "alias":{}, 337 | "split": {} 338 | } 339 | ``` 340 | 341 | Please keep in mind that, as opposed to field aliases, the original field name is not kept. 342 | 343 | ### Field exclusions 344 | 345 | **Field exclusions** allow you to exclude a field. Zircolite already uses this mechanism to exclude the `xmlns` field. You can check all the builtin field exclusions [here](https://github.com/wagga40/Zircolite/blob/master/config/fieldMappings.json). 346 | 347 | ### Value exclusions 348 | 349 | **Value exclusions** allow you to remove fields whose value matches an excluded value. Zircolite already uses this mechanism to remove *null* and empty values. You can check all the builtin value exclusions [here](https://github.com/wagga40/Zircolite/blob/master/config/fieldMappings.json).
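As an illustration of these two mechanisms, using the same configuration structure as above (the listed field and values are only examples; check the builtin [fieldMappings.json](https://github.com/wagga40/Zircolite/blob/master/config/fieldMappings.json) for the real ones), excluding the `xmlns` field and dropping *null*/empty values could look like this :

```json
{
    "exclusions" : ["xmlns"],
    "useless" : [null, ""],
    "mappings" : {},
    "alias": {},
    "split": {}
}
```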
350 | 351 | ### Field aliases 352 | 353 | **Field aliases** allow you to have multiple fields with different names but the same value. It is pretty similar to field mapping, but the original field is kept. Field aliases can be used on original field names but also on mapped field names and split fields. 354 | 355 | Let's say you have this event log in JSON format (the event has been deliberately truncated): 356 | 357 | ```json 358 | { 359 | "EventID": 1, 360 | "Provider_Name": "Microsoft-Windows-Sysmon", 361 | "Channel": "Microsoft-Windows-Sysmon/Operational", 362 | "CommandLine": "\"C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe\"", 363 | "Image": "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe", 364 | "IntegrityLevel": "Medium", 365 | } 366 | ``` 367 | 368 | Let's say you are not sure all your rules use the "CommandLine" field but you remember that some of them use the "cmdline" field. To avoid any problems you could use an alias for the "CommandLine" field like this : 369 | 370 | ```json 371 | { 372 | "exclusions" : [], 373 | "useless" : [], 374 | "mappings" : {}, 375 | "alias":{ 376 | "CommandLine": "cmdline" 377 | }, 378 | "split": {} 379 | } 380 | ``` 381 | 382 | With this configuration, the event log used to apply Sigma rules will look like this : 383 | 384 | ```json 385 | { 386 | "EventID": 1, 387 | "Provider_Name": "Microsoft-Windows-Sysmon", 388 | "Channel": "Microsoft-Windows-Sysmon/Operational", 389 | "CommandLine": "\"C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe\"", 390 | "cmdline": "\"C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe\"", 391 | "Image": "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe", 392 | "IntegrityLevel": "Medium", 393 | } 394 | ``` 395 | 396 | Be careful when using aliases because the data is stored multiple times. 397 | 398 | ### Field splitting 399 | 400 | **Field splitting** allows you to split fields that contain key/value pairs. Zircolite already uses this mechanism to handle the hash/hashes fields in Sysmon logs. You can check all the builtin field splittings [here](https://github.com/wagga40/Zircolite/blob/master/config/fieldMappings.json). Moreover, field aliases can be applied to split fields. 401 | 402 | For example, let's say we have this Sysmon event log : 403 | 404 | ```json 405 | { 406 | "Hashes": "SHA1=XX,MD5=X,SHA256=XXX,IMPHASH=XXXX", 407 | "EventID": 1 408 | } 409 | ``` 410 | 411 | With the following configuration, Zircolite will split the `Hashes` field like this : 412 | 413 | ```json 414 | { 415 | "exclusions" : [], 416 | "useless" : [], 417 | "mappings" : {}, 418 | "alias":{}, 419 | "split": { 420 | "Hashes": {"separator":",", "equal":"="} 421 | } 422 | } 423 | ``` 424 | 425 | The final event log used to apply Sigma rules will look like this : 426 | 427 | ```json 428 | { 429 | "SHA1": "x", 430 | "MD5": "x", 431 | "SHA256": "x", 432 | "IMPHASH": "x", 433 | "Hashes": "SHA1=x,MD5=x,SHA256=x,IMPHASH=x", 434 | "EventID": 1 435 | } 436 | ``` 437 | 438 | ## Field Transforms 439 | 440 | ### What Are Transforms? 441 | 442 | Transforms in Zircolite are custom functions that manipulate the value of a specific field during the event flattening process.
They allow you to: 443 | 444 | - Format or normalize data 445 | - Enrich events with additional computed fields 446 | - Decode encoded data (e.g., Base64, hexadecimal) 447 | - Extract information using regular expressions 448 | 449 | By using transforms, you can preprocess event data to make it more suitable for detection rules and analysis. 450 | 451 | ### Enabling Transforms 452 | 453 | Transforms are configured in the config file (the default one is in `config/fieldMappings.json`) under the `"transforms"` section. To enable transforms, set the `"transforms_enabled"` flag to `true` in your configuration file: 454 | 455 | ```json 456 | { 457 | "transforms_enabled": true, 458 | "transforms": { 459 | // Transform definitions 460 | } 461 | } 462 | ``` 463 | 464 | ### Configuring Transforms 465 | 466 | Transforms are defined in the `"transforms"` section of the configuration file. Each transform is associated with a specific field and consists of several properties. 467 | 468 | ### Transform Structure 469 | 470 | A transform definition has the following structure: 471 | 472 | - **Field Name**: The name of the field to which the transform applies. 473 | - **Transform List**: A list of transform objects for the field. 474 | 475 | Each transform object contains: 476 | 477 | - **info**: A description of what the transform does. 478 | - **type**: The type of the transform (currently only `"python"` is supported). 479 | - **code**: The Python code that performs the transformation. 480 | - **alias**: A boolean indicating whether the result should be stored in a new field. 481 | - **alias_name**: The name of the new field if `alias` is `true`. 482 | - **source_condition**: A list specifying when the transform should be applied based on the input type (e.g., `["evtx_input", "json_input"]`). 483 | - **enabled**: A boolean indicating whether the transform is active. 484 | 485 | #### Source conditions possible values 486 | 487 | | Sets `source_condition` Value | 488 | |-------------------------------| 489 | | `"json_input"` | 490 | | `"json_array_input"` | 491 | | `"db_input"` | 492 | | `"sysmon_linux_input"` | 493 | | `"auditd_input"` | 494 | | `"xml_input"` | 495 | | `"evtxtract_input"` | 496 | | `"csv_input"` | 497 | | `"evtx_input"` | 498 | 499 | #### Example Transform Object 500 | 501 | ```json 502 | { 503 | "info": "Base64 decoded CommandLine", 504 | "type": "python", 505 | "code": "def transform(param):\n # Transformation logic\n return transformed_value", 506 | "alias": true, 507 | "alias_name": "CommandLine_b64decoded", 508 | "source_condition": ["evtx_input", "json_input"], 509 | "enabled": true 510 | } 511 | ``` 512 | 513 | ### Available Fields 514 | 515 | You can define transforms for any field present in your event data. In the configuration, transforms are keyed by the field name: 516 | 517 | ```json 518 | "transforms": { 519 | "CommandLine": [ 520 | { 521 | // Transform object 522 | } 523 | ], 524 | "Payload": [ 525 | { 526 | // Transform object 527 | } 528 | ] 529 | } 530 | ``` 531 | 532 | --- 533 | 534 | ### Writing Transform Functions 535 | 536 | Zircolite uses `RestrictedPython` to safely execute transform functions. This means that certain built-in functions and modules are available, while others are restricted. 537 | The function must be named `transform` and accept a single parameter `param`, which is the original value of the field. 
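For illustration, here is a minimal, hypothetical transform that only normalizes whitespace; it mainly shows the required signature :

```python
def transform(param):
    # param is the original field value; return the value to store instead
    if not isinstance(param, str):
        return param
    return " ".join(param.split())
```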
538 | 539 | **Available Modules and Functions:** 540 | 541 | - **Built-in Functions**: A limited set of Python built-in functions, such as `len`, `int`, `str`, etc. 542 | - **Modules**: You can import `re` for regular expressions, `base64` for encoding/decoding, and `chardet` for character encoding detection. 543 | 544 | **Unavailable Features:** 545 | 546 | - Access to file I/O, network, or system calls is prohibited. 547 | - Use of certain built-in functions that can affect the system is restricted. 548 | 549 | #### Example Transform Functions 550 | 551 | ##### Base64 Decoding 552 | 553 | ```python 554 | def transform(param): 555 | import base64 556 | decoded = base64.b64decode(param) 557 | return decoded.decode('utf-8') 558 | ``` 559 | 560 | ##### Hexadecimal to ASCII Conversion 561 | 562 | ```python 563 | def transform(param): 564 | decoded = bytes.fromhex(param).decode('ascii') 565 | return decoded.replace('\x00', ' ') 566 | ``` 567 | 568 | ### Applying Transforms 569 | 570 | Transforms are automatically applied during the event flattening process if: 571 | 572 | - They are **enabled** (`"enabled": true`). 573 | - The current input type matches the **source condition** (`"source_condition": [...]`). 574 | 575 | For each event, Zircolite checks if any transforms are defined for the fields present in the event. If so, it executes the transform function and replaces the field's value with the transformed value or stores it in a new field if `alias` is `true`. 576 | 577 | ### Example 578 | 579 | **Use Case**: Convert hexadecimal-encoded command lines in Auditd logs to readable ASCII strings. 580 | 581 | **Configuration:** 582 | 583 | ```json 584 | "proctitle": [ 585 | { 586 | "info": "Proctitle HEX to ASCII", 587 | "type": "python", 588 | "code": "def transform(param):\n return bytes.fromhex(param).decode('ascii').replace('\\x00', ' ')", 589 | "alias": false, 590 | "alias_name": "", 591 | "source_condition": ["auditd_input"], 592 | "enabled": true 593 | } 594 | ] 595 | ``` 596 | 597 | **Explanation:** 598 | 599 | - **Field**: `proctitle` 600 | - **Function**: Converts hexadecimal strings to ASCII and replaces null bytes with spaces. 601 | - **Alias**: `false` (the original `proctitle` field is replaced). 602 | 603 | ### Best Practices 604 | 605 | - **Test Your Transforms**: Before enabling a transform, ensure that the code works correctly with sample data. 606 | - **Use Aliases Wisely**: If you don't want to overwrite the original field, set `"alias": true` and provide an `"alias_name"`. 607 | - **Manage Performance**: Complex transforms can impact performance. Optimize your code and only enable necessary transforms. 608 | - **Keep Transforms Specific**: Tailor transforms to specific fields and input types using `"source_condition"` to avoid unexpected behavior. 609 | 610 | ## Generate your own rulesets 611 | 612 | Default rulesets are already provided in the `rules` directory. These rulesets only are the conversion of the rules located in [rules/windows](https://github.com/SigmaHQ/sigma/tree/master/rules/windows) directory of the Sigma repository. These rulesets are provided to use Zircolite out-of-the-box but [you should generate your own rulesets](#why-you-should-build-your-own-rulesets). 613 | 614 | **As of v2.9.5, Zircolite can auto-update its default rulesets using the `-U` or `--update-rules`. 
There is an auto-updated rulesets repository available [here](https://github.com/wagga40/Zircolite-Rules).** 615 | 616 | ### Generate rulesets using PySigma 617 | 618 | #### Using [Pdm](https://pdm-project.org/latest/) or [Poetry](https://python-poetry.org) 619 | 620 | ```shell 621 | # INSTALL 622 | git clone https://github.com/SigmaHQ/sigma.git 623 | cd sigma 624 | pdm init -n 625 | pdm add pysigma pip sigma-cli pysigma-pipeline-sysmon pysigma-pipeline-windows pysigma-backend-sqlite 626 | 627 | # GENERATE RULESET (SYSMON) 628 | pdm run sigma convert -t sqlite -f zircolite -p sysmon -p windows-logsources sigma/rules/windows/ -s -o rules.json 629 | # GENERATE RULESET (GENERIC / NO SYSMON) 630 | pdm run sigma convert -t sqlite -f zircolite -p windows-audit -p windows-logsources sigma/rules/windows/ -s -o rules.json 631 | 632 | ``` 633 | 634 | In the last line : 635 | 636 | - `-t` is the backend type (SQlite) 637 | - `-f` is the format, here "zircolite" means the ruleset will be generated in the format used by Zircolite 638 | - `-p` option is the pipeline used, in the given example we use two pipelines 639 | - `-s` to continue on error (e.g when there are not supported rules) 640 | - `-o` allow to specify the output file 641 | 642 | If you want to use *poetry*, just replace the "pdm" command in the above example by "poetry". 643 | 644 | ### Generate rulesets using sigmatools [**DEPRECATED**] 645 | 646 | [**DEPRECATED**] Zircolite use the SIGMA rules in JSON format. Since the SQLite backend is not yet available in pySigma, you need to generate your ruleset with the official [legacy-sigmatools](https://github.com/SigmaHQ/legacy-sigmatools) (**version 0.21 minimum**) : 647 | 648 | ```shell 649 | pip3 install sigmatools 650 | ``` 651 | 652 | [**DEPRECATED**] since you need to access the configuration files directly it is easier to also clone the repository : 653 | 654 | ```shell 655 | git clone https://github.com/SigmaHQ/legacy-sigmatools.git 656 | cd legacy-sigmatools 657 | ``` 658 | 659 | #### [**DEPRECATED**] Sysmon rulesets (when investigated endpoints have Sysmon logs) 660 | 661 | ```shell 662 | sigmac \ 663 | -t sqlite \ 664 | -c tools/config/generic/sysmon.yml \ 665 | -c tools/config/generic/powershell.yml \ 666 | -c tools/config/zircolite.yml \ 667 | -d rules/windows/ \ 668 | --output-fields title,id,description,author,tags,level,falsepositives,filename,status \ 669 | --output-format json \ 670 | -r \ 671 | -o rules_sysmon.json \ 672 | --backend-option table=logs 673 | ``` 674 | 675 | Where : 676 | 677 | - `-t` is the backend type (SQlite) 678 | - `-c` options are the backend configurations from the official repository 679 | - `-r` option is used to convert an entire directory (don't forget to remove if it is a single rule conversion) 680 | - `-o` option is used to provide the output filename 681 | - `--backend-option` is used to specify the SQLite table name (leave as is) 682 | 683 | #### [**DEPRECATED**] Generic rulesets (when investigated endpoints _don't_ have Sysmon logs) [**DEPRECATED**] 684 | 685 | ```shell 686 | sigmac \ 687 | -t sqlite \ 688 | -c tools/config/generic/windows-audit.yml \ 689 | -c tools/config/generic/powershell.yml \ 690 | -c tools/config/zircolite.yml \ 691 | -d rules/windows/ \ 692 | --output-fields title,id,description,author,tags,level,falsepositives,filename,status \ 693 | --output-format json \ 694 | -r \ 695 | -o rules_generic.json \ 696 | --backend-option table=logs 697 | ``` 698 | 699 | ### Why you should build your own rulesets 700 | 701 | The default 
rulesets provided are the conversion of the rules located in the `rules/windows` directory of the Sigma repository. You should take into account that : 702 | 703 | - **Some rules are very noisy or produce a lot of false positives** depending on your environment or the config file you used with genRules 704 | - **Some rules can be very slow** depending on your logs 705 | 706 | For example : 707 | 708 | - "Suspicious Eventlog Clear or Configuration Using Wevtutil" : **very noisy** on fresh environments (labs etc.), it commonly generates a lot of useless detections 709 | - "Notepad Making Network Connection" : **can slow down the execution of Zircolite very significantly** 710 | 711 | ## Docker 712 | 713 | Zircolite is also packaged as a Docker image (cf. [wagga40/zircolite](https://hub.docker.com/r/wagga40/zircolite) on Docker Hub), which embeds all dependencies (e.g. `evtx_dump`) and provides a platform-independent way of using the tool. Please note this image is not updated with the latest rulesets ! 714 | 715 | You can pull the latest image with : `docker pull wagga40/zircolite:latest` 716 | 717 | ### Build and run your own image 718 | 719 | ```shell 720 | docker build . -t wagga40/zircolite:latest 721 | docker container run --tty \ 722 | --volume /path/to/evtx:/case \ 723 | wagga40/zircolite:latest \ 724 | --ruleset rules/rules_windows_sysmon_pysigma.json \ 725 | --events /case \ 726 | --outfile /case/detected_events.json 727 | ``` 728 | 729 | This will recursively find log files in the `/case` directory of the container (which is bound to the `/path/to/evtx` directory of the host filesystem) and write the detection events to `/case/detected_events.json` (which finally corresponds to `/path/to/evtx/detected_events.json`). The given example uses the internal rulesets; if you want to use your own, place them in the same directory as the logs : 730 | 731 | ```shell 732 | docker container run --tty \ 733 | --volume /path/to/evtx:/case \ 734 | wagga40/zircolite:latest \ 735 | --ruleset /case/my_ruleset.json \ 736 | --events /case/my_logs.evtx \ 737 | --outfile /case/detected_events.json 738 | ``` 739 | 740 | Even if Zircolite does not alter the original log files, sometimes you want to make sure that nothing will write to the original files. For these cases, you can use a read-only bind mount with the following command: 741 | 742 | ```shell 743 | docker run --rm --tty \ 744 | -v /path/to/evtx:/case/input:ro \ 745 | -v /path/to/output:/case/output \ 746 | wagga40/zircolite:latest \ 747 | --ruleset rules/rules_windows_sysmon_pysigma.json \ 748 | --events /case/input \ 749 | -o /case/output/detected_events.json 750 | ``` 751 | 752 | ### Docker Hub 753 | 754 | You can use the Docker image available on [Docker Hub](https://hub.docker.com/r/wagga40/zircolite). Please note that in this case, the configuration files and rules are the default ones.
755 | 756 | ```shell 757 | docker container run --tty \ 758 | --volume /path/to/evtx:/case docker.io/wagga40/zircolite:latest \ 759 | --ruleset rules/rules_windows_sysmon_pysigma.json \ 760 | --evtx /case --outfile /case/detected_events.json 761 | ``` 762 | -------------------------------------------------------------------------------- /docs/_sidebar.md: -------------------------------------------------------------------------------- 1 | * Usage 2 | * [Requirements and Installation](Usage.md#requirements-and-installation) 3 | * [Basic usage](Usage.md#basic-usage) 4 | * [Rulesets / Rules](Usage.md#rulesets--rules) 5 | * [Pipelines](Usage.md#pipelines) 6 | * [Field mappings, field exclusions, value exclusions, field aliases and field splitting](Usage.md#field-mappings-field-exclusions-value-exclusions-field-aliases-and-field-splitting) 7 | * [Field Transforms](Usage.md#field-transforms) 8 | * [Generate your own rulesets](Usage.md#generate-your-own-rulesets) 9 | * [Docker](Usage.md#docker) 10 | 11 | * Advanced use 12 | * [Working with large datasets](Advanced.md#working-with-large-datasets) 13 | * [Keep data used by Zircolite](Advanced.md#keep-data-used-by-zircolite) 14 | * [Filtering](Advanced.md#filtering) 15 | * [Forwarding detected events](Advanced.md#forwarding-detected-events) 16 | * [Templating and Formatting](Advanced.md#templating-and-formatting) 17 | * [Mini GUI](Advanced.md#mini-gui) 18 | * [Packaging Zircolite](Advanced.md#packaging-zircolite) 19 | 20 | * Internals 21 | * [Zircolite architecture](Internals.md#zircolite-architecture) 22 | * [Project structure](Internals.md#project-structure) 23 | -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Document 6 | 7 | 8 | 9 | 10 | 11 | 12 |
13 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /docs/pics/Zircolite.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/docs/pics/Zircolite.png -------------------------------------------------------------------------------- /docs/pics/gui.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/docs/pics/gui.jpg -------------------------------------------------------------------------------- /gui/zircogui.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/gui/zircogui.zip -------------------------------------------------------------------------------- /pics/Zircolite.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/Zircolite.gif -------------------------------------------------------------------------------- /pics/Zircolite.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/Zircolite.png -------------------------------------------------------------------------------- /pics/Zircolite.svg: -------------------------------------------------------------------------------- 1 | bash-5.1$bash-5.1$pdmrunpython3zircolite.py--evtxsamples.evtx--rulesetrules/rules_windows_sysmon.json███████╗██╗██████╗██████╗██████╗██╗██╗████████╗███████╗╚══███╔╝██║██╔══██╗██╔════╝██╔═══██╗██║██║╚══██╔══╝██╔════╝███╔╝██║██████╔╝██║██║██║██║██║██║█████╗███╔╝██║██╔══██╗██║██║██║██║██║██║██╔══╝███████╗██║██║██║╚██████╗╚██████╔╝███████╗██║██║███████╗╚══════╝╚═╝╚═╝╚═╝╚═════╝╚═════╝╚══════╝╚═╝╚═╝╚══════╝-=StandaloneSIGMADetectiontoolforEVTX=-[+]Checkingprerequisites[+]ExtractingEVTXUsing'tmp-1516UZEB'directory0%||0/1[00:00<?,?it/s]100%|████████████████████████████████████████████████████████████████████████████|1/1[00:00<00:00,15.19it/s][+]ProcessingEVTX100%|████████████████████████████████████████████████████████████████████████████|1/1[00:00<00:00,2.01it/s][+]Creatingmodel[+]Insertingdata100%|███████████████████████████████████████████████████████████████████|7346/7346[00:00<00:00,16387.59it/s][+]Cleaningunusedobjects[+]Loadingrulesetfrom:rules/rules_windows_sysmon.json[+]Executingruleset-1212rules24%|████████████████▎|287/1212[00:00<00:00,1559.79it/s]-WindowsDefenderThreatDetectionDisabled[high]:2events-BypassUACUsingDelegateExecute[high]:1events37%|█████████████████████████▎|444/1212[00:00<00:00,1040.48it/s]-KoadicExecution[high]:9events46%|████████████████████████████████|563/1212[00:00<00:00,1032.53it/s]-WhoamiExecution[medium]:2events-RegistryDumpofSAMCredsandSecrets[high]:12events-ScheduledTaskCreation[low]:1events56%|██████████████████████████████████████▍|676/1212[00:00<00:00,1038.15it/s]-LocalAccountsDiscovery[low]:2events65%|█████████████████████████████████████████████▍|786/1212[00:00<00:00,931.84it/s]-MSHTASpawningWindowsShell[high]:6events74%|███████████████████████████████████████████████████▉|899/1212[00:00<00:00,984.45it/s]-SuspiciousMSHTAProcessPatterns[high]:2events-NotepadMakingNetworkConnection[high]:5events1
00%|████████████████████████████████████████████████████████████████████|1212/1212[00:00<00:00,1235.79it/s][+]Resultswrittenin:detected_events.json[+]CleaningFinishedin2seconds100%|████████████████████████████████████████████████████████████████████████████|1/1[00:00<00:00,2.02it/s]0%||0/7346[00:00<?,?it/s]25%|████████████████▋|1827/7346[00:00<00:00,18267.49it/s]50%|█████████████████████████████████▌|3679/7346[00:00<00:00,18411.38it/s]75%|██████████████████████████████████████████████████▎|5521/7346[00:00<00:00,16607.59it/s]98%|█████████████████████████████████████████████████████████████████▋|7201/7346[00:00<00:00,15885.11it/s]0%||0/1212[00:00<?,?it/s]6%|███▉|68/1212[00:00<00:01,674.49it/s]exit -------------------------------------------------------------------------------- /pics/Zircolite_v2.9.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/Zircolite_v2.9.gif -------------------------------------------------------------------------------- /pics/gui-matrix.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/gui-matrix.webp -------------------------------------------------------------------------------- /pics/gui-timeline.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/gui-timeline.webp -------------------------------------------------------------------------------- /pics/gui.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/gui.jpg -------------------------------------------------------------------------------- /pics/gui.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/gui.webp -------------------------------------------------------------------------------- /pics/zircolite_200.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/zircolite_200.png -------------------------------------------------------------------------------- /pics/zircolite_400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/zircolite_400.png -------------------------------------------------------------------------------- /pics/zircolite_600.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/zircolite_600.png -------------------------------------------------------------------------------- /pics/zircolite_800.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wagga40/Zircolite/edfc67c866bf0808a90dcca926ee38795e043e15/pics/zircolite_800.png -------------------------------------------------------------------------------- /requirements.full.txt: 
-------------------------------------------------------------------------------- 1 | orjson>=3.9.15 2 | xxhash 3 | colorama>=0.4.4 4 | tqdm>=4.58.0 5 | requests>=2.24.0 6 | jinja2>=2.11.3 7 | evtx>=0.6.2 8 | lxml 9 | pysigma>=0.10.10 10 | pysigma-pipeline-sysmon>=1.0.3 11 | pysigma-pipeline-windows>=1.1.1 12 | pysigma-backend-sqlite>=0.1.1 13 | pyyaml 14 | chardet 15 | RestrictedPython -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | orjson>=3.9.15 2 | xxhash 3 | colorama>=0.4.4 4 | tqdm>=4.58.0 5 | chardet 6 | RestrictedPython 7 | -------------------------------------------------------------------------------- /rules/README.md: -------------------------------------------------------------------------------- 1 | # Rulesets 2 | 3 | ## Default rulesets 4 | 5 | These rulesets have been generated with `sigmac` wich is available in the [official sigma repository](https://github.com/SigmaHQ/sigma). 6 | 7 | :warning: **These rulesets are given "as is" to help new analysts to discover SIGMA and Zircolite. They are not filtered for slow rules, rules with a lot of false positives etc. If you know what you do, you SHOULD generate your own rulesets.** 8 | 9 | - `rules_windows_generic_full.json` : Full SIGMA ruleset from the "**Windows**" directory of the official repository (no SYSMON rewriting) 10 | - `rules_windows_generic_high.json` : Only level high and above SIGMA rules from the "**Windows**" directory of the official repository (no SYSMON rewriting) 11 | - `rules_windows_generic_medium.json` : Only level medium and above SIGMA rules from the "**Windows**" directory of the official repository (no SYSMON rewriting) 12 | - `rules_windows_generic.json` : Same file as `rules_windows_generic_high.json` 13 | - `rules_windows_sysmon_full.json` : Full SIGMA ruleset from the "**Windows**" directory of the official repository (SYSMON) 14 | - `rules_windows_sysmon_high.json` : Only level high and above SIGMA rules from the "**Windows**" directory of the official repository (SYSMON) 15 | - `rules_windows_sysmon_medium.json` : Only level medium and above SIGMA rules from the "**Windows**" directory of the official repository (SYSMON) 16 | - `rules_windows_sysmon.json` : Same file as `rules_windows_sysmon_high.json` 17 | - `rules_linux.json`: Full SIGMA ruleset from the "**linux**" directory of the official repository. This ruleset can be used with Auditd and Sysmon for Linux logs. 18 | 19 | **As of v2.9.5, Zircolite can auto-update its default rulesets using the `-U` or `--update-rules`. There is an auto-updated rulesets repository available [here](https://github.com/wagga40/Zircolite-Rules).** 20 | 21 | ## Why you should make your own rulesets 22 | 23 | The default rulesets provided are the conversion of the rules located in `rules/windows` directory of the Sigma repository. You should take into account that : 24 | 25 | - **Some rules are very noisy or produce a lot of false positives** depending on your environment or the config file you used with genRules 26 | - **Some rules can be very slow** depending on your logs 27 | 28 | To generate you own ruleset please check the docs [here](https://github.com/wagga40/Zircolite/tree/master/docs). 
29 | 30 | For example : 31 | 32 | - "Suspicious Eventlog Clear or Configuration Using Wevtutil" : **very noisy** on fresh environment (labs etc.), commonly generate a lot of useless detection 33 | - "Notepad Making Network Connection" : **can slow very significantly** the execution of Zircolite 34 | - "Rundll32 Internet Connection" : can be **very noisy** in some situations 35 | - "Wuauclt Network Connection" : **can slow very significantly** the execution of Zircolite 36 | - "PowerShell Network Connections : **can slow very significantly** the execution of Zircolite -------------------------------------------------------------------------------- /templates/exportForELK.tmpl: -------------------------------------------------------------------------------- 1 | {% for elem in data %}{% for match in elem["matches"] %}{"title":{{ elem["title"]|tojson }},"id":{{ elem["id"]|tojson }},"description":{{ elem["description"]|tojson }},{% for key, value in match.items() %}"{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %}} 2 | {% endfor %}{% endfor %} 3 | -------------------------------------------------------------------------------- /templates/exportForSplunk.tmpl: -------------------------------------------------------------------------------- 1 | {% for elem in data %}{% for match in elem["matches"] %}{"title":{{ elem["title"]|tojson }},"description":{{ elem["description"]|tojson }},{% for key, value in match.items() %}"{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %}} 2 | {% endfor %}{% endfor %} 3 | -------------------------------------------------------------------------------- /templates/exportForSplunkWithRuleID.tmpl: -------------------------------------------------------------------------------- 1 | {% for elem in data %}{% for match in elem["matches"] %}{"title":{{ elem["title"]|tojson }},"id":{{ elem["id"]|tojson }},"description":{{ elem["description"]|tojson }},{% for key, value in match.items() %}"{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %}} 2 | {% endfor %}{% endfor %} 3 | -------------------------------------------------------------------------------- /templates/exportForTimesketch.tmpl: -------------------------------------------------------------------------------- 1 | {% for elem in data %}{% for match in elem["matches"] %}{"title":{{ elem["title"]|tojson }},"id":{{ elem["id"]|tojson }},"description":{{ elem["description"]|tojson }},"message":{{ elem["title"]|tojson }},"timestamp_desc":{{ elem["rule_level"]|tojson }},{% for key, value in match.items() %}"{{ key }}":{{ value|tojson }}{%if key == "SystemTime"%},"datetime":{{ value|tojson }}{% endif %}{{ "," if not loop.last }}{% endfor %}} 2 | {% endfor %}{% endfor %} -------------------------------------------------------------------------------- /templates/exportForZinc.tmpl: -------------------------------------------------------------------------------- 1 | {% for elem in data %}{% for match in elem["matches"] %}{ "index" : { "_index" : "zircolite" } } 2 | {"title":{{ elem["title"]|tojson }},"id":{{ elem["id"]|tojson }},"description":{{ elem["description"]|tojson }},{% for key, value in match.items() %}"{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %}} 3 | {% endfor %}{% endfor %} 4 | -------------------------------------------------------------------------------- /templates/exportForZircoGui.tmpl: -------------------------------------------------------------------------------- 1 | var ReconnaissanceData = [{% for elem in data %}{% if 
"attack.reconnaissance" in elem["tags"] %}{% for match in elem["matches"] %} 2 | { 3 | "Rule level":{{ elem["rule_level"]|tojson }}, 4 | "title":{{ elem["title"]|tojson }}, 5 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 6 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 7 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 8 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 9 | ]; 10 | 11 | var ResourceDevelopmentData = [{% for elem in data %}{% if "attack.resource_development" in elem["tags"] %}{% for match in elem["matches"] %} 12 | { 13 | "Rule level":{{ elem["rule_level"]|tojson }}, 14 | "title":{{ elem["title"]|tojson }}, 15 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 16 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 17 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 18 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 19 | ]; 20 | 21 | var InitialAccessData = [{% for elem in data %}{% if "attack.initial_access" in elem["tags"] %}{% for match in elem["matches"] %} 22 | { 23 | "Rule level":{{ elem["rule_level"]|tojson }}, 24 | "title":{{ elem["title"]|tojson }}, 25 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 26 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 27 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 28 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 29 | ]; 30 | 31 | var ExecutionData = [{% for elem in data %}{% if "attack.execution" in elem["tags"] %}{% for match in elem["matches"] %} 32 | { 33 | "Rule level":{{ elem["rule_level"]|tojson }}, 34 | "title":{{ elem["title"]|tojson }}, 35 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 36 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 37 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 38 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 39 | ]; 40 | 41 | var PersistenceData = [{% for elem in data %}{% if "attack.persistence" in elem["tags"] %}{% for match in elem["matches"] %} 42 | { 43 | "Rule level":{{ elem["rule_level"]|tojson }}, 44 | "title":{{ elem["title"]|tojson }}, 45 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 46 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 47 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 48 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 49 | ]; 50 | 51 | var PrivilegeEscalationData = [{% for elem in data %}{% if "attack.privilege_escalation" in elem["tags"] %}{% for match in elem["matches"] %} 52 | { 53 | "Rule level":{{ elem["rule_level"]|tojson }}, 54 | "title":{{ elem["title"]|tojson }}, 55 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 56 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 57 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 58 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{%- endfor %} 59 | ]; 60 | 61 | var DefenseEvasionData = [{% for elem in data %}{% if "attack.defense_evasion" in elem["tags"] %}{% for match in elem["matches"] %} 62 | { 63 | "Rule level":{{ elem["rule_level"]|tojson }}, 64 | "title":{{ elem["title"]|tojson }}, 65 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 
66 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 67 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 68 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 69 | ]; 70 | 71 | var CredentialAccessData = [{% for elem in data %}{% if "attack.credential_access" in elem["tags"] %}{% for match in elem["matches"] %} 72 | { 73 | "Rule level":{{ elem["rule_level"]|tojson }}, 74 | "title":{{ elem["title"]|tojson }}, 75 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 76 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 77 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 78 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 79 | ]; 80 | 81 | var DiscoveryData = [{% for elem in data %}{% if "attack.discovery" in elem["tags"] %}{% for match in elem["matches"] %} 82 | { 83 | "Rule level":{{ elem["rule_level"]|tojson }}, 84 | "title":{{ elem["title"]|tojson }}, 85 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 86 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 87 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 88 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 89 | ]; 90 | 91 | var LateralMovementData = [{% for elem in data %}{% if "attack.lateral_movement" in elem["tags"] %}{% for match in elem["matches"] %} 92 | { 93 | "Rule level":{{ elem["rule_level"]|tojson }}, 94 | "title":{{ elem["title"]|tojson }}, 95 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 96 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 97 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 98 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 99 | ]; 100 | 101 | var CollectionData = [{% for elem in data %}{% if "attack.collection" in elem["tags"] %}{% for match in elem["matches"] %} 102 | { 103 | "Rule level":{{ elem["rule_level"]|tojson }}, 104 | "title":{{ elem["title"]|tojson }}, 105 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 106 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 107 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 108 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 109 | ]; 110 | 111 | var CommandAndControlData = [{% for elem in data %}{% if "attack.command_and_control" in elem["tags"] %}{% for match in elem["matches"] %} 112 | { 113 | "Rule level":{{ elem["rule_level"]|tojson }}, 114 | "title":{{ elem["title"]|tojson }}, 115 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 116 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 117 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 118 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 119 | ]; 120 | 121 | var ExfiltrationData = [{% for elem in data %}{% if "attack.exfiltration" in elem["tags"] %}{% for match in elem["matches"] %} 122 | { 123 | "Rule level":{{ elem["rule_level"]|tojson }}, 124 | "title":{{ elem["title"]|tojson }}, 125 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 126 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 127 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 128 | }{{ "," if not loop.last }}{% 
endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 129 | ]; 130 | 131 | var ImpactData = [{% for elem in data %}{% if "attack.impact" in elem["tags"] %}{% for match in elem["matches"] %} 132 | { 133 | "Rule level":{{ elem["rule_level"]|tojson }}, 134 | "title":{{ elem["title"]|tojson }}, 135 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 136 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 137 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 138 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 139 | ]; 140 | 141 | var OtherData = [{% for elem in data %}{% if elem["tags"]==[] %}{% for match in elem["matches"] %} 142 | { 143 | "Rule level":{{ elem["rule_level"]|tojson }}, 144 | "title":{{ elem["title"]|tojson }}, 145 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 146 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 147 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 148 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 149 | ]; 150 | 151 | var UnknownData = [{% for elem in data %}{% if elem["rule_level"] == "unknown" %}{% for match in elem["matches"] %} 152 | { 153 | "Rule level":{{ elem["rule_level"]|tojson }}, 154 | "title":{{ elem["title"]|tojson }}, 155 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 156 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 157 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 158 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 159 | ]; 160 | 161 | var InformationalData = [{% for elem in data %}{% if elem["rule_level"] == "informational" %}{% for match in elem["matches"] %} 162 | { 163 | "Rule level":{{ elem["rule_level"]|tojson }}, 164 | "title":{{ elem["title"]|tojson }}, 165 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 166 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 167 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 168 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 169 | ]; 170 | 171 | var LowData = [{% for elem in data %}{% if elem["rule_level"] == "low" %}{% for match in elem["matches"] %} 172 | { 173 | "Rule level":{{ elem["rule_level"]|tojson }}, 174 | "title":{{ elem["title"]|tojson }}, 175 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 176 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 177 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 178 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 179 | ]; 180 | 181 | var MediumData = [{% for elem in data %}{% if elem["rule_level"] == "medium" %}{% for match in elem["matches"] %} 182 | { 183 | "Rule level":{{ elem["rule_level"]|tojson }}, 184 | "title":{{ elem["title"]|tojson }}, 185 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 186 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 187 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 188 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 189 | ]; 190 | 191 | var HighData = [{% for elem in data %}{% if elem["rule_level"] == "high" %}{% for match in elem["matches"] %} 192 | { 193 | "Rule level":{{ elem["rule_level"]|tojson }}, 194 | "title":{{ 
elem["title"]|tojson }}, 195 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 196 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 197 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 198 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 199 | ]; 200 | 201 | var CriticalData = [{% for elem in data %}{% if elem["rule_level"] == "critical" %}{% for match in elem["matches"] %} 202 | { 203 | "Rule level":{{ elem["rule_level"]|tojson }}, 204 | "title":{{ elem["title"]|tojson }}, 205 | "sigma_yml":{{ elem["sigmafile"]|tojson }}, 206 | "description":{{ elem["description"]|tojson }},{% for key, value in match.items() %} 207 | "{{ key }}":{{ value|tojson }}{{ "," if not loop.last }}{% endfor %} 208 | }{{ "," if not loop.last }}{% endfor %}{{ "," if not loop.last }}{% endif %}{% endfor %} 209 | ]; 210 | 211 | var attack = [ 212 | {% for elem in data -%} 213 | {% for tag in elem["tags"] -%} 214 | {% if ".t" in tag -%} 215 | [{{ tag|replace("attack.", "")|upper|tojson }}, {{ elem["title"]|tojson }}], 216 | {% endif -%} 217 | {% endfor -%} 218 | {% endfor -%} 219 | ]; 220 | 221 | var tags = [{% for elem in data %}{% for tag in elem["tags"] %}{% if ".t" in tag %} 222 | {{ tag|replace("attack.", "")|upper|tojson }},{% endif %}{% endfor %}{% endfor %} 223 | ]; 224 | 225 | var timeField = {{timeField|tojson}}; 226 | 227 | var dictData = {}; 228 | dictData["reconnaissance"] = ReconnaissanceData; 229 | dictData["resource_development"] = ResourceDevelopmentData; 230 | dictData["initial_access"] = InitialAccessData; 231 | dictData["execution"] = ExecutionData; 232 | dictData["persistence"] = PersistenceData; 233 | dictData["privilege_escalation"] = PrivilegeEscalationData; 234 | dictData["defense_evasion"] = DefenseEvasionData; 235 | dictData["credential_access"] = CredentialAccessData; 236 | dictData["discovery"] = DiscoveryData; 237 | dictData["lateral_movement"] = LateralMovementData; 238 | dictData["collection"] = CollectionData; 239 | dictData["command_and_control"] = CommandAndControlData; 240 | dictData["exfiltration"] = ExfiltrationData; 241 | dictData["impact"] = ImpactData; 242 | dictData["other"] = OtherData; 243 | dictData["unknown"] = UnknownData; 244 | dictData["informational"] = InformationalData; 245 | dictData["low"] = LowData; 246 | dictData["medium"] = MediumData; 247 | dictData["high"] = HighData; 248 | dictData["critical"] = CriticalData; 249 | --------------------------------------------------------------------------------