├── drakrun
├── __init__.py
├── ipt
│ ├── __init__.py
│ └── ipt_utils.py
├── lib
│ ├── __init__.py
│ ├── libvmi
│ │ ├── __init__.py
│ │ ├── vmi_info.py
│ │ ├── dlls.py
│ │ └── libvmi.py
│ ├── fetch_pdb.py
│ ├── network_info.py
│ ├── install_info.py
│ ├── paths.py
│ ├── drakvuf_cmdline.py
│ ├── xen.py
│ ├── injector.py
│ ├── vmi_profile.py
│ └── s3_storage.py
├── web
│ ├── __init__.py
│ ├── frontend
│ │ ├── src
│ │ │ ├── assets
│ │ │ │ └── logo.png
│ │ │ ├── analysisStatus.js
│ │ │ ├── main.jsx
│ │ │ ├── ProcessBadge.jsx
│ │ │ ├── AnalysisStatusBadge.jsx
│ │ │ ├── AnalysisScreenshotViewer.jsx
│ │ │ ├── MethodFilterPicker.jsx
│ │ │ ├── App.css
│ │ │ ├── AnalysisMetadataTable.jsx
│ │ │ ├── TabSwitcher.jsx
│ │ │ ├── ProcessInfoTable.jsx
│ │ │ ├── AnalysisView.jsx
│ │ │ ├── AnalysisList.jsx
│ │ │ ├── PluginPicker.jsx
│ │ │ ├── ProcessTree.jsx
│ │ │ ├── ProcessTreeView.jsx
│ │ │ ├── AnalysisPendingView.jsx
│ │ │ └── api.js
│ │ ├── index.html
│ │ ├── .gitignore
│ │ ├── README.md
│ │ ├── vite.config.js
│ │ ├── eslint.config.js
│ │ └── package.json
│ ├── app.py
│ └── schema.py
├── analyzer
│ ├── __init__.py
│ ├── postprocessing
│ │ ├── plugins
│ │ │ ├── capa_plugin
│ │ │ │ └── __init__.py
│ │ │ ├── index_logs.py
│ │ │ ├── screenshot_metadata.py
│ │ │ ├── compress_ipt.py
│ │ │ ├── gzip_syscalls.py
│ │ │ ├── get_ttps_info.py
│ │ │ ├── generate_wireshark_key_file.py
│ │ │ ├── build_process_tree.py
│ │ │ ├── plugin_base.py
│ │ │ ├── split_drakmon_log.py
│ │ │ ├── parse_utils.py
│ │ │ ├── generate_report.py
│ │ │ ├── get_socket_info.py
│ │ │ ├── __init__.py
│ │ │ └── get_modified_files_info.py
│ │ ├── __init__.py
│ │ └── postprocess.py
│ ├── file_metadata.py
│ ├── post_restore.py
│ ├── startup_command.py
│ ├── run_tools.py
│ ├── analysis_options.py
│ └── screenshotter.py
├── data
│ ├── syscalls.txt
│ ├── config.toml
│ ├── vm-post-restore.ps1
│ └── cfg.template
├── tools
│ ├── drakshell
│ │ ├── obj
│ │ │ └── .gitkeep
│ │ ├── .gitignore
│ │ ├── linker.ld
│ │ ├── Makefile
│ │ └── thread_start.S
│ ├── ipt
│ │ ├── .clang-format
│ │ ├── drak-ipt-filter
│ │ └── CMakeLists.txt
│ └── Makefile
├── version.py
└── cli
│ ├── __main__.py
│ ├── __init__.py
│ ├── banner.py
│ ├── check_root.py
│ ├── worker.py
│ ├── postprocess.py
│ ├── vm_start.py
│ ├── vm_stop.py
│ ├── mount.py
│ ├── postinstall.py
│ ├── drakvuf_cmdline.py
│ ├── main.py
│ ├── sanity_check.py
│ ├── drakshell.py
│ ├── make_profile.py
│ ├── s3_storage.py
│ ├── modify_vm0.py
│ └── install.py
├── docs
├── .gitignore
├── requirements.txt
├── _static
│ ├── custom_plugin.png
│ ├── general_logs.png
│ ├── process_hover.png
│ ├── process_info.png
│ ├── process_tree.png
│ ├── screenshots.png
│ ├── upload_menu.png
│ ├── analysis_report.png
│ ├── analysis_status.png
│ ├── recent_analyses.png
│ ├── summary_report.png
│ ├── artifact_buttons.png
│ ├── live_interaction.png
│ ├── general_logs_json_inspector.png
│ ├── process_info_method_filter.png
│ ├── process_info_method_filter_selected.png
│ └── theme-overrides.css
├── Makefile
├── drakpdb.rst
├── index.rst
├── usage
│ ├── troubleshooting.rst
│ └── managing_snapshots.rst
├── faq.rst
├── conf.py
├── whats_changed.rst
└── ipt.rst
├── dev
├── .gitignore
├── backend.Dockerfile
├── frontend.Dockerfile
├── compose.yml
└── config.example.toml
├── test
├── requirements.txt
├── vm-runner-client
│ ├── .gitignore
│ ├── vm_runner_client
│ │ ├── __init__.py
│ │ ├── socks.py
│ │ └── __main__.py
│ └── pyproject.toml
├── test.exe
├── utils.py
└── test_sanity.py
├── .github
├── screenshots
│ ├── cef.png
│ ├── cert.png
│ ├── honeynet.png
│ └── sandbox.png
├── ISSUE_TEMPLATE
│ ├── other-issue.md
│ ├── feature_request.md
│ └── bug_report.md
└── workflows
│ ├── release.yml
│ └── build.yml
├── .flake8
├── .gitignore
├── .gitmodules
├── COPYING
├── MANIFEST.in
├── pyproject.toml
├── scripts
├── bump_version.json
└── bump_version.py
├── .readthedocs.yaml
├── requirements.txt
├── setup.py
├── Makefile
├── CONTRIBUTING.md
└── README.md
/drakrun/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/drakrun/ipt/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/drakrun/lib/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/drakrun/web/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | _build/
2 |
--------------------------------------------------------------------------------
/drakrun/analyzer/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/drakrun/data/syscalls.txt:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/dev/.gitignore:
--------------------------------------------------------------------------------
1 | config.toml
2 |
--------------------------------------------------------------------------------
/drakrun/tools/drakshell/obj/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/drakrun/version.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.20.0"
2 |
--------------------------------------------------------------------------------
/drakrun/tools/drakshell/.gitignore:
--------------------------------------------------------------------------------
1 | obj/*
2 | drakshell
3 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/capa_plugin/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/requirements.txt:
--------------------------------------------------------------------------------
1 | pytest==5.4.1
2 | ./vm-runner-client
3 |
--------------------------------------------------------------------------------
/drakrun/cli/__main__.py:
--------------------------------------------------------------------------------
from .main import main

# Guard the entry point so importing this module (e.g. by tooling or
# accidental `import drakrun.cli.__main__`) does not start the CLI.
# `python -m drakrun.cli` still executes main() as before.
if __name__ == "__main__":
    main()
4 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | Sphinx==8.2.3
2 | sphinx-rtd-theme==3.0.2
3 |
4 |
--------------------------------------------------------------------------------
/drakrun/cli/__init__.py:
--------------------------------------------------------------------------------
1 | from .main import main
2 |
3 | __all__ = ["main"]
4 |
--------------------------------------------------------------------------------
/test/vm-runner-client/.gitignore:
--------------------------------------------------------------------------------
1 | *.egg-info/
2 | build/
3 | __pycache__/
4 |
--------------------------------------------------------------------------------
/drakrun/tools/ipt/.clang-format:
--------------------------------------------------------------------------------
1 | ---
2 | Language: Cpp
3 | BasedOnStyle: Google
4 |
--------------------------------------------------------------------------------
/test/test.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/test/test.exe
--------------------------------------------------------------------------------
/.github/screenshots/cef.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/.github/screenshots/cef.png
--------------------------------------------------------------------------------
/test/vm-runner-client/vm_runner_client/__init__.py:
--------------------------------------------------------------------------------
1 | from .client import DrakvufVM
2 |
3 | __all__ = ["DrakvufVM"]
4 |
--------------------------------------------------------------------------------
/.github/screenshots/cert.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/.github/screenshots/cert.png
--------------------------------------------------------------------------------
/docs/_static/custom_plugin.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/custom_plugin.png
--------------------------------------------------------------------------------
/docs/_static/general_logs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/general_logs.png
--------------------------------------------------------------------------------
/docs/_static/process_hover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/process_hover.png
--------------------------------------------------------------------------------
/docs/_static/process_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/process_info.png
--------------------------------------------------------------------------------
/docs/_static/process_tree.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/process_tree.png
--------------------------------------------------------------------------------
/docs/_static/screenshots.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/screenshots.png
--------------------------------------------------------------------------------
/docs/_static/upload_menu.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/upload_menu.png
--------------------------------------------------------------------------------
/.github/screenshots/honeynet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/.github/screenshots/honeynet.png
--------------------------------------------------------------------------------
/.github/screenshots/sandbox.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/.github/screenshots/sandbox.png
--------------------------------------------------------------------------------
/docs/_static/analysis_report.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/analysis_report.png
--------------------------------------------------------------------------------
/docs/_static/analysis_status.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/analysis_status.png
--------------------------------------------------------------------------------
/docs/_static/recent_analyses.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/recent_analyses.png
--------------------------------------------------------------------------------
/docs/_static/summary_report.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/summary_report.png
--------------------------------------------------------------------------------
/drakrun/tools/ipt/drak-ipt-filter:
--------------------------------------------------------------------------------
#!/bin/bash
# Usage: drak-ipt-filter <raw-trace-file> <cr3>
# Filters a raw Intel PT trace to the given CR3 (address space) and
# emits the packet payloads as binary on stdout.

# Abort on the first failing command.
set -e
# ptdump prints one packet per line; take the third space-separated
# field (presumably the hex payload — confirm against ptdump output
# format) and convert it back to raw bytes.
ptdump --cr3 "$2" --raw "$1" | cut -f3 '-d ' | xxd -p -r
5 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | extend-ignore=E501,E203
3 | max-line-length=88
4 | exclude=drakrun/data,drakrun/tools,drakrun/web/frontend
5 |
--------------------------------------------------------------------------------
/docs/_static/artifact_buttons.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/artifact_buttons.png
--------------------------------------------------------------------------------
/docs/_static/live_interaction.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/live_interaction.png
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/drakrun/web/frontend/src/assets/logo.png
--------------------------------------------------------------------------------
/docs/_static/general_logs_json_inspector.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/general_logs_json_inspector.png
--------------------------------------------------------------------------------
/docs/_static/process_info_method_filter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/process_info_method_filter.png
--------------------------------------------------------------------------------
/docs/_static/process_info_method_filter_selected.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CERT-Polska/drakvuf-sandbox/HEAD/docs/_static/process_info_method_filter_selected.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /configs/
2 | /venv/
3 | /*.sav
4 | .idea/
5 | .vscode/
6 | *.egg-info/
7 | __pycache__/
8 | build/
9 | dist/
10 | drakrun/tools/get-explorer-pid
11 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/__init__.py:
--------------------------------------------------------------------------------
1 | from .postprocess import append_metadata_to_analysis, postprocess_analysis_dir
2 |
3 | __all__ = [
4 | "postprocess_analysis_dir",
5 | "append_metadata_to_analysis",
6 | ]
7 |
--------------------------------------------------------------------------------
/drakrun/tools/Makefile:
--------------------------------------------------------------------------------
1 | all: get-explorer-pid drakshell/drakshell
2 |
3 | get-explorer-pid: get-explorer-pid.c
4 | gcc $< -o $@ -lvmi `pkg-config --cflags --libs glib-2.0`
5 |
6 | drakshell/drakshell:
7 | $(MAKE) -C drakshell
8 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "drakvuf"]
2 | path = drakvuf
3 | url = https://github.com/tklengyel/drakvuf.git
4 | [submodule "drakrun/data/capa-rules"]
5 | path = drakrun/data/capa-rules
6 | url = https://github.com/mandiant/capa-rules.git
7 |
--------------------------------------------------------------------------------
/COPYING:
--------------------------------------------------------------------------------
1 | /*****************IMPORTANT DRAKVUF SANDBOX LICENSE TERMS*******************/
2 |
3 | Please read LICENSE file in order to read about this project's license.
4 |
5 | ***************************************************************************/
6 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include drakrun/test/*
2 | include drakrun/data/*
3 | include drakrun/tools/get-explorer-pid
4 | include drakrun/tools/drakshell/drakshell
5 | recursive-include drakrun/data/capa-rules *.yml
6 | recursive-include drakrun/web/frontend/dist *
7 |
--------------------------------------------------------------------------------
/drakrun/analyzer/file_metadata.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 |
3 |
class FileMetadata(BaseModel):
    """Metadata describing a file submitted for analysis."""

    # Original name of the file
    name: str
    # File type string (presumably a magic/MIME description — confirm with producer)
    type: str
    # Hex-encoded SHA-256 digest of the file contents
    sha256: str

    def to_dict(self) -> dict:
        """Return a JSON-serializable dict representation of this metadata."""
        return self.model_dump(mode="json")
11 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/other-issue.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Other issue
3 | about: An issue which is not a bug report and not a feature request.
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **What would you like to ask us?**
11 |
12 | ...
13 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/analysisStatus.js:
--------------------------------------------------------------------------------
// An analysis is pending while it is waiting in the queue or running.
export function isStatusPending(status) {
    const pendingStatuses = ["queued", "started"];
    return pendingStatuses.includes(status);
}
4 |
// An analysis is final once it has either finished or failed.
export function isStatusFinal(status) {
    const finalStatuses = ["finished", "failed"];
    return finalStatuses.includes(status);
}
8 |
--------------------------------------------------------------------------------
/docs/_static/theme-overrides.css:
--------------------------------------------------------------------------------
1 | /* override table width restrictions */
2 | .wy-table-responsive table td, .wy-table-responsive table th {
3 | white-space: normal;
4 | }
5 |
6 | .wy-table-responsive {
7 | margin-bottom: 24px;
8 | max-width: 100%;
9 | overflow: visible;
10 | }
11 |
--------------------------------------------------------------------------------
/drakrun/cli/banner.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import textwrap
3 |
log = logging.getLogger(__name__)


def banner(banner_text):
    """Log *banner_text* line by line, framed by 80-dash separator lines.

    The text is dedented first, so triple-quoted indented literals render
    flush-left in the log output.
    """
    separator = "-" * 80
    log.info(separator)
    for line in textwrap.dedent(banner_text).splitlines():
        log.info(line)
    log.info(separator)
12 |
--------------------------------------------------------------------------------
/dev/backend.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12
2 |
3 | COPY ./drakrun /app/drakrun
4 | COPY ./setup.py ./requirements.txt ./pyproject.toml ./MANIFEST.in /app/
5 |
6 | WORKDIR /app/
7 | RUN pip install .
8 | CMD ["flask", "--app", "drakrun.web.app:app", "run", "--with-threads", "--host", "0.0.0.0", "--port", "8080"]
9 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.isort]
2 | profile = "black"
3 | skip_glob = ["drakrun/data/**/*", "drakrun/tools/**/*", "drakrun/web/frontend/**/*"]
4 |
5 | [tool.black]
6 | exclude = "drakrun/(data|tools|web/frontend)"
7 |
8 | [tool.lint-python]
9 | lint-version = "2"
10 | source = "./drakrun"
11 | use-mypy = false
12 |
--------------------------------------------------------------------------------
/dev/frontend.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:22-alpine
2 |
3 | COPY ./drakrun/web/frontend /app
4 | RUN cd /app \
5 | && npm install --unsafe-perm . \
6 | && CI=true npm run build \
7 | && npm cache clean --force
8 |
9 | ENV PROXY_BACKEND_URL=http://backend.:8080
10 | WORKDIR /app
11 | CMD ["npm", "run", "dev"]
12 |
--------------------------------------------------------------------------------
/drakrun/tools/ipt/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.2)
2 | project(drak-ipt-tools CXX)
3 |
4 | add_executable(drak-ipt-blocks drak-ipt-blocks.cpp)
5 | target_link_libraries(drak-ipt-blocks ipt json-c spdlog fmt)
6 | set_property(TARGET drak-ipt-blocks PROPERTY CXX_STANDARD 17)
7 | install(TARGETS drak-ipt-blocks RUNTIME DESTINATION bin)
8 |
--------------------------------------------------------------------------------
/drakrun/data/config.toml:
--------------------------------------------------------------------------------
1 | [redis]
2 | host = "localhost"
3 | port = 6379
4 |
5 | [network]
6 | dns_server = "use-gateway-address"
7 | out_interface = "default"
8 | net_enable = false
9 |
10 | [drakrun]
11 | default_timeout = 300
12 | plugins = ["apimon", "clipboardmon", "exmon", "filetracer", "memdump", "procmon", "regmon", "socketmon", "tlsmon"]
13 |
--------------------------------------------------------------------------------
/drakrun/lib/libvmi/__init__.py:
--------------------------------------------------------------------------------
1 | from .dlls import get_dll_cmdline_args
2 | from .libvmi import extract_explorer_pid, extract_vmi_offsets, get_vmi_kernel_guid
3 | from .vmi_info import VmiInfo
4 |
5 | __all__ = [
6 | "get_dll_cmdline_args",
7 | "VmiInfo",
8 | "get_vmi_kernel_guid",
9 | "extract_vmi_offsets",
10 | "extract_explorer_pid",
11 | ]
12 |
--------------------------------------------------------------------------------
/drakrun/data/vm-post-restore.ps1:
--------------------------------------------------------------------------------
1 | if ( $DRAKVUF_NET_ENABLE )
2 | {
3 | ipconfig /release
4 | ipconfig /renew
5 | }
6 | $scriptBlock = {
7 | Set-Date -Date $DRAKVUF_DATE
8 | }
9 | Start-Process -Wait -Verb RunAs powershell.exe -ArgumentList (
10 | '-EncodedCommand', (
11 | [Convert]::ToBase64String([Text.Encoding]::Unicode.GetBytes($scriptBlock))
12 | )
13 | )
--------------------------------------------------------------------------------
/drakrun/web/frontend/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | DRAKVUF Sandbox
7 |
8 |
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules
11 | dist
12 | dist-ssr
13 | *.local
14 |
15 | # Editor directories and files
16 | .vscode/*
17 | !.vscode/extensions.json
18 | .idea
19 | .DS_Store
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/scripts/bump_version.json:
--------------------------------------------------------------------------------
1 | {
2 | "files": {
3 | "drakrun/web/frontend/package.json": "\"version\": \"$VERSION\"",
4 | "drakrun/web/frontend/package-lock.json": "\"version\": \"$VERSION\"",
5 | "drakrun/version.py": "__version__ = \"$VERSION\"",
6 | "docs/conf.py": "release = 'v$VERSION'"
7 | },
8 | "regex": "(\\d+\\.\\d+\\.\\d+(?:-(post|dev|alpha|beta|rc)[0-9]?)?)"
9 | }
10 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | version: 2
6 |
7 | build:
8 | os: ubuntu-22.04
9 | tools:
10 | python: "3.11"
11 |
12 | sphinx:
13 | configuration: docs/conf.py
14 |
15 | formats:
16 | - pdf
17 |
18 | python:
19 | install:
20 | - requirements: docs/requirements.txt
21 |
22 |
--------------------------------------------------------------------------------
/drakrun/cli/check_root.py:
--------------------------------------------------------------------------------
1 | import functools
2 | import logging
3 | import os
4 |
5 | import click
6 |
7 |
def check_root(fn):
    """Decorator aborting the wrapped click command unless run as root."""

    @functools.wraps(fn)
    def _require_root(*args, **kwargs):
        if os.geteuid() == 0:
            return fn(*args, **kwargs)
        logging.error("You need to have root privileges to run this command.")
        raise click.Abort()

    return _require_root
17 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/index_logs.py:
--------------------------------------------------------------------------------
1 | from ..indexer import build_log_index
2 | from .plugin_base import PostprocessContext
3 |
4 |
def index_logs(context: PostprocessContext) -> None:
    """Build the log index for this analysis and store it as 'log_index'."""
    index_bytes = build_log_index(context.analysis_dir, context.process_tree)
    (context.analysis_dir / "log_index").write_bytes(index_bytes)
11 |
--------------------------------------------------------------------------------
/drakrun/cli/worker.py:
--------------------------------------------------------------------------------
1 | import click
2 |
3 | from .check_root import check_root
4 |
5 |
@click.command(help="Start drakrun analysis worker")
@click.option(
    "--vm-id",
    "vm_id",
    default=1,
    type=int,
    show_default=True,
    help="VM id to use for running analyses",
)
@check_root
def worker(vm_id: int):
    """Run the analysis worker loop bound to the given VM id (root required)."""
    # Lazy import: keeps CLI startup fast and avoids pulling in the
    # analyzer stack for unrelated subcommands.
    from drakrun.analyzer.worker import worker_main

    worker_main(vm_id)
20 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/README.md:
--------------------------------------------------------------------------------
1 | ## DRAKVUF Sandbox frontend development notes
2 |
3 | If you want to run this React app with external DRAKVUF Sandbox API server:
4 |
5 | ```shell
6 | $ VITE_API_SERVER="http://<sandbox-api-host>:<port>" npm run dev
7 | ```
8 |
9 | Flask app must be run with `DRAKRUN_CORS_ALL=1` environment variable to share resources with external origins
10 | (e.g. your localhost web app). This setting is recommended only for a development environment.
11 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/main.jsx:
--------------------------------------------------------------------------------
1 | import { StrictMode } from "react";
2 | import { createRoot } from "react-dom/client";
3 |
4 | import "bootstrap";
5 | import "bootstrap/dist/css/bootstrap.css";
6 |
7 | import App from "./App.jsx";
8 | import { BrowserRouter } from "react-router-dom";
9 |
10 | createRoot(document.getElementById("root")).render(
11 |
12 |
13 |
14 |
15 | ,
16 | );
17 |
--------------------------------------------------------------------------------
/test/vm-runner-client/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools >= 40.9.0",
4 | ]
5 | build-backend = "setuptools.build_meta"
6 |
7 | [project]
8 | name = "vm-runner-client"
9 | description = "Drakvuf Sandbox VM runner client for E2E testing purposes"
10 | dependencies = [
11 | "fabric==2.5.0",
12 | "python-socks==2.2.0",
13 | "requests==2.32.4",
14 | "urllib3==2.6.0"
15 | ]
16 | version = "0.1.0"
17 |
18 | [tool.setuptools]
19 | packages = ["vm_runner_client"]
20 |
--------------------------------------------------------------------------------
/dev/compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | redis:
3 | image: "redis:latest"
4 | command: '--requirepass redisPassword'
5 |
6 | backend:
7 | build:
8 | context: ..
9 | dockerfile: ./dev/backend.Dockerfile
10 | volumes:
11 | - "./config.toml:/etc/drakrun/config.toml"
12 |
13 | frontend:
14 | build:
15 | context: ..
16 | dockerfile: ./dev/frontend.Dockerfile
17 | volumes:
18 | - "../drakrun/web/frontend/src:/app/src"
19 | ports:
20 | - "3000:3000"
21 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/ProcessBadge.jsx:
--------------------------------------------------------------------------------
1 | function trimProcessName(procname) {
2 | return procname.split("\\").at(-1);
3 | }
4 |
5 | export function ProcessBadge({ process, onClick = () => {} }) {
6 | if (!process) {
7 | return [];
8 | }
9 | return (
10 |
14 | {trimProcessName(process.name)}:{process.pid}
15 |
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/AnalysisStatusBadge.jsx:
--------------------------------------------------------------------------------
1 | export function AnalysisStatusBadge({ status, substatus }) {
2 | const statusStyle =
3 | {
4 | queued: "bg-primary",
5 | started: "bg-info",
6 | finished: "bg-success",
7 | failed: "bg-danger",
8 | }[status] || "bg-secondary";
9 | return (
10 |
11 | {status}
12 | {substatus ? ` (${substatus})` : ""}
13 |
14 | );
15 | }
16 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 |
12 | A clear and concise description of what the bug is.
13 |
14 | **How to reproduce**
15 |
16 | Steps to reproduce the behavior:
17 | 1. Install drakcore and drakrun
18 | 2. Execute `draksetup ...`
19 | 3. Execute (what commands?)...
20 |
21 | **Output of the [status checking commands](https://github.com/CERT-Polska/drakvuf-sandbox#checking-service-status)**
22 |
23 | If applicable (optional).
24 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | dataclasses-json==0.5.6
2 | click==8.1.7
3 | flare-capa==7.4.0
4 | orjson==3.9.15
5 | mslex==1.1.0
6 | pydantic==2.10.6
7 | pathvalidate==3.2.0
8 | drakpdb==0.2.2
9 | tomli==2.2.1
10 | # Web analyzer dependencies
11 | rq==2.3.1
12 | Flask==3.0.3
13 | flask-openapi3[swagger]==4.1.0
14 | python-magic==0.4.27
15 | rq-dashboard==0.8.2.2
16 | # Screenshotter dependencies
17 | asyncvnc==1.3.0
18 | Pillow==10.4.0
19 | Perception==0.6.8
20 | # Perception missing peer dependency
21 | opencv-contrib-python-headless==4.11.0.86
22 | # Also peer vivisect dependency, but we use it directly
23 | msgpack==1.0.8
24 | boto3==1.38.15
25 |
--------------------------------------------------------------------------------
/drakrun/cli/postprocess.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | import click
4 |
5 | from drakrun.lib.config import load_config
6 |
7 |
@click.command("postprocess")
@click.argument(
    "output_dir",
    type=click.Path(exists=True),
)
def postprocess(output_dir):
    """
    Run postprocessing on analysis output
    """
    # Imported lazily so the CLI entry point stays cheap to load.
    from drakrun.analyzer.postprocessing import (
        append_metadata_to_analysis,
        postprocess_analysis_dir,
    )

    config = load_config()
    analysis_path = pathlib.Path(output_dir)
    # Run all postprocessing plugins, then persist the metadata they produced.
    extra_metadata = postprocess_analysis_dir(analysis_path, config)
    append_metadata_to_analysis(analysis_path, extra_metadata)
26 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/test/vm-runner-client/vm_runner_client/socks.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from python_socks.sync import Proxy
3 |
4 |
def create_connection(
    host,
    port,
    proxy_username=None,
    proxy_password=None,
    proxy_host=None,
    proxy_port=None
):
    """
    Establishes TCP connection with host via SOCKS5

    Credentials are URL-quoted and only included when provided. Previously,
    omitted credentials were interpolated as the literal string "None" and
    special characters in passwords could break URL parsing.
    """
    from urllib.parse import quote

    if proxy_username is not None or proxy_password is not None:
        auth = f"{quote(proxy_username or '')}:{quote(proxy_password or '')}@"
    else:
        auth = ""
    proxy = Proxy.from_url(f"socks5://{auth}{proxy_host}:{proxy_port}")
    return proxy.connect(host, port)
18 |
19 |
def make_session(socks5_uri):
    """Return a requests session routing HTTP and HTTPS through the given SOCKS5 proxy."""
    session = requests.Session()
    session.proxies["http"] = socks5_uri
    session.proxies["https"] = socks5_uri
    return session
27 |
--------------------------------------------------------------------------------
/dev/config.example.toml:
--------------------------------------------------------------------------------
1 | # Copy this file to config.toml
2 | #
3 | # If you want to connect it to the remote worker instance:
4 | # - change [redis] so it points to the Redis host shared with worker
5 | # - uncomment and change [s3] section so it points to the S3 host shared with worker
6 |
7 | [redis]
8 | host = "redis."
9 | port = 6379
10 |
11 | # [s3]
12 | # address = "https://"
13 | # access_key = ""
14 | # secret_key = ""
15 |
16 | [network]
17 | dns_server = "use-gateway-address"
18 | out_interface = "default"
19 | net_enable = false
20 |
21 | [drakrun]
22 | default_timeout = 300
23 | plugins = ["apimon", "clipboardmon", "exmon", "filetracer", "memdump", "procmon", "regmon", "socketmon", "tlsmon"]
24 |
--------------------------------------------------------------------------------
/drakrun/cli/vm_start.py:
--------------------------------------------------------------------------------
1 | import click
2 |
3 | from drakrun.lib.config import load_config
4 | from drakrun.lib.install_info import InstallInfo
5 | from drakrun.lib.paths import INSTALL_INFO_PATH
6 | from drakrun.lib.vm import VirtualMachine
7 |
8 | from .check_root import check_root
9 |
10 |
@click.command(help="Start VM from snapshot")
@click.option(
    "--vm-id",
    "vm_id",
    default=1,
    type=int,
    show_default=True,
    # Fixed: help text was copy-pasted from the profile-generation command.
    help="VM id of the VM instance to start",
)
@check_root
def vm_start(vm_id: int):
    """Restore the given VM instance from the snapshot."""
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)
    vm = VirtualMachine(vm_id, install_info, config.network)
    vm.restore()
26 |
--------------------------------------------------------------------------------
/drakrun/tools/drakshell/linker.ld:
--------------------------------------------------------------------------------
ENTRY(_start)

PHDRS {
    /* 7 = PF_R|PF_W|PF_X, i.e. RWX (not R-X as previously commented):
       .data/.bss are folded into this single segment, so it must be
       both writable and executable for the injected shellcode. */
    text PT_LOAD FLAGS(7);
}
SECTIONS
{
    /* Everything (code, data, rodata) goes into one .text section so the
       objcopy-extracted blob is a single position-independent image. */
    .text : SUBALIGN(16)
    {
        *(.startup)
        *(.text)
        *(.text.*)
        *(.data)
        *(.data.*)
        *(.rodata)
        *(.rodata.*)
    } :text

    .bss : SUBALIGN(16)
    {
        bss_start = .;
        *(.bss)
        *(.bss.*)
        bss_end = .;
    }

    /* Drop metadata sections that would bloat the raw binary. */
    /DISCARD/ : {
        *(.comment)
        *(.dynsym)
        *(.dynstr)
        *(.gnu.hash)
        *(.hash)
        *(.eh_frame_hdr)
        *(.eh_frame)
        *(.dynamic)
    }
}
38 |
--------------------------------------------------------------------------------
/drakrun/cli/vm_stop.py:
--------------------------------------------------------------------------------
1 | import click
2 |
3 | from drakrun.lib.config import load_config
4 | from drakrun.lib.install_info import InstallInfo
5 | from drakrun.lib.paths import INSTALL_INFO_PATH
6 | from drakrun.lib.vm import VirtualMachine
7 |
8 | from .check_root import check_root
9 |
10 |
@click.command(help="Stop VM and cleanup network")
@click.option(
    "--vm-id",
    "vm_id",
    default=1,
    type=int,
    show_default=True,
    # Fixed: help text was copy-pasted from the profile-generation command.
    help="VM id of the VM instance to stop",
)
@check_root
def vm_stop(vm_id: int):
    """Destroy the given VM instance and clean up its network setup."""
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)
    vm = VirtualMachine(vm_id, install_info, config.network)
    vm.destroy()
26 |
--------------------------------------------------------------------------------
/test/vm-runner-client/vm_runner_client/__main__.py:
--------------------------------------------------------------------------------
import argparse
import sys

from .client import DrakvufVM


def main() -> None:
    """CLI entry point: suspend or destroy a vm-runner VM.

    The VM identity comes from the CI/CD environment when available
    (DrakvufVM.get_vm_identity()), otherwise from the positional argument.
    """
    parser = argparse.ArgumentParser("CLI client for vm-runner")
    parser.add_argument('command', choices=['suspend', 'destroy'])
    parser.add_argument('identity', default=None, nargs="?")
    args = parser.parse_args()

    identity = DrakvufVM.get_vm_identity() or args.identity
    if identity is None:
        print("[!] Identity is required outside CI/CD environment", file=sys.stderr)
        sys.exit(1)

    print(f"=> {args.command} {identity}", file=sys.stderr)
    vm = DrakvufVM(identity)
    if args.command == "suspend":
        vm.suspend()
    elif args.command == "destroy":
        vm.destroy()


# Guard so importing the module (e.g. by tooling) has no side effects;
# `python -m vm_runner_client` still runs as before.
if __name__ == "__main__":
    main()
25 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/screenshot_metadata.py:
--------------------------------------------------------------------------------
1 | import json
2 |
3 | from .plugin_base import PostprocessContext
4 |
5 |
def screenshot_metadata(context: PostprocessContext) -> None:
    """
    Checks if every line is parseable and exposes screenshot amount in metadata object.
    We can perform some postprocessing here if needed in the future.
    """
    lines = (context.analysis_dir / "screenshots.json").read_text().splitlines()
    # Parse every non-empty line (validating it) and remember the index
    # of the last entry, which equals the number of screenshots taken.
    last_index = 0
    for line in filter(None, lines):
        last_index = json.loads(line)["index"]
    context.update_metadata({"screenshots": last_index})
19 |
--------------------------------------------------------------------------------
/drakrun/tools/drakshell/Makefile:
--------------------------------------------------------------------------------
IDIR =./include
CFLAGS=-I$(IDIR) -fPIE -nostdlib -ffreestanding -masm=intel -march=x86-64 -mtune=generic -fshort-wchar -O2
LFLAGS=-T linker.ld
ODIR=obj

_DEPS = nt_loader.h
DEPS = $(patsubst %,$(IDIR)/%,$(_DEPS))

_OBJ = nt_loader.o drakshell.o thread_start.o
OBJ = $(patsubst %,$(ODIR)/%,$(_OBJ))

# Extract the raw shellcode blob from the linked ELF image.
drakshell: $(ODIR)/drakshell.elf
	objcopy -O binary -j.text -j.bss --set-section-flags .bss=alloc,load,contents $^ drakshell

$(ODIR)/%.o: %.c $(DEPS)
	gcc -c -o $@ $< $(CFLAGS)

$(ODIR)/%.o: %.S $(DEPS)
	gcc -c -o $@ $<

$(ODIR)/drakshell.elf: $(OBJ)
	gcc $(LFLAGS) -o $@ $^ $(CFLAGS)

# Fixed: previously referenced undefined $(INCDIR) (the variable is IDIR),
# which expanded to a stray "/*~" glob.
clean:
	rm -f drakshell $(ODIR)/*.o $(ODIR)/*.elf *~ core $(IDIR)/*~

.PHONY: clean
28 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/compress_ipt.py:
--------------------------------------------------------------------------------
1 | import zipfile
2 |
3 | from drakrun.lib.paths import IPT_DIR, IPT_ZIP
4 |
5 | from .plugin_base import PostprocessContext
6 |
7 |
def compress_ipt(context: PostprocessContext) -> None:
    """
    Compress the analysis IPT directory into a single ZIP archive,
    removing source files as they are archived.

    IPT traces are quite large but compress well. Fixes two defects:
    the ZipFile was never closed, and directories yielded by rglob()
    were passed to unlink(), which raises IsADirectoryError.
    """
    analysis_dir = context.analysis_dir
    ipt_path = analysis_dir / IPT_DIR
    ipt_zip_path = analysis_dir / IPT_ZIP

    with zipfile.ZipFile(ipt_zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
        for ipt_file in ipt_path.rglob("*"):
            # rglob("*") yields directories too - only archive regular files.
            if not ipt_file.is_file():
                continue
            zipf.write(ipt_file, ipt_file.relative_to(analysis_dir).as_posix())
            ipt_file.unlink()
23 |
--------------------------------------------------------------------------------
/drakrun/tools/drakshell/thread_start.S:
--------------------------------------------------------------------------------
# Entry stub for the injected drakshell thread.
# Saves the thread parameter (rcx/ecx — assumed to be the shellcode base
# address passed by the injector; TODO confirm against the injector code),
# runs the C entry point, then self-frees the shellcode pages (x86-64 only).
.intel_syntax noprefix
.globl thread_start
thread_start:

#if defined(__x86_64__)
push rcx
#endif
#if defined(__i386__)
push ecx
#endif
call drakshell_main
# This one is going to deallocate memory occupied by shellcode
# and finish the thread to cover up all traces of drakshell
# in explorer.exe
#
# We're going to jump to the VirtualFree and it is going to
# return to the ExitThread for us.
#if defined(__x86_64__)
pop rcx
# Round the saved pointer down to a page boundary (VirtualFree lpAddress).
and rcx, -4096
xor rdx, rdx
# 0x8000 = MEM_RELEASE
mov r8, 0x8000
jmp [rip+pVirtualFree]
#endif
#if defined(__i386__)
# TODO: Deallocation for i386 not implemented
pop ecx
xor eax, eax
ret
#endif
31 |
--------------------------------------------------------------------------------
/docs/drakpdb.rst:
--------------------------------------------------------------------------------
1 | Using drakpdb tool
2 | ##################
3 |
4 | The ``drakpdb`` tool allows you to:
5 |
6 | * determine PDB name and GUID age given an executable file (e.g. DLL)
7 | * fetch PDB with given name and GUID age
8 | * parse PDB into a profile that could be plugged into DRAKVUF
9 |
10 | Usage examples
11 | ==============
12 |
13 | .. code-block:: console
14 |
15 | root@zen2:~/drakvuf# drakpdb pe_codeview_data --file ntdll.dll
16 | {'filename': 'wntdll.pdb', 'symstore_hash': 'dccff2d483fa4dee81dc04552c73bb5e2'}
17 | root@zen2:~/drakvuf# drakpdb fetch_pdb --pdb_name wntdll.pdb --guid_age dccff2d483fa4dee81dc04552c73bb5e2
18 | 100%|██████████████████████████████████████████████████████████████| 2.12M/2.12M [00:00<00:00, 2.27MiB/s]
19 | root@zen2:~/drakvuf# drakpdb parse_pdb --pdb_name wntdll.pdb > profile.json
20 |
--------------------------------------------------------------------------------
/drakrun/cli/mount.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import click
4 |
5 | from drakrun.lib.vm import FIRST_CDROM_DRIVE
6 | from drakrun.lib.xen import xen_insert_cd
7 |
8 | from .check_root import check_root
9 |
10 |
@click.command(help="Mount ISO into guest", no_args_is_help=True)
@click.argument("iso_path", type=click.Path(exists=True))
@click.option(
    "--domain",
    "domain_name",
    type=str,
    default="vm-0",
    show_default=True,
    help="Domain name (i.e. Virtual Machine name)",
)
@check_root
def mount(iso_path, domain_name):
    """Inject ISO file into specified guest vm.
    Domain can be retrieved by running "xl list" command on the host.
    """
    # xen_insert_cd needs an absolute host-side path to the ISO image.
    absolute_iso_path = os.path.abspath(iso_path)
    xen_insert_cd(domain_name, FIRST_CDROM_DRIVE, absolute_iso_path)
28 |
--------------------------------------------------------------------------------
/drakrun/lib/fetch_pdb.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import pathlib
3 | import shutil
4 |
5 | from drakpdb import fetch_pdb
6 |
7 | from drakrun.lib.paths import PDB_CACHE_DIR
8 |
9 | log = logging.getLogger(__name__)
10 |
11 |
def get_cache_pdb_name(pdbname: str, guidage: str) -> str:
    """Return the cache file name with the GUID/age embedded, e.g. ntdll-&lt;guidage&gt;.pdb."""
    base, ext = pdbname.rsplit(".", 1)
    return "{}-{}.{}".format(base, guidage, ext)
15 |
16 |
def vmi_fetch_pdb(pdbname: str, guidage: str) -> pathlib.Path:
    """Fetch a PDB into the local cache and return its path.

    Already-fetched PDBs (keyed by name + GUID/age) are returned without
    re-downloading.
    """
    cache_pdbname = get_cache_pdb_name(pdbname, guidage)
    destpath = (PDB_CACHE_DIR / cache_pdbname).resolve()
    if destpath.exists():
        log.info("PDB %s already fetched", cache_pdbname)
        return destpath
    # Make sure the cache directory exists before downloading into it.
    PDB_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    pdb_filepath = fetch_pdb(pdbname, guidage, PDB_CACHE_DIR.as_posix())
    # Rename to the GUID-qualified cache name so different builds don't collide.
    shutil.move(pdb_filepath, destpath)
    return destpath
26 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/vite.config.js:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'vite'
2 | import react from '@vitejs/plugin-react'
3 |
4 | // https://vite.dev/config/
export default defineConfig({
  plugins: [react()],
  define: {
    // Expose the package.json version to the app as a compile-time constant.
    '__APP_VERSION__': JSON.stringify(process.env.npm_package_version),
  },
  // dev-server
  server: {
    host: "0.0.0.0",
    port: 3000,
    strictPort: true,
    // Forward API calls to a backend only when PROXY_BACKEND_URL is set;
    // otherwise the dev server serves the frontend standalone.
    proxy: (
      process.env.PROXY_BACKEND_URL ? {
        "/api": {
          target: process.env.PROXY_BACKEND_URL,
          changeOrigin: true,
        },
        "/openapi": {
          target: process.env.PROXY_BACKEND_URL,
          changeOrigin: true,
        }
      } : {}
    ),
  }
})
29 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

import os
from setuptools import find_packages, setup


def read_requirements(path="requirements.txt"):
    """Return requirement specifiers from *path*, skipping blanks and comments.

    requirements.txt contains comment lines (e.g. "# Web analyzer
    dependencies") that must not be passed to install_requires.
    """
    with open(path) as f:
        return [
            line.strip()
            for line in f
            if line.strip() and not line.lstrip().startswith("#")
        ]


# Read the version without importing the package (avoids import-time deps).
version_globals = {}
with open("drakrun/version.py") as f:
    exec(f.read(), version_globals)

version = version_globals["__version__"]
# Allow CI to append a local version tag, e.g. 1.2.3+<tag>.
if os.getenv("DRAKRUN_VERSION_TAG"):
    version = version + "+" + os.getenv("DRAKRUN_VERSION_TAG")

setup(
    name="drakvuf-sandbox",
    version=version,
    description="DRAKRUN",
    package_dir={"drakrun": "drakrun"},
    packages=find_packages(),
    include_package_data=True,
    python_requires=">=3.9",
    install_requires=read_requirements(),
    entry_points={
        "console_scripts": [
            "drakrun = drakrun.cli:main",
        ]
    },
    classifiers=[
        "Programming Language :: Python",
        "Operating System :: OS Independent",
    ],
)
33 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/gzip_syscalls.py:
--------------------------------------------------------------------------------
1 | import gzip
2 | import shutil
3 |
4 | from drakrun.lib.config import load_config
5 |
6 | from .plugin_base import PostprocessContext
7 |
8 |
def gzip_syscalls(context: PostprocessContext) -> None:
    """
    Compress syscall.log using gzip if gzip_syscalls configuration option is True.
    This will disable preview of these logs in web.
    """
    analysis_dir = context.analysis_dir
    if not load_config().drakrun.gzip_syscalls:
        return

    for stem in ("syscall", "sysret"):
        plain_log = analysis_dir / f"{stem}.log"
        if not plain_log.exists():
            continue
        # Stream-compress so the whole log never has to fit in memory.
        with plain_log.open("rb") as src, gzip.open(
            analysis_dir / f"{stem}.log.gz", "wb"
        ) as dst:
            shutil.copyfileobj(src, dst)
        plain_log.unlink()
27 |
--------------------------------------------------------------------------------
/drakrun/data/cfg.template:
--------------------------------------------------------------------------------
1 | # DomU config template for DRAKVUF Sandbox
2 |
3 | # This is the template used to generate DomU configuration instances.
4 | # Please don't edit this file unless you know what you are doing.
5 |
6 | arch = 'x86_64'
7 | name = "{{ VM_NAME }}"
8 | maxmem = {{ MEMORY }}
9 | memory = {{ MEMORY }}
10 | vcpus = {{ VCPUS }}
11 | maxvcpus = {{ VCPUS }}
12 | type = "hvm"
13 | boot = "cd"
14 | hap = 1
15 | acpi = 1
16 | on_poweroff = "destroy"
17 | on_reboot = "{{ ON_REBOOT }}"
18 | on_crash = "destroy"
19 | vnc=1
20 | vnclisten="0.0.0.0:{{ VM_ID }},websocket={{ VNC_PORT }}"
21 | vncpasswd="{{ VNC_PASS }}"
22 | usb = 1
23 | usbdevice = "tablet"
24 | altp2m = 2
25 | shadow_memory = 32
26 | audio = 1
27 | soundhw='hda'
28 | cpuid="host,htt=0"
29 | vga="stdvga"
30 | vif = [ 'type=ioemu,model=e1000,bridge={{ BRIDGE_NAME }}' ]
31 | disk = [ {{ DISKS }} ]
32 | serial = [ "unix:/var/run/drakrun/{{ VM_NAME }}.sock,server,nowait" ]
33 | #uncomment when using IPT
34 | #vmtrace_buf_kb = 8192
35 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/eslint.config.js:
--------------------------------------------------------------------------------
1 | import js from '@eslint/js'
2 | import globals from 'globals'
3 | import reactHooks from 'eslint-plugin-react-hooks'
4 | import reactRefresh from 'eslint-plugin-react-refresh'
5 |
export default [
  // Never lint build output.
  { ignores: ['dist'] },
  {
    files: ['**/*.{js,jsx}'],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
      parserOptions: {
        ecmaVersion: 'latest',
        ecmaFeatures: { jsx: true },
        sourceType: 'module',
      },
    },
    plugins: {
      'react-hooks': reactHooks,
      'react-refresh': reactRefresh,
    },
    rules: {
      ...js.configs.recommended.rules,
      ...reactHooks.configs.recommended.rules,
      // Allow unused UPPER_CASE / component-style bindings.
      'no-unused-vars': ['error', { varsIgnorePattern: '^[A-Z_]' }],
      'react-refresh/only-export-components': [
        'warn',
        { allowConstantExport: true },
      ],
    },
  },
]
34 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/get_ttps_info.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from .parse_utils import parse_log
4 | from .plugin_base import PostprocessContext
5 |
6 |
def get_ttps_info(context: PostprocessContext) -> None:
    """Resolve TTP occurrences to process-tree seqids and add them to the report."""
    analysis_dir = context.analysis_dir
    process_tree = context.process_tree

    def filter_ttps(data: dict) -> Optional[dict]:
        # Map each (pid, ppid) occurrence onto the process tree; occurrences
        # that cannot be resolved are dropped from the seqid list.
        seqids = []
        for occurrence in data["occurrences"]:
            resolved = process_tree.get_process_by_pid_ppid(
                occurrence["pid"], occurrence["ppid"]
            )
            if resolved:
                seqids.append(resolved.seqid)
        return {
            "name": data["name"],
            "att&ck": data["att&ck"],
            "process_seqids": seqids,
        }

    ttps_log = parse_log(analysis_dir / "ttps.json", filter_ttps)
    context.update_report({"ttps": list(ttps_log)})
28 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release new Drakvuf Sandbox version
2 |
3 | on:
4 | release:
5 | types: [published]
6 |
7 | jobs:
8 | release_artifacts:
9 | runs-on: ubuntu-latest
10 | permissions:
11 | contents: write
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: Download artifact
15 | id: download-artifact
16 | uses: dawidd6/action-download-artifact@v6
17 | with:
18 | workflow: build.yml
19 | workflow_conclusion: success
20 | - uses: nanoufo/action-upload-artifacts-and-release-assets@v2
21 | with:
22 | path: |
23 | drakvuf-sandbox-whl/drakvuf_sandbox-*.whl
24 | upload-release-files: true
25 | release-upload-url: ${{ github.event.release.upload_url }}
26 | - name: Publish to PyPi
27 | uses: pypa/gh-action-pypi-publish@v1.13.0
28 | with:
29 | user: __token__
30 | password: ${{ secrets.pypi_password }}
31 | packages-dir: drakvuf-sandbox-whl/
32 |
--------------------------------------------------------------------------------
/drakrun/analyzer/post_restore.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import datetime
3 |
4 | from drakrun.lib.paths import ETC_DIR, PACKAGE_DATA_PATH
5 |
6 |
def prepare_ps_command(script: str):
    """Wrap a PowerShell script into a powershell.exe -EncodedCommand invocation."""
    # -EncodedCommand expects the script base64-encoded from UTF-16LE.
    payload = base64.b64encode(script.encode("utf-16le"))
    return ["powershell.exe", "-EncodedCommand", payload.decode()]
11 |
def get_post_restore_command(net_enable: bool):
    """Build the powershell command that runs the post-restore script in the guest."""
    # Prefer an operator-provided script in ETC_DIR, fall back to the bundled one.
    script_path = ETC_DIR / "vm-post-restore.ps1"
    if not script_path.exists():
        script_path = PACKAGE_DATA_PATH / "vm-post-restore.ps1"
    script = script_path.read_text()

    # Substitute placeholders with concrete PowerShell values.
    stamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    substitutions = {
        "$DRAKVUF_NET_ENABLE": "$true" if net_enable else "$false",
        "$DRAKVUF_DATE": f'"{stamp}"',
    }
    for placeholder, value in substitutions.items():
        script = script.replace(placeholder, value)
    return prepare_ps_command(script)
25 |
--------------------------------------------------------------------------------
/drakrun/lib/network_info.py:
--------------------------------------------------------------------------------
1 | import ipaddress
2 | import json
3 | import pathlib
4 |
5 | from pydantic import BaseModel
6 |
7 |
class NetworkInfo(BaseModel):
    """Per-VM network configuration, persisted as JSON (see load/save)."""

    # Values matching the [network] configuration section
    out_interface: str
    dns_server: str
    net_enable: bool

    # Per-VM runtime network parameters
    bridge_name: str
    network_address: str
    gateway_address: str
    vm_address: str
    dnsmasq_pidfile: str

    @property
    def network_prefix(self):
        # Prefix length of the VM network, e.g. 24 for a /24
        return ipaddress.IPv4Network(self.network_address).prefixlen

    @staticmethod
    def load(path: pathlib.Path) -> "NetworkInfo":
        """Parses NetworkInfo file at the provided path"""
        with path.open("r") as f:
            return NetworkInfo.model_validate_json(f.read())

    def save(self, path: pathlib.Path) -> None:
        """Serializes self and writes to the provided path"""
        with path.open("w") as f:
            f.write(json.dumps(self.model_dump(mode="json"), indent=4))

    def dump_for_env(self):
        """Return all fields as an UPPERCASE-keyed str/str mapping
        (presumably for passing via process environment — verify against callers)."""
        return {k.upper(): str(v) for k, v in self.model_dump(mode="json").items()}
36 |
--------------------------------------------------------------------------------
/drakrun/cli/postinstall.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import click
4 |
5 | from drakrun.lib.config import load_config
6 | from drakrun.lib.install_info import InstallInfo
7 | from drakrun.lib.paths import INSTALL_INFO_PATH
8 | from drakrun.lib.vm import VirtualMachine
9 | from drakrun.lib.vmi_profile import create_vmi_info, create_vmi_json_profile
10 |
11 | from .check_root import check_root
12 |
13 | log = logging.getLogger(__name__)
14 |
15 |
@click.command(help="Finalize VM installation")
@check_root
def postinstall():
    """Finalize installation: snapshot vm-0 and generate the VMI JSON profile via vm-1."""
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)
    vm0 = VirtualMachine(0, install_info, config.network)

    if vm0.is_running is False:
        log.error("vm-0 is not running")
        raise click.Abort()

    # Eject the installation CD and collect VMI info while vm-0 is still live.
    vm0.eject_cd()
    vmi_info = create_vmi_info(vm0)
    # Save vm-0 state, then snapshot its volume as the base for clone VMs.
    vm0.save()
    vm0.storage.snapshot_vm0_volume()

    # Restore vm-1 from the fresh snapshot to build the JSON profile,
    # making sure it is torn down even if profile generation fails.
    vm1 = VirtualMachine(1, install_info, config.network)
    vm1.restore()
    try:
        create_vmi_json_profile(vm1, vmi_info)
    finally:
        vm1.destroy()
    log.info("All right, VM setup is done.")
39 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | =============================
2 | DRAKVUF Sandbox documentation
3 | =============================
4 |
5 | DRAKVUF Sandbox is an automated black-box malware analysis system with DRAKVUF engine under the hood, which does not require an agent on guest OS.
6 |
7 | This project provides you with a friendly web interface that allows you to upload suspicious files to be analyzed. Once the sandboxing job is finished, you can explore the analysis result through the mentioned interface and get insight whether the file is truly malicious or not.
8 |
9 | Because it is usually pretty hard to set up a malware sandbox, this project also comes with installation toolkit that would help you get through the necessary steps.
10 |
11 | It's highly recommended to have a basic knowledge about Xen hypervisor that would help you debug issues that may depend on your hardware.
12 |
13 | .. toctree::
14 | :maxdepth: 2
15 | :caption: User guide
16 |
17 | whats_changed
18 | usage/getting_started
19 | usage/basic_usage
20 | usage/advanced_configuration
21 | usage/optional_features
22 | usage/managing_snapshots
23 | usage/troubleshooting
24 |
25 | .. toctree::
26 | :maxdepth: 1
27 | :caption: Misc
28 |
29 | faq
30 | drakpdb
31 | ipt
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/generate_wireshark_key_file.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | from .plugin_base import PostprocessContext
5 |
logger = logging.getLogger(__name__)


def gen_key_file_from_log(tlsmon_log):
    """Build Wireshark (NSS key log) content from tlsmon JSON lines.

    Malformed or incomplete lines are logged and skipped.
    """
    entries = []
    for raw_line in tlsmon_log:
        try:
            record = json.loads(raw_line)
            entries.append(
                f"CLIENT_RANDOM {record['client_random']} {record['master_key']}\n"
            )
        except KeyError:
            logger.exception(f"JSON is missing a required field\n{raw_line}")
        except json.JSONDecodeError as e:
            logger.warning(f"line cannot be parsed as JSON\n{e}")
    return "".join(entries)
24 |
25 |
def generate_wireshark_key_file(context: PostprocessContext) -> None:
    """Convert tlsmon.log into a Wireshark-compatible TLS key file."""
    analysis_dir = context.analysis_dir
    with (analysis_dir / "tlsmon.log").open("r") as tlsmon_log:
        key_file_content = gen_key_file_from_log(tlsmon_log)
    (analysis_dir / "wireshark_key_file.txt").write_text(key_file_content)
33 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/AnalysisScreenshotViewer.jsx:
--------------------------------------------------------------------------------
1 | import Zoom from "react-medium-image-zoom";
2 | import "react-medium-image-zoom/dist/styles.css";
3 |
// API base: explicit VITE_API_SERVER when configured, same-origin /api otherwise.
let BASE_URL = import.meta.env.VITE_API_SERVER || "/api";
10 |
11 | export function AnalysisScreenshotViewer({ analysis }) {
12 | return (
13 |
14 |
15 | {Array.from(Array(analysis.screenshots).keys()).map(
16 | (_, idx) => (
17 |
18 |
19 |
25 |
26 |
27 | ),
28 | )}
29 |
30 |
31 | );
32 | }
33 |
--------------------------------------------------------------------------------
/drakrun/web/app.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import rq_dashboard
4 | from flask import jsonify, send_file
5 | from flask_openapi3 import Info, OpenAPI
6 |
7 | from drakrun.lib.config import load_config
8 | from drakrun.version import __version__
9 |
10 | from .api import api
11 |
info = Info(title="Drakvuf Sandbox", version=__version__)
app = OpenAPI(__name__, info=info, static_folder="frontend/dist/assets")
config = load_config()
app.config.update(
    {
        # Point rq-dashboard at the configured Redis instance.
        "RQ_DASHBOARD_REDIS_URL": config.redis.make_url(),
    }
)
# Mount the rq-dashboard queue-monitoring UI under /rq.
rq_dashboard.web.setup_rq_connection(app)
app.register_blueprint(rq_dashboard.blueprint, url_prefix="/rq")
# Register the sandbox REST API blueprint.
app.register_api(api)
23 |
24 |
@app.errorhandler(404)
def resource_not_found(e):
    """Return a JSON body for 404s instead of Flask's default HTML page."""
    return jsonify(error="Object not found"), 404
28 |
29 |
# Opt-in permissive CORS (presumably for development with a separately
# served frontend — set DRAKRUN_CORS_ALL to enable).
if os.environ.get("DRAKRUN_CORS_ALL"):

    @app.after_request
    def add_header(response):
        """Allow any origin and the Range request header on every response."""
        response.headers["Access-Control-Allow-Origin"] = "*"
        response.headers["Access-Control-Allow-Headers"] = "Range"
        return response
37 |
38 |
@app.route("/")
def index():
    """Serve the built frontend entry point."""
    return send_file("frontend/dist/index.html")
42 |
43 |
# Any other path is routed client-side by the SPA, so always serve index.html.
# Fixed: the route was missing its path placeholder, duplicating the "/" rule
# while the view function takes a `path` argument.
@app.route("/<path:path>")
def catchall(path):
    """Serve the frontend entry point for all non-API client-side routes."""
    return send_file("frontend/dist/index.html")
47 |
--------------------------------------------------------------------------------
/drakrun/lib/install_info.py:
--------------------------------------------------------------------------------
1 | import json
2 | import pathlib
3 | from typing import Optional
4 |
5 | from pydantic import BaseModel
6 |
7 | from .paths import SNAPSHOT_DIR, XL_CFG_TEMPLATE_PATH
8 |
9 |
class InstallInfo(BaseModel):
    """
    This object is main configuration of the VM, initialized during installation process.

    Values are mapped to the xl domain configuration file template.
    """

    # Disk backend in use (zfs_tank_name / lvm_volume_group below suggest
    # zfs and lvm variants — confirm valid values against the installer)
    storage_backend: str
    disk_size: str
    vnc_passwd: str
    vcpus: int
    memory: int
    # xl on_reboot actions for vm-0 (install VM) and clone VMs
    reboot_vm0_action: str = "restart"
    reboot_vmn_action: str = "destroy"
    xl_cfg_template: pathlib.Path = XL_CFG_TEMPLATE_PATH
    snapshot_dir: pathlib.Path = SNAPSHOT_DIR

    # Backend-specific parameters; only the relevant one is set
    lvm_snapshot_size: str = "1G"
    zfs_tank_name: Optional[str] = None
    lvm_volume_group: Optional[str] = None

    @staticmethod
    def load(path: pathlib.Path) -> "InstallInfo":
        """Parses InstallInfo file at the provided path"""
        with path.open("r") as f:
            return InstallInfo.model_validate_json(f.read())

    def save(self, path: pathlib.Path) -> None:
        """Serializes self and writes to the provided path"""
        with path.open("w") as f:
            f.write(json.dumps(self.model_dump(mode="json"), indent=4))
41 |
--------------------------------------------------------------------------------
/drakrun/cli/drakvuf_cmdline.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import shlex
3 |
4 | import click
5 |
6 | from drakrun.lib.config import load_config
7 | from drakrun.lib.drakvuf_cmdline import get_base_drakvuf_cmdline
8 | from drakrun.lib.install_info import InstallInfo
9 | from drakrun.lib.libvmi import VmiInfo
10 | from drakrun.lib.paths import INSTALL_INFO_PATH, VMI_INFO_PATH, VMI_KERNEL_PROFILE_PATH
11 | from drakrun.lib.vm import VirtualMachine
12 |
13 | log = logging.getLogger(__name__)
14 |
15 |
@click.command(help="Get base Drakvuf cmdline")
@click.option(
    "--vm-id",
    "vm_id",
    default=1,
    type=int,
    show_default=True,
    help="VM id to use for generating profile",
)
@click.option(
    "--cmd",
    default=None,
    help="Command line to inject for execution",
)
def drakvuf_cmdline(vm_id, cmd):
    """Print the base drakvuf command line for the selected VM as a shell-quoted string."""
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)
    vm = VirtualMachine(vm_id, install_info, config.network)
    vmi_info = VmiInfo.load(VMI_INFO_PATH)

    cmdline = get_base_drakvuf_cmdline(
        vm.vm_name,
        VMI_KERNEL_PROFILE_PATH.as_posix(),
        vmi_info=vmi_info,
        exec_cmd=cmd,
    )
    print(shlex.join(cmdline))
46 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/MethodFilterPicker.jsx:
--------------------------------------------------------------------------------
1 | import { useCallback, useMemo } from "react";
2 | import CreatableSelect from "react-select/creatable";
3 |
// react-select style overrides for the method filter: the second
// (state) argument was previously destructured as `{ data }` but never
// used, so it is dropped to silence no-unused-vars.
const methodPickerStyles = {
    // Highlight selected method chips with the orange accent color.
    multiValue: (styles) => ({
        ...styles,
        backgroundColor: "rgba(255, 86, 48, 0.1)",
    }),
    multiValueLabel: (styles) => ({
        ...styles,
        color: "rgb(255, 86, 48)",
    }),
};
12 |
13 | export function MethodFilterPicker({ onFilterChange, methods, currentFilter }) {
14 | const onSelectChange = useCallback(
15 | (value) => {
16 | return onFilterChange(value.map((v) => v.value));
17 | },
18 | [onFilterChange],
19 | );
20 | const methodOptions = useMemo(
21 | () => methods.map((option) => ({ label: option, value: option })),
22 | [methods],
23 | );
24 | const currentFilterOptions = useMemo(
25 | () => currentFilter.map((option) => ({ label: option, value: option })),
26 | [currentFilter],
27 | );
28 | return (
29 |
30 |
37 |
38 | );
39 | }
40 |
--------------------------------------------------------------------------------
/drakrun/cli/main.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import click
4 |
5 | from .analyze import analyze
6 | from .drakshell import drakshell
7 | from .drakvuf_cmdline import drakvuf_cmdline
8 | from .injector import injector
9 | from .install import install
10 | from .make_profile import make_profile
11 | from .modify_vm0 import modify_vm0
12 | from .mount import mount
13 | from .postinstall import postinstall
14 | from .postprocess import postprocess
15 | from .s3_storage import s3_storage
16 | from .snapshot import snapshot
17 | from .vm_start import vm_start
18 | from .vm_stop import vm_stop
19 | from .worker import worker
20 |
21 |
@click.group()
def main():
    """drakrun CLI entry point: configure logging before dispatching subcommands."""
    handler = logging.StreamHandler()
    logging.basicConfig(
        level=logging.INFO,
        format="[%(asctime)s][%(levelname)s] %(message)s",
        handlers=[handler],
    )
    # drakrun's own loggers are more verbose than third-party ones
    logging.getLogger("drakrun").setLevel(logging.DEBUG)
30 |
31 |
# Register all subcommands on the root group (order preserved for --help output).
for _command in (
    analyze,
    postprocess,
    install,
    postinstall,
    vm_start,
    vm_stop,
    worker,
    modify_vm0,
    injector,
    drakshell,
    drakvuf_cmdline,
    mount,
    make_profile,
    s3_storage,
    snapshot,
):
    main.add_command(_command)
47 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@drakvuf-sandbox/web",
3 | "private": true,
4 | "version": "0.20.0",
5 | "type": "module",
6 | "scripts": {
7 | "dev": "vite",
8 | "build": "vite build",
9 | "lint": "eslint .",
10 | "preview": "vite preview"
11 | },
12 | "dependencies": {
13 | "@fortawesome/fontawesome-svg-core": "^6.7.2",
14 | "@fortawesome/free-regular-svg-icons": "^6.7.2",
15 | "@fortawesome/free-solid-svg-icons": "^6.7.2",
16 | "@fortawesome/react-fontawesome": "^0.2.2",
17 | "@melloware/react-logviewer": "^6.2.0",
18 | "@novnc/novnc": "^1.5.0",
19 | "axios": "^1.12.0",
20 | "bootstrap": "^5.3.5",
21 | "jszip": "^3.10.1",
22 | "jszip-utils": "^0.1.0",
23 | "prettier": "^3.5.3",
24 | "react": "^19.0.0",
25 | "react-dom": "^19.0.0",
26 | "react-medium-image-zoom": "^5.2.14",
27 | "react-modal": "^3.16.3",
28 | "react-router-dom": "^6.30.0",
29 | "react-select": "^5.10.1",
30 | "startbootstrap-sb-admin": "^7.0.7"
31 | },
32 | "devDependencies": {
33 | "@eslint/js": "^9.22.0",
34 | "@types/react": "^19.0.10",
35 | "@types/react-dom": "^19.0.4",
36 | "@vitejs/plugin-react": "^4.3.4",
37 | "eslint": "^9.22.0",
38 | "eslint-plugin-react-hooks": "^5.2.0",
39 | "eslint-plugin-react-refresh": "^0.4.19",
40 | "globals": "^16.0.0",
41 | "vite": "^6.4.1"
42 | },
43 | "prettier": {
44 | "tabWidth": 4
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/build_process_tree.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 | from ..process_tree import tree_from_log
5 | from .plugin_base import PostprocessContext
6 |
7 | logger = logging.getLogger(__name__)
8 |
9 |
def build_process_tree(context: PostprocessContext) -> None:
    """Build the process tree from procmon.log, persist it and summarize it.

    Writes process_tree.json next to procmon.log, stores the tree on the
    context and appends a flat per-process summary to the report.
    """
    analysis_dir = context.analysis_dir

    with (analysis_dir / "procmon.log").open("r") as procmon_log:
        process_tree = tree_from_log(procmon_log)

    serialized = json.dumps(process_tree.as_dict())
    (analysis_dir / "process_tree.json").write_text(serialized)

    context.process_tree = process_tree

    def summarize(process):
        # Flat, JSON-serializable view of a single process
        parent = process.parent
        return {
            "seqid": process.seqid,
            "pid": process.pid,
            "parent_seqid": parent.seqid if parent else None,
            "name": process.procname,
            "args": process.args,
            "started_at": process.ts_from,
            "exited_at": process.ts_to,
            "exit_code": process.exit_code,
            "exit_code_str": process.exit_code_str,
            "killed_by": process.killed_by,
        }

    context.update_report(
        {"processes": [summarize(process) for process in process_tree.processes]}
    )
41 |
--------------------------------------------------------------------------------
/drakrun/cli/sanity_check.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import subprocess
3 |
log = logging.getLogger(__name__)


def sanity_check():
    """Verify that the host runs under Xen and the xl toolstack works.

    Returns True when all checks pass, False otherwise.
    """
    log.info("Checking xen-detect...")
    # The code treats exit status 1 of `xen-detect -N` as "running on Xen";
    # any other status means we are not inside a Xen environment.
    xen_detect = subprocess.run("xen-detect -N", shell=True)
    if xen_detect.returncode != 1:
        log.error(
            "It looks like the system is not running on Xen. Please reboot your machine into Xen hypervisor."
        )
        return False

    log.info("Testing if xl tool is sane...")
    quiet_run = dict(
        shell=True,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
        check=True,
    )

    try:
        subprocess.run("xl info", **quiet_run)
    except subprocess.CalledProcessError:
        log.exception(
            "Failed to test xl info command. There might be some dependency problem (please execute 'xl info' manually to find out)."
        )
        return False

    try:
        # SubprocessError also covers the 10s timeout expiring here
        subprocess.run("xl list", timeout=10, **quiet_run)
    except subprocess.SubprocessError:
        log.exception(
            "Failed to test xl list command. There might be a problem with xen services (check 'systemctl status xenstored', 'systemctl status xenconsoled')."
        )
        return False

    return True
49 |
--------------------------------------------------------------------------------
/docs/usage/troubleshooting.rst:
--------------------------------------------------------------------------------
1 | Troubleshooting
2 | ===============
3 |
4 | Debug ``device model did not start``
5 | ------------------------------------
6 |
7 | You may encounter the following error with ``draksetup`` command or ``drakrun-worker@*`` service, which will prevent the VM from starting properly.
8 |
9 | ::
10 |
11 | libxl: error: libxl_create.c:1676:domcreate_devmodel_started: Domain 4:device model did not start: -3
12 | ...
13 | subprocess.CalledProcessError: Command 'xl create /etc/drakrun/configs/vm-0.cfg' returned non-zero exit status 3.
14 |
15 | In such a case, you should inspect ``/var/log/xen/qemu*.log`` in order to determine the actual reason why the VM is refusing to start.
16 |
17 | Debug ``can't allocate low memory for domain``
18 | ----------------------------------------------
19 |
20 | The following error with ``drakrun`` command or ``drakrun-worker@*`` service means that your machine is missing memory resources:
21 |
22 | ::
23 |
24 | xc: error: panic: xc_dom_boot.c:122: xc_dom_boot_mem_init: can't allocate low memory for domain: Out of memory
25 | ...
26 | subprocess.CalledProcessError: Command 'xl create /var/lib/drakrun/configs/vm-0.cfg' returned non-zero exit status 3.
27 |
28 | Resolutions:
29 |
30 | * adjust the amount of memory dedicated to the Dom0 (host system) in ``/etc/default/grub.d/xen.cfg`` (look for ``dom0_mem=2048M,max:2048M``) and run ``update-grub && reboot``
* adjust the amount of memory dedicated to the DomU (guest systems) in ``/etc/drakrun/install.json`` or ``/etc/drakrun/cfg.template`` (``memory`` and ``maxmem`` keys)
32 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/App.css:
--------------------------------------------------------------------------------
/* Global application styles. */

.App {
    text-align: left;
}

.App-logo {
    height: 40vmin;
}

.App-header {
    background-color: #282c34;
    min-height: 100vh;
    display: flex;
    flex-direction: column;
    align-items: center;
    justify-content: center;
    font-size: calc(10px + 2vmin);
    color: white;
}

.App-link {
    color: #09d3ac;
}

/* Monospace rendering for hash values; allow breaking at any character. */
.text-hash {
    font-family:
        SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono",
        "Courier New", monospace;
    word-break: break-all;
}

.apicallTable td {
    padding: 0.5rem;
}

/* Compact single-line badge with ellipsis on overflow. */
.plugin-badge {
    border-radius: 2px;
    margin: 2px;
    font-size: 85%;
    padding: 3px 8px;
    text-overflow: ellipsis;
    overflow: hidden;
    white-space: nowrap;
}

/* Let tooltips grow to fit their single-line content. */
.tooltip-inner {
    max-width: none !important;
    white-space: nowrap;
}

/* Button styled to look like an inline link. */
.btn.btn-inline-link {
    padding: 0;
    margin: 0;
}

.btn.btn-inline-link:hover {
    color: rgb(0, 82, 204);
}

li.selected {
    background-color: rgba(0, 82, 204, 0.1);
    border: 1px rgb(0, 82, 204) solid;
    padding: 2px 4px;
}

/* Overriding element hover */
.optionListContainer > .optionContainer li:hover,
.optionContainer .highlight {
    background: #e9ecef;
    color: #6c757d;
}
/* Ends overriding */

/* Keep modals above all other UI layers. */
.ReactModalPortal {
    z-index: 10000;
    position: absolute;
}
77 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/plugin_base.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any, Dict, List, NamedTuple, Optional, Protocol
3 |
4 | from drakrun.lib.config import DrakrunConfig
5 |
6 | from ..process_tree import ProcessTree
7 |
8 |
class PostprocessContext:
    """Shared state passed through all postprocessing plugins.

    Holds the analysis directory, the daemon configuration, the process
    tree (once built by a plugin) and two accumulators that plugins fill
    in: quick metadata and the verbose report.
    """

    def __init__(self, analysis_dir: pathlib.Path, config: DrakrunConfig) -> None:
        self.analysis_dir = analysis_dir
        self.config = config
        # Set later via the process_tree property setter
        self._process_tree: Optional[ProcessTree] = None
        # Quick metadata fetched along with analysis status
        self.metadata: Dict[str, Any] = {}
        # More verbose data to be placed in report.json
        self.report: Dict[str, Any] = {}

    @property
    def process_tree(self) -> ProcessTree:
        """Return the process tree; raises RuntimeError if no plugin built it yet."""
        if self._process_tree is None:
            raise RuntimeError("Process tree not initialized")
        return self._process_tree

    @process_tree.setter
    def process_tree(self, value: ProcessTree) -> None:
        self._process_tree = value

    def update_metadata(self, metadata: Dict[str, Any]) -> None:
        """Merge plugin-produced entries into the quick metadata dict."""
        self.metadata.update(metadata)

    def update_report(self, report: Dict[str, Any]) -> None:
        """Merge plugin-produced entries into the report dict."""
        self.report.update(report)
34 |
35 |
class PostprocessFunction(Protocol):
    """Structural type of a postprocessing plugin entry point."""

    def __call__(self, context: PostprocessContext) -> None: ...
38 |
39 |
class PostprocessPlugin(NamedTuple):
    """Registration record tying a plugin function to its file dependencies."""

    function: PostprocessFunction
    # Paths that are required by plugin to run
    requires: List[str]
    # Paths that are products of processing and plugin is not run when they exist
    generates: List[str]
46 |
--------------------------------------------------------------------------------
/drakrun/lib/paths.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import shutil
3 |
# Admin-managed configuration files
ETC_DIR = pathlib.Path("/etc/drakrun")
INSTALL_INFO_PATH = ETC_DIR / "install.json"
XL_CFG_TEMPLATE_PATH = ETC_DIR / "cfg.template"
CONFIG_PATH = ETC_DIR / "config.toml"

# Mutable state: snapshots, generated configs, profiles, analysis results
LIB_DIR = pathlib.Path("/var/lib/drakrun")
SNAPSHOT_DIR = LIB_DIR / "volumes"
CONFIGS_DIR = LIB_DIR / "configs"
VMI_PROFILES_DIR = LIB_DIR / "profiles"
PDB_CACHE_DIR = LIB_DIR / "pdb_cache"
ANALYSES_DIR = LIB_DIR / "analyses"
UPLOADS_DIR = LIB_DIR / "uploads"

VMI_INFO_PATH = VMI_PROFILES_DIR / "runtime.json"
VMI_KERNEL_PROFILE_PATH = VMI_PROFILES_DIR / "kernel.json"

# Data/tools shipped inside the installed drakrun package
PACKAGE_DIR = pathlib.Path(__file__).parent.parent.absolute()
PACKAGE_DATA_PATH = PACKAGE_DIR / "data"
PACKAGE_TOOLS_PATH = PACKAGE_DIR / "tools"

RUN_DIR = pathlib.Path("/var/run/drakrun")

# Artifact names — relative names, presumably joined with an analysis
# directory by callers (TODO confirm against users of these constants)
DUMPS_DIR = "dumps"
DUMPS_ZIP = "dumps.zip"
IPT_DIR = "ipt"
IPT_ZIP = "ipt.zip"
30 |
31 |
def make_dirs():
    """Create drakrun's state directories if they don't exist yet.

    NOTE(review): UPLOADS_DIR and RUN_DIR are declared alongside these
    constants but are not created here — presumably handled elsewhere;
    confirm before relying on this function for full bootstrap.
    """
    # ETC_DIR and LIB_DIR are already pathlib.Path objects — the previous
    # pathlib.Path(...) re-wrapping was redundant.
    ETC_DIR.mkdir(exist_ok=True)
    LIB_DIR.mkdir(exist_ok=True)
    # Subdirectories are created after LIB_DIR, so no parents=True needed
    SNAPSHOT_DIR.mkdir(exist_ok=True)
    CONFIGS_DIR.mkdir(exist_ok=True)
    ANALYSES_DIR.mkdir(exist_ok=True)
    VMI_PROFILES_DIR.mkdir(exist_ok=True)
    PDB_CACHE_DIR.mkdir(exist_ok=True)
40 |
41 |
def initialize_config_files():
    """Copy default configuration files from package data on first run.

    Existing files are never overwritten.
    """
    defaults = (
        (XL_CFG_TEMPLATE_PATH, "cfg.template"),
        (CONFIG_PATH, "config.toml"),
    )
    for target_path, default_name in defaults:
        if not target_path.exists():
            shutil.copy(PACKAGE_DATA_PATH / default_name, target_path)
49 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Recursive wildcard helper: expands files matching pattern $2 under dir $1.
rwildcard = $(foreach d,$(wildcard $1*),\
	$(call rwildcard,$d/,$2) \
	$(filter $(subst *,%,$2),$d))
WEB_SOURCE_FILES := $(call rwildcard,drakrun/web/frontend/src/,*.js) $(call rwildcard,drakrun/web/frontend/src/,*.jsx) $(call rwildcard,drakrun/web/frontend/src/,*.css)
PYTHON_SOURCE_FILES := $(call rwildcard,drakrun/,*.py) drakrun/data pyproject.toml MANIFEST.in requirements.txt setup.py

.PHONY: all
all: dist/*.whl

# Build the wheel; non-DIST builds are tagged with the current git short sha.
dist/*.whl: $(PYTHON_SOURCE_FILES) drakrun/web/frontend/dist drakrun/tools/get-explorer-pid drakrun/tools/drakshell/drakshell
	rm -f dist/*.whl
ifndef DIST
	DRAKRUN_VERSION_TAG=$(shell git rev-parse --short HEAD) python3 setup.py bdist_wheel
else
	python3 setup.py bdist_wheel
endif

# Frontend bundle built via vite
drakrun/web/frontend/dist: drakrun/web/frontend/node_modules $(WEB_SOURCE_FILES) drakrun/web/frontend/vite.config.js drakrun/web/frontend/index.html
	cd drakrun/web/frontend ; npm run build

drakrun/web/frontend/node_modules: drakrun/web/frontend/package.json drakrun/web/frontend/package-lock.json
	cd drakrun/web/frontend ; npm ci

# Native helper linked against libvmi and glib
drakrun/tools/get-explorer-pid: drakrun/tools/get-explorer-pid.c
	gcc $< -o $@ -lvmi `pkg-config --cflags --libs glib-2.0`

drakrun/tools/drakshell/drakshell:
	$(MAKE) -C drakrun/tools/drakshell

.PHONY: clean
clean:
	rm -rf dist drakvuf_sandbox.egg-info build
	rm -rf drakrun/web/frontend/dist drakrun/web/frontend/node_modules
	rm -f drakrun/tools/get-explorer-pid drakrun/tools/test-altp2m
	rm -f drakrun/tools/drakshell/drakshell

.PHONY: install
install: all
	pip install dist/*.whl
40 |
--------------------------------------------------------------------------------
/test/utils.py:
--------------------------------------------------------------------------------
1 | import io
2 | import logging
3 |
4 |
def apt_install(c, packages):
    """Install packages noninteractively with apt-get on the remote host `c`."""
    logging.info(f"Installing {packages} with apt")
    package_list = " ".join(packages)
    c.run(
        f"DEBIAN_FRONTEND=noninteractive apt-get install -y {package_list}",
        in_stream=False,
    )
9 |
10 |
def dpkg_install(c, deb_file):
    """Install a local .deb package noninteractively with dpkg on host `c`."""
    logging.info(f"Installing {deb_file} with dpkg")
    command = f"DEBIAN_FRONTEND=noninteractive dpkg -i {deb_file}"
    c.run(command, in_stream=False)
14 |
15 |
def get_file(c, path):
    """Fetch the contents of a remote file as bytes."""
    buffer = io.BytesIO()
    c.get(path, buffer)
    return buffer.getvalue()


def get_hypervisor_type(c):
    """Read the hypervisor type (e.g. "xen") from sysfs on the remote host."""
    raw = get_file(c, "/sys/hypervisor/type")
    return raw.strip().decode()
24 |
25 |
def get_service_info(c, service):
    """Return `systemctl show` properties of a service as a key/value dict."""
    output = c.run(f"systemctl show {service}", hide="out").stdout
    return {
        key: value
        for key, value in (line.split("=", maxsplit=1) for line in output.splitlines())
    }
29 |
30 |
class Drakcore:
    """Minimal HTTP client for the drakcore web API running on the test VM."""

    def __init__(self, drakvuf_vm):
        self.host = f"http://{drakvuf_vm.vm_ip}:6300/"
        self.session = drakvuf_vm.http_session()

    def get(self, endpoint, *args, **kwargs):
        """GET an API endpoint (path relative to the host base URL)."""
        return self.session.get(f"{self.host}{endpoint}", *args, **kwargs)

    def post(self, endpoint, *args, **kwargs):
        """POST to an API endpoint (path relative to the host base URL)."""
        return self.session.post(f"{self.host}{endpoint}", *args, **kwargs)

    def upload(self, sample, timeout):
        """Upload a sample for analysis and return the created task uid."""
        # was f"upload": an f-string without placeholders (lint F541)
        response = self.post(
            "upload", files={"file": sample}, data={"timeout": timeout}
        )
        response.raise_for_status()
        return response.json()["task_uid"]

    def check_status(self, task_uuid):
        """Fetch the current analysis status for a task."""
        response = self.get(f"status/{task_uuid}")
        response.raise_for_status()
        return response.json()

    def analysis_log(self, task_uuid, log_name):
        """Open a streaming response for one of the task's log files."""
        response = self.get(f"logs/{task_uuid}/{log_name}", stream=True)
        response.raise_for_status()
        return response
--------------------------------------------------------------------------------
/test/test_sanity.py:
--------------------------------------------------------------------------------
1 | import time
2 | import json
3 | import pytest
4 |
5 | from utils import get_hypervisor_type, get_service_info, Drakcore
6 | from conftest import DRAKMON_SERVICES
7 |
8 |
@pytest.fixture
def drakcore(drakmon_vm):
    """Drakcore API client bound to the drakmon test VM."""
    return Drakcore(drakmon_vm)
12 |
13 |
def test_running_on_xen(drakmon_ssh):
    """The test VM must report xen as its hypervisor in sysfs."""
    assert get_hypervisor_type(drakmon_ssh) == "xen"
16 |
17 |
def test_services_running(drakmon_ssh):
    """All drakmon services must be loaded and actively running."""

    def assert_all_running():
        # Gather all service states first, then assert on each
        infos = [
            get_service_info(drakmon_ssh, service) for service in DRAKMON_SERVICES
        ]
        for info in infos:
            assert info["LoadState"] == "loaded"
            assert info["ActiveState"] == "active"
            assert info["SubState"] == "running"

    # Wait up to 5 seconds for the services to be up
    for _ in range(5):
        try:
            assert_all_running()
            break
        except AssertionError:
            pass
        time.sleep(1.0)
    else:
        raise Exception("Services down")
37 |
38 |
def test_web_ui_reachable(drakcore):
    """The web UI root endpoint must respond with a success status."""
    response = drakcore.get("/")
    response.raise_for_status()
42 |
43 |
def test_sample_analysis(drakcore):
    """End-to-end: upload a sample and verify it actually executed."""
    task_uuid = drakcore.upload(open("test.exe", "rb"), timeout=120)

    # wait until end of analysis
    while True:
        status = drakcore.check_status(task_uuid)
        if status["status"] != "pending":
            break
        time.sleep(10.0)

    # give it a bit more time?
    time.sleep(10.0)

    # check logs if our binary was run
    response = drakcore.analysis_log(task_uuid, "filetracer")
    for line in response.iter_lines():
        entry = json.loads(line)
        # our sample tried to create a file; some entries may lack
        # "FileName", so default to "" instead of crashing on `in None`
        if entry.get("Method") == "NtCreateFile" and "test.txt" in entry.get(
            "FileName", ""
        ):
            break
    else:
        raise Exception("No matching entry found")
66 |
--------------------------------------------------------------------------------
/drakrun/lib/libvmi/vmi_info.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import re
3 | from dataclasses import dataclass, field
4 | from typing import Optional
5 |
6 | from dataclasses_json import DataClassJsonMixin, config
7 |
8 | log = logging.getLogger(__name__)
9 |
# dataclasses_json field config that serializes ints as hex strings
# (e.g. 4096 <-> "0x1000") on the JSON side.
hexstring = config(
    encoder=lambda v: hex(v),
    decoder=lambda v: int(v, 16),
)
14 |
15 |
@dataclass
class VmiOffsets(DataClassJsonMixin):
    """Kernel offsets discovered by the vmi-win-offsets tool.

    Fields correspond to output defined in
    https://github.com/libvmi/libvmi/blob/master/examples/win-offsets.c
    and are serialized to/from hex strings in JSON.
    """

    win_ntoskrnl: int = field(metadata=hexstring)
    win_ntoskrnl_va: int = field(metadata=hexstring)

    win_tasks: int = field(metadata=hexstring)
    win_pdbase: int = field(metadata=hexstring)
    win_pid: int = field(metadata=hexstring)
    win_pname: int = field(metadata=hexstring)
    win_kdvb: int = field(metadata=hexstring)
    win_sysproc: int = field(metadata=hexstring)
    win_kpcr: int = field(metadata=hexstring)
    win_kdbg: int = field(metadata=hexstring)

    kpgd: int = field(metadata=hexstring)

    @staticmethod
    def from_tool_output(output: str) -> "VmiOffsets":
        """
        Parse vmi-win-offsets tool output and return VmiOffsets.
        If any of the fields is missing, throw TypeError
        """
        found = re.findall(r"^([a-z_]+):(0x[0-9a-f]+)$", output, re.MULTILINE)
        parsed = {name: int(value, 16) for name, value in found}
        return VmiOffsets(**parsed)
44 |
45 |
@dataclass
class VmiInfo(DataClassJsonMixin):
    """Runtime VMI information: kernel offsets plus the injection target."""

    vmi_offsets: VmiOffsets
    inject_pid: int
    inject_tid: Optional[int] = None

    @staticmethod
    def load(file_path: str) -> "VmiInfo":
        """Load VmiInfo from a JSON file at the given path."""
        with open(file_path) as file_obj:
            content = file_obj.read()
        return VmiInfo.from_json(content)
56 |
57 |
@dataclass
class VmiGuidInfo:
    """Windows kernel identification triple.

    NOTE(review): presumably parsed from vmi-win-guid output — confirm
    against the code that constructs this.
    """

    version: str
    guid: str
    filename: str
63 |
--------------------------------------------------------------------------------
/drakrun/lib/drakvuf_cmdline.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 |
3 | from .libvmi import VmiInfo, get_dll_cmdline_args
4 |
5 |
def get_base_drakvuf_cmdline(
    vm_name: str,
    kernel_profile_path: str,
    vmi_info: VmiInfo,
    exec_cmd: Optional[str] = None,
    extra_args: Optional[List[str]] = None,
) -> List[str]:
    """Build the base drakvuf argument vector for the given VM.

    When exec_cmd is given, injection flags are appended so that drakvuf
    executes the command inside the guest (optionally pinned to a thread).
    """
    cmdline = [
        "drakvuf",
        "-o",
        "json",
        # be aware of https://github.com/tklengyel/drakvuf/pull/951
        "-F",  # enable fast singlestep
        "-k",
        hex(vmi_info.vmi_offsets.kpgd),
        "-r",
        kernel_profile_path,
        "-d",
        vm_name,
    ]
    cmdline += get_dll_cmdline_args()
    if exec_cmd is not None:
        cmdline += ["-j", "60", "-i", str(vmi_info.inject_pid), "-e", exec_cmd]
        if vmi_info.inject_tid is not None:
            cmdline += ["-I", str(vmi_info.inject_tid), "--exit-injection-thread"]
    if extra_args:
        cmdline += extra_args
    return cmdline
46 |
47 |
def get_base_injector_cmdline(
    vm_name: str,
    kernel_profile_path: str,
    vmi_info: VmiInfo,
    method: str,
    args: Optional[List[str]] = None,
) -> List[str]:
    """Build the base injector argument vector for the given VM and method.

    Extra method-specific arguments may be passed via `args`.
    """
    cmdline = [
        "injector",
        "-o",
        "json",
        "-d",
        vm_name,
        "-r",
        kernel_profile_path,
        "-i",
        str(vmi_info.inject_pid),
        "-k",
        hex(vmi_info.vmi_offsets.kpgd),
        "-m",
        method,
    ]
    if vmi_info.inject_tid is not None:
        cmdline += ["-I", str(vmi_info.inject_tid)]
    cmdline += args or []
    return cmdline
73 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/AnalysisMetadataTable.jsx:
--------------------------------------------------------------------------------
1 | import { PluginList } from "./PluginPicker.jsx";
2 |
3 | export function AnalysisMetadataTable({ analysis }) {
4 | const startCommand = analysis.options["start_command"] || "-";
5 | return (
6 |
7 |
8 |
9 | File name
10 | {analysis.file.name}
11 |
12 |
13 | SHA256
14 | {analysis.file.sha256}
15 |
16 |
17 | Type
18 | {analysis.file.type}
19 |
20 |
21 | Start command
22 |
23 | {Array.isArray(startCommand)
24 | ? startCommand.join(" ")
25 | : startCommand}
26 |
27 |
28 |
29 | Analysis time
30 | {analysis.options["timeout"]} seconds
31 |
32 |
33 | Job started at
34 | {analysis["time_started"] || "-"}
35 |
36 |
37 | Execution started at
38 | {analysis["time_execution_started"] || "-"}
39 |
40 |
41 | Job finished at
42 | {analysis["time_finished"] || "-"}
43 |
44 |
45 | Plugins
46 |
47 |
48 |
49 |
50 |
51 |
52 | );
53 | }
54 |
--------------------------------------------------------------------------------
/drakrun/lib/libvmi/dlls.py:
--------------------------------------------------------------------------------
1 | from typing import NamedTuple, Optional
2 |
3 | from ..paths import VMI_PROFILES_DIR
4 |
# A DLL profile entry: guest path (relative to C:\, with forward slashes),
# destination profile name under VMI_PROFILES_DIR, and the drakvuf
# command-line flag the generated profile is passed with.
DLL = NamedTuple("DLL", [("path", str), ("dest", str), ("arg", Optional[str])])

# profile file list, without 'C:\' and with '/' instead of '\'
# Profiles required by Drakvuf core
required_dll_file_list = [
    DLL("Windows/System32/ntdll.dll", "native_ntdll_profile", "--json-ntdll"),
    DLL("Windows/SysWOW64/ntdll.dll", "wow64_ntdll_profile", "--json-wow"),
    DLL("Windows/System32/win32k.sys", "native_win32k_profile", "--json-win32k"),
    DLL("Windows/System32/kernel32.dll", "native_kernel32_profile", "--json-kernel32"),
    DLL(
        "Windows/SysWOW64/kernel32.dll",
        "wow64_kernel32_profile",
        "--json-wow-kernel32",
    ),
]

# Profiles required by some Drakvuf plugins
optional_dll_file_list = [
    DLL("Windows/System32/drivers/tcpip.sys", "native_tcpip_profile", "--json-tcpip"),
    DLL("Windows/System32/sspicli.dll", "native_sspicli_profile", "--json-sspicli"),
    DLL(
        "Windows/System32/KernelBase.dll",
        "native_kernelbase_profile",
        "--json-kernelbase",
    ),
    DLL("Windows/System32/IPHLPAPI.DLL", "native_iphlpapi_profile", "--json-iphlpapi"),
    DLL("Windows/System32/mpr.dll", "native_mpr_profile", "--json-mpr"),
    # .NET DLLs aren't present in winsxs and are 32-bit, use x86_prefix
    DLL(
        "Windows/Microsoft.NET/Framework/v4.0.30319/clr.dll",
        "native_clr_profile",
        "--json-clr",
    ),
    DLL(
        "Windows/Microsoft.NET/Framework/v2.0.50727/mscorwks.dll",
        "native_mscorwks_profile",
        "--json-mscorwks",
    ),
]

# Combined list used when generating profiles and building cmdline args
dll_file_list = required_dll_file_list + optional_dll_file_list
46 |
47 |
def get_dll_cmdline_args():
    """Build the --json-* arguments for every DLL profile present on disk.

    Profiles that were never generated are silently skipped.
    """
    profile_args = []
    for dll in dll_file_list:
        profile_path = VMI_PROFILES_DIR / f"{dll.dest}.json"
        if not profile_path.exists():
            continue
        profile_args += [dll.arg, profile_path.as_posix()]
    return profile_args
55 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/postprocess.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import pathlib
4 | from typing import Any, Dict
5 |
6 | from drakrun.lib.config import DrakrunConfig
7 |
8 | from .plugins import POSTPROCESS_PLUGINS
9 | from .plugins.plugin_base import PostprocessContext, PostprocessPlugin
10 |
logger = logging.getLogger(__name__)


def check_plugin_requirements(
    analysis_dir: pathlib.Path, plugin: PostprocessPlugin
) -> bool:
    """Decide whether a postprocessing plugin should run.

    A plugin runs only when all of its required input files exist and
    none of its declared output files have been generated yet.
    """
    plugin_name = plugin.function.__name__
    for required_path in plugin.requires:
        if (analysis_dir / required_path).exists():
            continue
        logger.warning(
            f"{plugin_name} won't be run because {required_path} does not exist"
        )
        return False
    for generated_path in plugin.generates:
        if not (analysis_dir / generated_path).exists():
            continue
        logger.warning(
            f"{plugin_name} won't be run because {generated_path} already exists"
        )
        return False
    return True
31 |
32 |
def run_postprocessing(context: PostprocessContext):
    """Run all registered plugins whose requirements are satisfied.

    A plugin raising an exception is logged and does not abort the rest.
    """
    for plugin in POSTPROCESS_PLUGINS:
        if not check_plugin_requirements(context.analysis_dir, plugin):
            continue
        plugin_name = plugin.function.__name__
        try:
            plugin.function(context)
        except Exception:
            logger.exception(f"{plugin_name} failed with uncaught exception")
42 |
43 |
def append_metadata_to_analysis(
    analysis_dir: pathlib.Path, extra_metadata: Dict[str, Any]
):
    """Merge extra entries into <analysis_dir>/metadata.json.

    Creates the file when missing; existing keys are overwritten by
    extra_metadata on conflict.
    """
    metadata_path = analysis_dir / "metadata.json"
    current: Dict[str, Any] = {}
    if metadata_path.exists():
        current = json.loads(metadata_path.read_text())
    merged = {**current, **extra_metadata}
    metadata_path.write_text(json.dumps(merged))
53 |
54 |
def postprocess_analysis_dir(analysis_dir: pathlib.Path, config: DrakrunConfig):
    """Run the full postprocessing pipeline over an analysis directory.

    Returns the quick metadata accumulated by the plugins.
    """
    ctx = PostprocessContext(analysis_dir=analysis_dir, config=config)
    run_postprocessing(ctx)
    return ctx.metadata
62 |
--------------------------------------------------------------------------------
/drakrun/ipt/ipt_utils.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 |
4 |
class CustomFormatter(logging.Formatter):
    """Logging Formatter to add colors and count warning / errors."""

    grey = "\x1b[38;21m"
    yellow = "\x1b[33;21m"
    red = "\x1b[31;21m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"
    # Renamed from `format`: the old name shadowed the format() method below
    # and only worked because of class-body evaluation order.
    _FMT = "%(levelname)8s - %(message)s"
    # _FMT = "%(asctime)s %(levelname)8s - %(message)s (%(filename)s:%(lineno)d)"

    # Per-level colored format strings
    FORMATS = {
        logging.DEBUG: grey + _FMT + reset,
        logging.INFO: grey + _FMT + reset,
        logging.WARNING: yellow + _FMT + reset,
        logging.ERROR: red + _FMT + reset,
        logging.CRITICAL: bold_red + _FMT + reset,
    }

    def format(self, record):
        """Format the record using the color matching its level."""
        log_fmt = self.FORMATS.get(record.levelno)
        formatter = logging.Formatter(log_fmt)
        return formatter.format(record)
28 |
29 |
# Module-level console handler emitting colored output at DEBUG level.
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(CustomFormatter())

# NOTE(review): logger name "hax" looks like a leftover debugging name —
# consider logging.getLogger(__name__) for consistency with the package.
log = logging.getLogger("hax")
log.setLevel(logging.DEBUG)
log.addHandler(ch)
37 |
38 |
def load_drakvuf_output(path):
    """Load a Drakvuf JSON-lines file, skipping unparseable lines.

    Returns the successfully parsed entries as a list.
    """
    log.info("Parsing %s...", path.name)
    entries = []
    with path.open() as input_file:
        for raw_line in input_file:
            try:
                entries.append(json.loads(raw_line))
            except json.JSONDecodeError:
                log.warning("Failed to parse %s", raw_line)
    log.info("Loaded %d entries", len(entries))
    return entries
50 |
51 |
def hexint(v):
    """Parse a hexadecimal string (e.g. "0x1000") into an int."""
    return int(v, 16)


def get_fault_va(fault):
    """Virtual address of a page-fault record."""
    return hexint(fault["VA"])


def get_fault_pa(fault):
    """Physical address of a page-fault record."""
    return hexint(fault["PA"])


def get_trap_pa(execframe):
    """Physical address of the trap in an exec-frame record."""
    return hexint(execframe["TrapPA"])


def get_frame_va(execframe):
    """Virtual page address of an exec-frame record."""
    return hexint(execframe["PageVA"])


def page_align(addr):
    """Round an address down to its 4 KiB page boundary."""
    return (addr >> 12) << 12


def is_page_aligned(addr):
    """True when the address lies exactly on a 4 KiB page boundary."""
    return addr % 0x1000 == 0


def select_cr3(pred, entries):
    """Lazily yield entries whose CR3 (hex string) satisfies `pred`."""
    return filter(lambda entry: pred(hexint(entry["CR3"])), entries)
82 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/split_drakmon_log.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import re
4 | from collections import Counter
5 |
6 | from .plugin_base import PostprocessContext
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 |
def split_drakmon_log(context: "PostprocessContext") -> None:
    """Split drakmon.log into one log file per DRAKVUF plugin.

    Each line of drakmon.log is expected to be a JSON object carrying a
    "Plugin" key; parsed lines are appended to "<plugin>.log" next to
    drakmon.log. Unparseable lines are copied verbatim to parse_errors.log
    and counted per plugin (best-effort, via a regex heuristic). On success
    the original drakmon.log is removed.
    """
    analysis_dir = context.analysis_dir
    drakmon_log_path = analysis_dir / "drakmon.log"
    error_path = analysis_dir / "parse_errors.log"
    error_file = None
    plugin_files = {}
    failures = Counter()
    # Heuristic for recovering the plugin name from a line that failed to
    # parse. It must be *searched*, not matched: JSON lines start with "{",
    # so an anchored re.match would never find the "Plugin" key.
    plugin_heuristic: bytes = r'"Plugin": "(\w+)"'.encode()

    try:
        with drakmon_log_path.open("rb") as drakmon_log:
            for line in drakmon_log:
                try:
                    line_s = line.strip().decode()
                    obj = json.loads(line_s)

                    plugin = obj.get("Plugin", "unknown")

                    if plugin not in plugin_files:
                        plugin_files[plugin] = (
                            drakmon_log_path.with_name(f"{plugin}.log")
                        ).open("w")

                    plugin_file = plugin_files[plugin]
                    plugin_file.write(json.dumps(obj) + "\n")
                except (UnicodeDecodeError, json.JSONDecodeError):
                    # Log the failure and count statistics
                    match = re.search(plugin_heuristic, line)
                    if match:
                        # we've matched a unicode word, this shouldn't fail
                        plugin = match.group(1).decode("utf-8", "replace")
                    else:
                        plugin = "unknown"

                    failures[plugin] += 1
                    if not error_file:
                        error_file = error_path.open("wb")
                    error_file.write(line)
    finally:
        # Close everything even if splitting blows up halfway through,
        # so no file descriptors leak.
        for plugin_file in plugin_files.values():
            plugin_file.close()
        if error_file:
            error_file.close()

    for plugin, count in failures.items():
        logging.getLogger(__name__).warning(
            "Failed to parse %d lines generated by %s", count, plugin
        )
    # Remove drakmon.log if successfully split
    drakmon_log_path.unlink()
60 |
--------------------------------------------------------------------------------
/docs/usage/managing_snapshots.rst:
--------------------------------------------------------------------------------
1 | ==================
2 | Managing snapshots
3 | ==================
4 |
5 | .. _snapshot-modification:
6 |
7 | Snapshot modification
8 | =====================
9 |
10 | modify-vm0 tool
11 | ---------------
12 |
13 | Before trying to modify the installation, make sure that all ``drakrun-worker@`` services are stopped and VMs are destroyed.
14 |
15 | Execute ``draksetup modify-vm0 begin`` as root. This will run vm-0 and at this point you can connect to VNC
16 | and perform the modifications.
17 |
18 | When you're done, open another terminal window and execute ``draksetup modify-vm0 commit``. The command
19 | will recreate the snapshot and profiles for other virtual machines.
20 |
21 | If modification doesn't go well and you want to rollback vm-0 to the state before modifications, run
22 | ``draksetup modify-vm0 rollback``.
23 |
24 | .. warning::
25 | vm-0 is a base for other virtual machines. Leaving it in a broken or inconsistent state will
26 | result in analysis failures, BSODs and other unexpected errors. When modifying the vm-0 always
27 | make sure to perform the complete commit/rollback step.
28 |
29 | Importing and exporting snapshots
30 | =================================
31 |
32 | You can use ``drakrun snapshot`` command to import/export your VM disk image and memory snapshot.
33 |
34 | .. code-block:: console
35 |
36 | $ drakrun snapshot
37 | Usage: drakrun snapshot [OPTIONS] COMMAND [ARGS]...
38 |
39 | Snapshot management commands (import/export)
40 |
41 | Options:
42 | --help Show this message and exit.
43 |
44 | Commands:
45 | export Export snapshot into local directory
46 | import Import snapshot from local directory
47 |
48 | ``drakrun snapshot import`` accepts similar arguments as the ``drakrun install`` and can be used as an initial configuration command.
49 |
When a snapshot is imported onto a different hardware configuration, restoring it may fail with an error.
51 | In that case, you may need to:
52 |
53 | - cold boot your snapshot using ``drakrun modify-vm0 begin --cold-boot``
54 | - wait for Windows to boot into Desktop
55 | - use ``drakrun modify-vm0 commit`` to make a new VM-0 snapshot and regenerate VMI profile.
56 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/parse_utils.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import logging
3 | import pathlib
4 | import string
5 | from datetime import datetime, timezone
6 | from typing import Callable, Iterator, List, Optional, Union
7 |
8 | import orjson
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
def parse_log(
    log_file: pathlib.Path, filter_cb: Callable[[dict], Optional[dict]]
) -> Iterator[dict]:
    """Stream a DRAKVUF log file, yielding filter_cb's truthy results.

    Lines that cannot be parsed (or for which filter_cb raises) are logged
    with a traceback and skipped.
    """
    with log_file.open("r") as log_handle:
        for index, raw_line in enumerate(log_handle):
            try:
                entry = orjson.loads(raw_line)
                filtered = filter_cb(entry)
                if filtered:
                    yield filtered
            except Exception:
                logger.exception(
                    "Failed to parse line %d of %s", index + 1, log_file.as_posix()
                )
26 |
27 |
def trim_method_name(method: str) -> str:
    """
    WinAPI has two variants for each method using strings: Unicode (W) and ANSI (A).
    We don't care about it, it's easier to trim it from the method name while processing.

    Names shorter than two characters are returned unchanged (previously
    they raised IndexError).
    """
    if (
        len(method) >= 2
        and method[-1] in ("A", "W")
        # Only trim when the suffix follows a lowercase letter or digit, so
        # names that legitimately end in an uppercase pair stay intact.
        and method[-2] in string.ascii_lowercase + string.digits
    ):
        return method[:-1]
    return method
38 |
39 |
def parse_apimon_arguments(args: List[str]) -> List[Union[int, str]]:
    """Decode raw apimon argument strings into Python values.

    Each element looks like "ArgN=<hex>" (decoded to int) or
    "ArgN=<hex>:<repr>" (the part after the first ':' is a Python literal,
    decoded with ast.literal_eval).
    """
    decoded: List[Union[int, str]] = []
    for raw_arg in args:
        if not raw_arg.startswith("Arg"):
            raise RuntimeError(f"Wrong argument format: {raw_arg}")
        value = raw_arg.split("=", 1)[1]
        if ":" in value:
            literal = raw_arg.split(":", 1)[1]
            decoded.append(ast.literal_eval(literal))
        else:
            decoded.append(int(value, 16))
    return decoded
52 |
53 |
def epoch_to_timestring(unix_time: Optional[float]) -> Optional[str]:
    """Convert a unix epoch timestamp to an ISO 8601 string in UTC.

    Example:
        Input: 1716998460.000
        Return: '2024-05-29T16:01:00+00:00'

    (The previous comment showed a space-separated local-time example,
    which is not what datetime.isoformat() produces.)
    """
    if not unix_time:
        # Sometimes the time in the logs would be zero or None;
        # both are treated as "no timestamp".
        return None

    tm = datetime.fromtimestamp(unix_time, tz=timezone.utc)
    return tm.isoformat()
65 |
--------------------------------------------------------------------------------
/drakrun/cli/drakshell.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import sys
3 | import time
4 |
5 | import click
6 |
7 | from drakrun.lib.config import load_config
8 | from drakrun.lib.drakshell import Drakshell
9 | from drakrun.lib.injector import Injector
10 | from drakrun.lib.install_info import InstallInfo
11 | from drakrun.lib.libvmi import VmiInfo
12 | from drakrun.lib.paths import (
13 | INSTALL_INFO_PATH,
14 | PACKAGE_TOOLS_PATH,
15 | VMI_INFO_PATH,
16 | VMI_KERNEL_PROFILE_PATH,
17 | )
18 | from drakrun.lib.vm import VirtualMachine
19 |
20 | from .check_root import check_root
21 |
22 | log = logging.getLogger(__name__)
23 |
24 |
@click.command("drakshell")
@click.option(
    "--vm-id",
    "vm_id",
    default=1,
    type=int,
    show_default=True,
    # Previous help text was copy-pasted from make_profile
    # ("VM id to use for generating profile").
    help="VM id to run the drakshell session on",
)
@click.argument("cmd", nargs=-1, type=str)
@check_root
def drakshell(vm_id, cmd):
    """
    Run drakshell session
    """
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)
    vm = VirtualMachine(vm_id, install_info, config.network)
    if not vm.is_running:
        click.echo("VM is not running", err=True)
        raise click.Abort()
    vmi_info = VmiInfo.load(VMI_INFO_PATH)
    injector = Injector(vm.vm_name, vmi_info, VMI_KERNEL_PROFILE_PATH)

    # Try to attach to an already-injected drakshell first; only inject a
    # fresh shellcode when that fails.
    drakshell = Drakshell(vm.vm_name)
    connected = False
    try:
        drakshell.connect()
        connected = True
    except Exception as e:
        # Lazy %-style args: message is only formatted when actually emitted.
        log.warning("Failed to connect to drakshell: %s", str(e))

    if not connected:
        log.info("Injecting drakshell...")
        drakshell_path = (
            (PACKAGE_TOOLS_PATH / "drakshell" / "drakshell").resolve().as_posix()
        )
        injector.inject_shellcode(drakshell_path)
        log.info("Injected. Trying to connect.")
        # Give the shellcode a moment to set up its end of the channel.
        time.sleep(1)
        drakshell.connect()

    info = drakshell.get_info()
    log.info("Drakshell active on: %s", str(info))

    # Bridge the guest process's stdio to our own and wait for it to finish.
    process = drakshell.run_interactive(cmd, sys.stdin, sys.stdout, sys.stderr)
    exit_code = process.join()
    log.info("Process terminated with exit code %s", exit_code)
73 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/generate_report.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from pathlib import Path
3 | from typing import Dict, Optional
4 |
5 | import orjson
6 |
7 | from ..process_tree import ProcessTree
8 | from .plugin_base import PostprocessContext
9 |
10 | logger = logging.getLogger(__name__)
11 |
12 |
def get_metadata(analysis_dir: Path) -> Dict:
    """Load analysis metadata.

    Currently all metadata lives in a single metadata.json file inside the
    analysis directory.
    """
    metadata_file = analysis_dir / "metadata.json"
    return orjson.loads(metadata_file.read_text())
20 |
21 |
def get_inject_info(analysis_dir: Path, process_tree: Optional[ProcessTree]) -> Dict:
    """Summarize inject.log: how the sample was started inside the guest.

    Returns an empty dict when inject.log is absent or carries an
    unrecognized status.
    """
    inject_file = analysis_dir / "inject.log"
    if not inject_file.exists():
        return {}
    with inject_file.open("r") as f:
        inject = orjson.loads(f.read().strip())
    status = inject.get("Status")
    if status == "Success":
        pid = inject.get("InjectedPid")
        # Map the injected PID onto the process tree when both are available.
        process = (
            process_tree.get_process_for_evtid(pid, 0)
            if pid and process_tree
            else None
        )
        return {
            "status": inject["Status"],
            "process_name": inject.get("ProcessName"),
            "arguments": inject.get("Arguments"),
            "pid": pid,
            "process": process.seqid if process else None,
        }
    if status == "Error":
        return {
            "status": inject["Status"],
            "error_code": inject.get("ErrorCode"),
            "error": inject.get("Error"),
        }
    logger.warning("Unknown status found in inject.log")
    return {}
53 |
54 |
def generate_report(context: PostprocessContext) -> None:
    """Assemble report.json from metadata, inject info and plugin results."""
    analysis_dir = context.analysis_dir
    # NOTE(review): reads the private _process_tree attribute, presumably so a
    # missing tree stays None instead of being built on demand — confirm
    # against PostprocessContext.process_tree semantics.
    process_tree = context._process_tree
    report = {
        "info": get_metadata(analysis_dir),
        "startup": get_inject_info(analysis_dir, process_tree),
        **context.report,
    }

    report_path = analysis_dir / "report.json"
    report_path.write_bytes(orjson.dumps(report, option=orjson.OPT_INDENT_2))
66 |
--------------------------------------------------------------------------------
/drakrun/cli/make_profile.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import click
4 |
5 | from drakrun.lib.config import load_config
6 | from drakrun.lib.drakshell import Drakshell
7 | from drakrun.lib.install_info import InstallInfo
8 | from drakrun.lib.paths import INSTALL_INFO_PATH, VMI_INFO_PATH
9 | from drakrun.lib.vm import VirtualMachine
10 | from drakrun.lib.vmi_profile import create_vmi_info, create_vmi_json_profile
11 |
12 | from .check_root import check_root
13 |
14 | log = logging.getLogger(__name__)
15 |
16 |
@click.command(help="Make VMI profile")
@click.option(
    "--vm-id",
    "vm_id",
    default=1,
    type=int,
    show_default=True,
    help="VM id to use for generating profile",
)
@click.option(
    "--no-restore",
    is_flag=True,
    show_default=True,
    default=False,
    help="Don't restore VM before making profile and don't destroy after, assume it's already running",
)
@check_root
def make_profile(vm_id, no_restore):
    # Build the VMI info and JSON kernel profile from a running VM
    # (restored here unless --no-restore says it is already up).
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)

    vm = VirtualMachine(vm_id, install_info, config.network)
    if not no_restore:
        vm.restore()
    try:
        vmi_info = create_vmi_info(vm, with_drakshell=False)
        if vmi_info.inject_tid:
            try:
                # Sanity-check that the drakshell recorded in the snapshot is
                # actually alive and matches the pid/tid we intend to inject into.
                drakshell = Drakshell(vm.vm_name)
                drakshell.connect()
                drakshell_info = drakshell.get_info()
                assert (
                    drakshell_info["pid"] == vmi_info.inject_pid
                    or drakshell_info["tid"] == vmi_info.inject_tid
                )
            except Exception as e:
                # Stale drakshell state: drop inject_tid so later injection
                # does not target a dead thread.
                log.warning(
                    "Drakshell is not running or has incorrect state. Removing inject_tid. "
                    "Consider making a new vm0 snapshot using modify-vm0 utility.",
                    exc_info=e,
                )
                vmi_info.inject_tid = None
        VMI_INFO_PATH.write_text(vmi_info.to_json(indent=4))

        create_vmi_json_profile(vm, vmi_info)
    finally:
        # Tear the VM down only if we were the ones who restored it.
        if not no_restore:
            vm.destroy()
    log.info("Profile successfully created")
66 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/TabSwitcher.jsx:
--------------------------------------------------------------------------------
1 | import { createContext, Children } from "react";
2 | import { useContext } from "react";
3 |
// Context shared between TabSwitcher and its Tab children; carries at least
// the activeTab value (read by Tab below). Undefined outside a provider.
const TabContext = createContext(undefined);
5 |
export function useTabContext() {
  // Convenience hook so consumers don't need to import TabContext directly.
  const context = useContext(TabContext);
  return context;
}
9 |
10 | export function TabSwitcher({
11 | activeTab,
12 | onTabSwitch,
13 | children,
14 | tabs = undefined,
15 | getHeader = (tab) => tab,
16 | tabClassName = "nav-tabs",
17 | contentClassName = "",
18 | }) {
19 | return (
20 |
21 | <>
22 |
23 |
28 | {(
29 | tabs ??
30 | Children.map(children, (child) => child?.props?.tab)
31 | ).map((tab) => {
32 | return (
33 | onTabSwitch(tab)}
38 | key={`tab-${tab}`}
39 | >
40 | {getHeader(tab)}
41 |
42 | );
43 | })}
44 |
45 |
46 |
50 |
51 | {children}
52 |
53 |
54 | >
55 |
56 | );
57 | }
58 |
export function Tab({ tab, children }) {
  // Render children only when this tab is the currently active one;
  // otherwise render nothing (empty array).
  const { activeTab } = useTabContext();
  if (activeTab !== tab) {
    return [];
  }
  return children;
}
67 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/ProcessInfoTable.jsx:
--------------------------------------------------------------------------------
function epochToTimestamp(timestamp) {
  // Unix epoch is in seconds; Date expects milliseconds.
  const millis = timestamp * 1000;
  return new Date(millis).toISOString();
}
4 |
5 | export function ProcessInfoTable({ processInfo }) {
6 | return (
7 |
8 |
9 |
10 | Process name
11 | {processInfo.procname}
12 |
13 |
14 | PID
15 | {processInfo.pid}
16 |
17 |
18 | PPID
19 | {processInfo.ppid}
20 |
21 |
22 | Arguments
23 |
24 | {Array.isArray(processInfo.args)
25 | ? processInfo.args.join(" ")
26 | : processInfo.args}
27 |
28 |
29 |
30 | Started at
31 |
32 | {processInfo.ts_from ? (
33 | epochToTimestamp(processInfo.ts_from)
34 | ) : (
35 | (running from the beginning of analysis)
36 | )}
37 |
38 |
39 |
40 | Finished at
41 |
42 | {processInfo.ts_to ? (
43 | epochToTimestamp(processInfo.ts_to)
44 | ) : (
45 | (never)
46 | )}
47 |
48 |
49 | {processInfo.exit_code_str ? (
50 |
51 | Exit code
52 |
53 | {processInfo.exit_code_str} (0x
54 | {processInfo.exit_code.toString(16)})
55 |
56 |
57 | ) : (
58 | []
59 | )}
60 |
61 |
62 | );
63 | }
64 |
--------------------------------------------------------------------------------
/docs/faq.rst:
--------------------------------------------------------------------------------
1 | ===================
2 | DRAKVUF Sandbox FAQ
3 | ===================
4 |
5 | Can I run DRAKVUF Sandbox in the cloud?
6 | ---------------------------------------
7 |
8 | We've done some research regarding the deployment of the sandbox in the cloud.
9 | Unfortunately, due to the nature of the project and extensive use of low level CPU features,
10 | none of the popular "instance" services were able to run DRAKVUF.
If you're interested in learning more about the underlying problems, see `relevant issues on GitHub `_.
12 |
13 | However, this doesn't mean that cloud deployment is impossible. You can still leverage modern
14 | deployment techniques and IaC (infrastructure as code) using bare metal servers.
15 |
16 | Tested service providers:
17 |
18 | * `Equinix Metal `_
19 | * `Scaleway Bare Metal `_
20 |
21 | Unfortunately, AWS EC2 Metal seems to be broken at the moment (see `this issue `_).
22 | If you've managed to run DRAKVUF Sandbox on a previously untested cloud service, send us a PR to add it to this list.
23 |
24 | .. _check-cpu:
25 |
26 | How can I verify if my CPU is supported?
27 | ----------------------------------------
28 |
If you're running a fairly recent Intel CPU, it probably has all of the required features.
30 |
31 | 0. Make sure VT-x extensions are enabled in BIOS.
32 | 1. Check virtualization extensions support.
33 |
34 | .. code-block :: console
35 |
36 | $ lscpu | grep vmx
37 |
38 | 2. Check EPT support.
39 |
40 | .. code-block :: console
41 |
42 | $ lscpu | grep ept
43 |
44 | If both flags are present, you're good to go.
45 |
46 | I have an AMD CPU which supports NPT. Can I run DRAKVUF Sandbox?
47 | ----------------------------------------------------------------
48 |
49 | DRAKVUF is tightly coupled with `altp2m `_ feature, implemented
only for Intel CPUs. Thus it's not possible to run it on an AMD CPU.
51 |
52 |
53 | I have some other question
54 | --------------------------
55 |
56 | Feel free to `submit an issue `_, write us an email or contact in any other way.
57 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 | import sphinx_rtd_theme
17 |
18 | import os
19 | import sys
20 | sys.path.insert(0, os.path.abspath('../drakrun'))
21 |
22 |
23 | # -- Project information -----------------------------------------------------
24 |
project = 'DRAKVUF Sandbox'
copyright = '2025, CERT Polska'
author = 'CERT Polska'

# The full version, including alpha/beta/rc tags
release = 'v0.20.0'

# PDF builds go through XeLaTeX instead of the default pdflatex.
latex_engine = 'xelatex'
33 |
34 | def setup(app):
35 | app.add_css_file("theme-overrides.css")
36 |
37 | # -- General configuration ---------------------------------------------------
38 |
39 | # Add any Sphinx extension module names here, as strings. They can be
40 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
41 | # ones.
extensions = [
    "sphinx_rtd_theme",
    "sphinx.ext.graphviz",
]

# Emit graphviz diagrams as SVG.
graphviz_output_format = 'svg'

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
56 |
57 |
58 | # -- Options for HTML output -------------------------------------------------
59 |
60 | # The theme to use for HTML and HTML Help pages. See the documentation for
61 | # a list of builtin themes.
62 | #
html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
69 |
70 |
--------------------------------------------------------------------------------
/drakrun/cli/s3_storage.py:
--------------------------------------------------------------------------------
import logging
import pathlib

import click

from drakrun.lib.config import load_config
from drakrun.lib.paths import ANALYSES_DIR
from drakrun.lib.s3_storage import download_analysis, get_s3_client, upload_analysis
8 |
9 | log = logging.getLogger(__name__)
10 |
11 |
def _get_s3_client():
    """Return (s3_client, bucket_name), aborting the CLI when S3 is unavailable."""
    s3_config = load_config().s3
    if not s3_config:
        click.echo("S3 storage is not configured", err=True)
        raise click.Abort()
    if not s3_config.enabled:
        click.echo("S3 storage is not enabled", err=True)
        raise click.Abort()
    client = get_s3_client(s3_config)
    return client, s3_config.bucket
21 |
22 |
@click.group(name="s3", help="S3 storage utilities")
def s3_storage():
    # Container group for the export/import subcommands below.
    pass
26 |
27 |
@s3_storage.command(name="export", help="Export local analysis to S3 storage")
@click.argument("analysis_id", type=click.UUID)
@click.option(
    "--analysis-dir",
    "analysis_dir",
    default=None,
    type=click.Path(exists=True),
    help=f"Alternative analysis storage path (default is {ANALYSES_DIR.as_posix()})",
)
def s3_export(analysis_id, analysis_dir):
    """Upload a locally stored analysis to the configured S3 bucket."""
    if analysis_dir is None:
        analysis_dir = ANALYSES_DIR
    else:
        # click.Path yields a plain string; convert it so the / operator
        # below works (previously this raised TypeError for --analysis-dir).
        analysis_dir = pathlib.Path(analysis_dir)
    analysis_id = str(analysis_id)
    analysis_path = analysis_dir / analysis_id
    if not analysis_path.exists():
        click.echo(f"Analysis {analysis_id} does not exist", err=True)
        raise click.Abort()
    s3_client, s3_bucket = _get_s3_client()
    upload_analysis(analysis_id, analysis_path, s3_client, s3_bucket)
47 |
48 |
@s3_storage.command(
    name="import", help="Import analysis from S3 storage to local storage"
)
@click.argument("analysis_id", type=click.UUID)
@click.option(
    "--analysis-dir",
    "analysis_dir",
    default=None,
    type=click.Path(exists=True),
    help=f"Alternative analysis storage path (default is {ANALYSES_DIR.as_posix()})",
)
def s3_import(analysis_id, analysis_dir):
    """Download an analysis from the configured S3 bucket into local storage."""
    if analysis_dir is None:
        analysis_dir = ANALYSES_DIR
    else:
        # click.Path yields a plain string; convert it so the / operator
        # below works (previously this raised TypeError for --analysis-dir).
        analysis_dir = pathlib.Path(analysis_dir)
    analysis_id = str(analysis_id)
    analysis_path = analysis_dir / analysis_id
    if analysis_path.exists():
        click.echo(f"Analysis {analysis_id} already exists", err=True)
        raise click.Abort()
    analysis_path.mkdir()
    s3_client, s3_bucket = _get_s3_client()
    download_analysis(analysis_id, analysis_path, s3_client, s3_bucket)
71 |
--------------------------------------------------------------------------------
/drakrun/web/schema.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | from typing import Annotated, List, Optional
3 |
4 | from flask_openapi3 import FileStorage
5 | from pydantic import AfterValidator, BaseModel, Field, RootModel
6 |
7 |
# Standard error envelope returned by API endpoints on failure.
class APIErrorResponse(BaseModel):
    error: str = Field(description="Error message")
10 |
11 |
# Multipart form fields accepted by the sample-upload endpoint.
class UploadFileForm(BaseModel):
    file: FileStorage
    timeout: Optional[int] = Field(default=None, description="Analysis timeout")
    file_name: Optional[str] = Field(default=None, description="Target file name")
    file_path: Optional[str] = Field(default=None, description="Target file path")
    start_command: Optional[str] = Field(default=None, description="Start command")
    # NOTE(review): description says the value arrives as a JSON array string,
    # while the declared type is List[str] — confirm how the form layer decodes it.
    plugins: Optional[List[str]] = Field(
        default=None, description="Plugins to use (in JSON array string)"
    )
    preset: Optional[str] = Field(default=None, description="Analysis settings preset")
    no_internet: Optional[bool] = Field(
        default=False, description="Disable Internet connection"
    )
    no_screenshots: Optional[bool] = Field(
        default=False, description="Disable screenshots"
    )
    extract_archive: Optional[bool] = Field(
        default=False, description="Sample is an archive, extract it"
    )
    archive_password: Optional[str] = Field(
        default=None, description="Archive password"
    )
34 |
35 |
# Response body returned after a successful sample upload.
class UploadAnalysisResponse(BaseModel):
    task_uid: str = Field(description="Unique analysis ID")
38 |
39 |
# Summary of a single analysis as listed/queried via the API.
class AnalysisResponse(BaseModel):
    id: str = Field(description="Unique analysis ID")
    status: str = Field(description="Analysis status")
    time_started: Optional[str] = Field(
        default=None, description="Analysis start time in ISO format"
    )
    time_ended: Optional[str] = Field(
        default=None, description="Analysis end time in ISO format"
    )
49 |
50 |
# The list endpoint returns a bare JSON array of AnalysisResponse objects.
AnalysisListResponse = RootModel[List[AnalysisResponse]]
52 |
53 |
# Path parameter carrying the analysis UUID. The validator both rejects
# malformed values (uuid.UUID raises ValueError) and normalizes the string
# to canonical UUIDv4 form.
class AnalysisRequestPath(BaseModel):
    task_uid: Annotated[str, AfterValidator(lambda x: str(uuid.UUID(x, version=4)))] = (
        Field(description="Unique analysis ID")
    )
58 |
59 |
# Query parameter selecting a file within an analysis directory.
class AnalysisFileRequestQuery(BaseModel):
    filename: str
62 |
63 |
# Path parameters for fetching a whole log of a given type.
class LogsRequestPath(AnalysisRequestPath):
    log_type: str
66 |
67 |
# Path parameters addressing a single process by its tree sequence id.
class ProcessInfoRequestPath(AnalysisRequestPath):
    seqid: int
70 |
71 |
# Path parameters for fetching one process's slice of a given log type.
class ProcessLogsRequestPath(AnalysisRequestPath):
    log_type: str
    seqid: int
75 |
76 |
# Path parameters selecting a screenshot by its index within the analysis.
class ScreenshotRequestPath(AnalysisRequestPath):
    which: int
79 |
--------------------------------------------------------------------------------
/scripts/bump_version.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python3
2 | # bump_version.py 0.1.0
3 | import difflib
4 | import json
5 | import re
6 | import sys
7 | from pathlib import Path
8 |
# Loaded at import time: bump_version.json lives next to this script and maps
# file paths to version-pattern templates (containing a $VERSION placeholder).
CONFIG = json.loads((Path(__file__).parent / "bump_version.json").read_text())
CURRENT_DIR = Path.cwd()
# Resolve the configured relative paths against the invocation directory,
# so the script must be run from the project root.
VERSION_FILES = {
    (CURRENT_DIR / path): pattern for path, pattern in CONFIG["files"].items()
}
VERSION_REGEX = CONFIG["regex"]
15 |
16 |
def main(new_version):
    """Bump the project version in all configured files.

    Shows a unified diff of the planned edits and asks for interactive
    confirmation before writing anything back to disk.
    """
    input_files = {}
    output_files = {}
    old_version = None

    if not re.match(fr"^{VERSION_REGEX}$", new_version):
        print(f"[!] '{new_version}' doesn't match the regex: {VERSION_REGEX}")
        return

    def subst_version(repl):
        # Replace only the captured version group, keeping the surrounding
        # matched text (prefix/suffix) untouched.
        return (
            repl.string[repl.start(0) : repl.start(1)]
            + new_version
            + repl.string[repl.end(1) : repl.end(0)]
        )

    for path in VERSION_FILES.keys():
        if not path.exists():
            print(f"[!] File {str(path)} is missing. Are you in project root dir?")
            return False

        with open(path, "r") as f:
            content = input_files[path] = f.read()

        pattern = VERSION_FILES[path].replace("$VERSION", VERSION_REGEX)
        # re.search instead of next(re.finditer(...)): a missing pattern now
        # produces a clear message rather than an unhandled StopIteration.
        # (A stray debug print(pattern) was also removed here.)
        match = re.search(pattern, content)
        if match is None:
            print(f"[!] No version pattern found in {str(path)}")
            return False
        version = match.group(1)
        output_files[path] = re.sub(pattern, subst_version, content, count=1)

        if old_version is not None and version != old_version:
            print(
                f"[!] {str(path)} contains different version than other files "
                f"({version} != {old_version})"
            )
        old_version = version

    for path in VERSION_FILES.keys():
        input_lines = input_files[path].splitlines()
        output_lines = output_files[path].splitlines()
        if input_lines == output_lines:
            # NOTE(review): aborts for ALL files if any single file is already
            # at the target version — confirm this early-out is intended.
            print("[*] No changes detected.")
            return
        print("=== " + str(path))
        for line in difflib.unified_diff(input_lines, output_lines, lineterm=""):
            print(line)

    response = ""
    while response.lower() not in {"y", "n", "yes", "no"}:
        response = input("[*] Check above diff ^ Is it correct? (y/n): ")

    if response.lower() in {"y", "yes"}:
        for path, content in output_files.items():
            with open(path, "w") as f:
                f.write(content)
        print("[+] Changes applied!")
    else:
        print("[-] Changes discarded.")
74 |
75 |
if __name__ == "__main__":
    # Expect exactly one CLI argument: the new version string.
    if len(sys.argv) < 2:
        print("Usage: bump_version.py [new_version]")
    else:
        main(sys.argv[1])
81 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/get_socket_info.py:
--------------------------------------------------------------------------------
1 | from collections import defaultdict
2 | from typing import Optional
3 |
4 | from .parse_utils import parse_log
5 | from .plugin_base import PostprocessContext
6 |
7 |
def get_socket_info(context: PostprocessContext) -> None:
    """Extract network activity from socketmon.log into the analysis report.

    Aggregates TCP/UDP connections and DNS queries, each deduplicated and
    mapped to the set of process tree seqids that produced them.
    """
    analysis_dir = context.analysis_dir
    process_tree = context.process_tree

    def filter_socketmon(data: dict) -> Optional[dict]:
        # Resolve the emitting process via (PID, EventUID); EventUID is a
        # hex string in the log.
        event_uid = int(data["EventUID"], 16)
        pid = data["PID"]
        process = process_tree.get_process_for_evtid(pid, event_uid)

        # Outbound connection events (both UDP sends and TCP connects).
        if data.get("Method") in [
            "UdpSendMessages",
            "TcpCreateAndConnectTcbComplete",
            "TcpCreateAndConnectTcbRateLimitComplete",
        ]:
            return {
                "process": process,
                "method": "connection",
                "Protocol": data["Protocol"],
                "LocalIp": data["LocalIp"],
                "LocalPort": data["LocalPort"],
                "RemoteIp": data["RemoteIp"],
                "RemotePort": data["RemotePort"],
            }
        elif data.get("Method") == "DnsQueryEx":
            return {
                "process": process,
                "method": "dns-query",
                "DnsName": data["DnsName"],
            }
        else:
            # Any other socketmon event is ignored.
            return None

    socketmon_log = parse_log(analysis_dir / "socketmon.log", filter_socketmon)
    # Deduplicate by 5-tuple / domain; values are sets of process seqids.
    connections = defaultdict(set)
    dns_queries = defaultdict(set)

    for data in socketmon_log:
        if data["method"] == "connection":
            key = (
                data["Protocol"],
                data["LocalIp"],
                data["LocalPort"],
                data["RemoteIp"],
                data["RemotePort"],
            )
            # NOTE(review): assumes get_process_for_evtid never returned None
            # here (would raise AttributeError on .seqid) — confirm.
            connections[key].add(data["process"].seqid)
        elif data["method"] == "dns-query":
            dns_queries[data["DnsName"]].add(data["process"].seqid)

    context.update_report(
        {
            "connections": [
                {
                    "protocol": protocol,
                    "local_ip": local_ip,
                    "local_port": local_port,
                    "remote_ip": remote_ip,
                    "remote_port": remote_port,
                    "process_seqids": list(processes),
                }
                for (
                    protocol,
                    local_ip,
                    local_port,
                    remote_ip,
                    remote_port,
                ), processes in connections.items()
            ],
            "dns_queries": [
                {"domain": domain, "process_seqids": list(processes)}
                for domain, processes in dns_queries.items()
            ],
        }
    )
82 |
--------------------------------------------------------------------------------
/drakrun/analyzer/startup_command.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import pathlib
3 | import random
4 | import string
5 | import unicodedata
6 | from typing import List
7 |
8 | from pathvalidate import Platform, is_valid_filename
9 |
10 | log = logging.getLogger(__name__)
11 |
12 |
def random_filename() -> str:
    """Return a random 10-character alphanumeric file name (no extension)."""
    alphabet = string.ascii_letters + string.digits
    return "".join(random.choice(alphabet) for _ in range(10))
16 |
17 |
def get_target_filename_from_sample_path(sample_path: pathlib.Path) -> str:
    """Derive a safe guest file name for the sample from its original path.

    Falls back to a random name (keeping the extension) when the normalized
    name is empty or not a valid universal filename.

    Raises:
        ValueError: when the sample path has no extension.
    """
    name, extension = sample_path.name, sample_path.suffix
    # pathlib.Path.suffix is "" (never None) for extension-less paths, so the
    # previous `is None` check could never fire and such samples silently got
    # a trailing-dot filename.
    if not extension:
        raise ValueError(
            "Sample path must have extension if target filename is not provided"
        )
    extension = extension[1:].lower()
    # Normalize/remove Unicode characters as current version of Drakvuf
    # isn't really good at handling them in logs
    file_name = (
        unicodedata.normalize("NFKD", name).encode("ascii", "ignore").decode("ascii")
    )
    if file_name and is_valid_filename(file_name, platform=Platform.UNIVERSAL):
        return file_name
    else:
        # Use random filename if name is invalid
        return random_filename() + f".{extension}"
35 |
36 |
def get_startup_argv(
    target_path: str,
) -> List[str]:
    """Build the guest command line used to launch the sample at target_path.

    The launcher is picked by the file extension: DLLs via rundll32,
    scripts via their interpreters, Office documents via `start <app> /t`,
    and anything unrecognized via the shell `start` association.
    """
    extension = target_path.rsplit(".", 1)[-1].lower()
    if extension == "dll":
        return ["rundll32", target_path]
    if extension in ("exe", "bat"):
        return [target_path]
    if extension == "ps1":
        return ["powershell.exe", "-executionpolicy", "bypass", "-File", target_path]
    if is_office_file(extension):
        if is_office_word_file(extension):
            office_app = "winword.exe"
        elif is_office_excel_file(extension):
            office_app = "excel.exe"
        elif is_office_powerpoint_file(extension):
            office_app = "powerpnt.exe"
        else:
            raise RuntimeError(f"Unknown office file extension {extension}.")
        # /t opens the document as a fresh template copy
        return ["cmd.exe", "/C", "start", office_app, "/t", target_path]
    if extension in ("js", "jse", "vbs", "vbe"):
        return ["wscript.exe", target_path]
    if extension in ("hta", "html", "htm"):
        return ["mshta.exe", target_path]
    # Fall back to the shell's default file association.
    return ["cmd.exe", "/C", "start", target_path]
65 |
66 |
def is_office_word_file(extension: str) -> bool:
    """Return True for extensions handled by Microsoft Word."""
    return extension in {"doc", "docm", "docx", "dotm", "rtf"}
69 |
70 |
def is_office_excel_file(extension: str) -> bool:
    """Return True for extensions handled by Microsoft Excel."""
    return extension in {"xls", "xlsx", "xlsm", "xltx", "xltm"}
73 |
74 |
def is_office_powerpoint_file(extension: str) -> bool:
    """Return True for extensions handled by Microsoft PowerPoint."""
    return extension in {"ppt", "pptx"}
77 |
78 |
def is_office_file(extension: str) -> bool:
    """Return True if the extension belongs to any supported Office application."""
    office_checks = (
        is_office_word_file,
        is_office_excel_file,
        is_office_powerpoint_file,
    )
    return any(check(extension) for check in office_checks)
85 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/__init__.py:
--------------------------------------------------------------------------------
1 | from drakrun.lib.paths import DUMPS_DIR, DUMPS_ZIP, IPT_DIR, IPT_ZIP
2 |
3 | from .build_process_tree import build_process_tree
4 | from .capa_plugin.capa_processor import capa_analysis
5 | from .compress_ipt import compress_ipt
6 | from .generate_report import generate_report
7 | from .generate_wireshark_key_file import generate_wireshark_key_file
8 | from .get_http_info import get_http_info
9 | from .get_modified_files_info import get_modified_files_info
10 | from .get_socket_info import get_socket_info
11 | from .get_ttps_info import get_ttps_info
12 | from .gzip_syscalls import gzip_syscalls
13 | from .index_logs import index_logs
14 | from .plugin_base import PostprocessPlugin
15 | from .process_dumps import process_dumps
16 | from .screenshot_metadata import screenshot_metadata
17 | from .split_drakmon_log import split_drakmon_log
18 |
# Declarative registry of postprocessing steps executed after an analysis run.
# Each PostprocessPlugin lists the artifact files it needs (`requires`) and the
# files it produces (`generates`); names are relative to the analysis
# directory, except where a path constant (DUMPS_DIR, IPT_DIR, ...) is used.
# Entries are ordered so producers come before consumers — build_process_tree
# emits process_tree.json, which most of the later report plugins require.
# NOTE(review): the exact run/skip semantics (e.g. whether a plugin is skipped
# when its `generates` files already exist — hinted by the "Always regenerate"
# comment below) are implemented in drakrun.analyzer.postprocessing.postprocess;
# confirm there before relying on this description.
POSTPROCESS_PLUGINS = [
    PostprocessPlugin(
        function=split_drakmon_log, requires=["drakmon.log"], generates=[]
    ),
    PostprocessPlugin(
        function=generate_wireshark_key_file,
        requires=["tlsmon.log"],
        generates=["wireshark_key_file.txt"],
    ),
    PostprocessPlugin(
        function=build_process_tree,
        requires=["procmon.log"],
        generates=[],  # Always regenerate
    ),
    PostprocessPlugin(
        function=capa_analysis,
        requires=[
            "process_tree.json",
            "inject.log",
        ],
        generates=["ttps.json"],
    ),
    PostprocessPlugin(
        function=screenshot_metadata,
        requires=["screenshots.json"],
        generates=[],
    ),
    PostprocessPlugin(
        function=process_dumps,
        requires=[DUMPS_DIR, "memdump.log", "process_tree.json"],
        generates=[DUMPS_ZIP],
    ),
    PostprocessPlugin(function=compress_ipt, requires=[IPT_DIR], generates=[IPT_ZIP]),
    PostprocessPlugin(
        function=gzip_syscalls,
        requires=["syscall.log"],
        generates=["syscall.log.gz", "sysret.log.gz"],
    ),
    PostprocessPlugin(
        function=get_http_info,
        requires=["process_tree.json", "apimon.log"],
        generates=[],
    ),
    PostprocessPlugin(
        function=get_modified_files_info,
        requires=["process_tree.json", "filetracer.log"],
        generates=[],
    ),
    PostprocessPlugin(
        function=get_socket_info,
        requires=["process_tree.json", "socketmon.log"],
        generates=[],
    ),
    PostprocessPlugin(
        function=get_ttps_info,
        requires=["process_tree.json", "ttps.json"],
        generates=[],
    ),
    # generate_report has no declared requirements: it aggregates whatever the
    # previous plugins contributed to the report.
    PostprocessPlugin(
        function=generate_report,
        requires=[],
        generates=[],
    ),
    PostprocessPlugin(
        function=index_logs, requires=["process_tree.json"], generates=["log_index"]
    ),
]
86 |
--------------------------------------------------------------------------------
/drakrun/analyzer/run_tools.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import pathlib
3 | import subprocess
4 | from typing import List, Optional
5 |
6 | from drakrun.analyzer.screenshotter import Screenshotter
7 | from drakrun.lib.config import NetworkConfigSection
8 | from drakrun.lib.drakvuf_cmdline import get_base_drakvuf_cmdline
9 | from drakrun.lib.install_info import InstallInfo
10 | from drakrun.lib.libvmi import VmiInfo
11 | from drakrun.lib.network_info import NetworkInfo
12 | from drakrun.lib.networking import start_tcpdump_collector
13 | from drakrun.lib.vm import VirtualMachine
14 |
15 |
@contextlib.contextmanager
def process_graceful_exit(proc: subprocess.Popen, termination_timeout: int = 5):
    """Yield proc and guarantee it is shut down when the context exits.

    On exit, SIGTERM is sent first; if the process does not terminate
    within termination_timeout seconds, it is force-killed.
    """

    def _shutdown() -> None:
        # Ask politely first, then escalate to SIGKILL if the process ignores us.
        proc.terminate()
        try:
            proc.wait(termination_timeout)
        except subprocess.TimeoutExpired:
            proc.kill()
            proc.wait(termination_timeout)

    try:
        yield proc
    finally:
        _shutdown()
27 |
28 |
def run_tcpdump(network_info: NetworkInfo, output_file: pathlib.Path):
    """Start a tcpdump capture on the analysis bridge.

    Returns a context manager that terminates the capture process on exit.
    """
    collector = start_tcpdump_collector(network_info.bridge_name, output_file)
    return process_graceful_exit(collector)
33 |
34 |
@contextlib.contextmanager
def run_drakvuf(
    vm_name: str,
    vmi_info: VmiInfo,
    kernel_profile_path: str,
    output_file: pathlib.Path,
    drakvuf_args: List[str],
    exec_cmd: Optional[str] = None,
    cwd: Optional[pathlib.Path] = None,
):
    """Run DRAKVUF against vm_name, streaming its stdout into output_file.

    Yields the running Popen object; on context exit the process is
    gracefully terminated (SIGTERM, then SIGKILL).
    """
    cmdline = get_base_drakvuf_cmdline(
        vm_name,
        kernel_profile_path,
        vmi_info,
        exec_cmd=exec_cmd,
        extra_args=drakvuf_args,
    )

    # Write raw stdout directly to the log file; the graceful-exit guard
    # ensures DRAKVUF is stopped even if the analysis body raises.
    with output_file.open("wb") as log_output:
        process = subprocess.Popen(cmdline, stdout=log_output, cwd=cwd)
        with process_graceful_exit(process):
            yield process
57 |
58 |
@contextlib.contextmanager
def run_vm(
    vm_id: int,
    install_info: InstallInfo,
    network_conf: NetworkConfigSection,
    no_restore: bool = False,
):
    """Yield a running VirtualMachine for the given vm_id.

    With no_restore=True the VM must already be running and is left running
    afterwards; otherwise it is restored from snapshot and destroyed on exit.
    """
    machine = VirtualMachine(vm_id, install_info, network_conf)
    if no_restore:
        # Caller manages the VM lifecycle; just verify it is actually up.
        if not machine.is_running:
            raise RuntimeError(f"Virtual machine {machine.vm_name} is not running")
        yield machine
        return
    machine.restore()
    try:
        yield machine
    finally:
        machine.destroy()
77 |
78 |
@contextlib.contextmanager
def run_screenshotter(
    vm_id: int,
    install_info: InstallInfo,
    output_dir: pathlib.Path,
    enabled: bool = True,
):
    """Run a VNC screenshotter for the VM while the context is active.

    When enabled is False this is a no-op context manager. The VNC port is
    derived from the VM id (5900 + vm_id).
    """
    if not enabled:
        yield
        return
    shooter = Screenshotter(
        output_dir=output_dir,
        vnc_host="localhost",
        vnc_port=5900 + vm_id,  # one VNC display per VM id
        vnc_password=install_info.vnc_passwd,
    )
    try:
        # start() stays inside the try so a failed startup still reaches stop().
        shooter.start()
        yield
    finally:
        shooter.stop()
100 |
--------------------------------------------------------------------------------
/drakrun/lib/libvmi/libvmi.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import pathlib
3 | import re
4 | import subprocess
5 |
6 | from drakrun.lib.paths import PACKAGE_TOOLS_PATH
7 |
8 | from .vmi_info import VmiGuidInfo, VmiOffsets
9 |
10 | log = logging.getLogger(__name__)
11 |
12 |
def get_vmi_kernel_guid(vm_name: str) -> VmiGuidInfo:
    """Query vmi-win-guid for the guest kernel version, PDB GUID and filename.

    Raises RuntimeError when the tool output doesn't contain all three fields.
    """
    completed = subprocess.run(
        ["vmi-win-guid", "name", vm_name],
        timeout=30,
        capture_output=True,
    )
    output = completed.stdout.decode()

    matches = [
        re.search(pattern, output)
        for pattern in (
            r"Version: (.*)",
            r"PDB GUID: ([0-9a-f]+)",
            r"Kernel filename: ([a-z]+\.[a-z]+)",
        )
    ]
    if not all(matches):
        raise RuntimeError("Invalid vmi-win-guid output")

    return VmiGuidInfo(*(match.group(1) for match in matches))
30 |
31 |
def extract_vmi_offsets(
    domain: str, kernel_profile_path: pathlib.Path, timeout: int = 30
) -> VmiOffsets:
    """Call vmi-win-offsets helper and obtain VmiOffsets values.

    Args:
        domain: Xen domain name of the guest.
        kernel_profile_path: path to the kernel JSON profile.
        timeout: helper timeout in seconds.

    Raises:
        RuntimeError: when the helper fails, times out, or produces
            unusable output. The original exception is chained as __cause__.
    """
    try:
        output = subprocess.check_output(
            [
                "vmi-win-offsets",
                "--name",
                domain,
                "--json-kernel",
                kernel_profile_path.as_posix(),
            ],
            timeout=timeout,
        ).decode("utf-8", "ignore")
    except TypeError as err:
        raise RuntimeError("Invalid output of vmi-win-offsets") from err
    except subprocess.CalledProcessError as err:
        # Chain the cause so the exit code/stderr aren't lost in tracebacks.
        raise RuntimeError("vmi-win-offsets exited with an error") from err
    except subprocess.TimeoutExpired as err:
        raise RuntimeError("vmi-win-offsets timed out") from err
    except Exception as err:
        raise RuntimeError("Extracting VMI offsets failed") from err

    return VmiOffsets.from_tool_output(output)
57 |
58 |
def extract_explorer_pid(
    domain: str,
    kernel_profile_path: pathlib.Path,
    vmi_offsets: VmiOffsets,
    timeout: int = 30,
) -> int:
    """Run the bundled get-explorer-pid helper and return explorer.exe's PID.

    Args:
        domain: Xen domain name of the guest.
        kernel_profile_path: path to the kernel JSON profile.
        vmi_offsets: offsets previously extracted; only kpgd is passed on.
        timeout: helper timeout in seconds.

    Raises:
        RuntimeError: when the helper binary is missing, fails, times out,
            or its output doesn't contain an explorer.exe PID. The original
            exception is chained as __cause__.
    """
    pid_tool = PACKAGE_TOOLS_PATH / "get-explorer-pid"
    if not pid_tool.exists():
        raise RuntimeError(
            "get-explorer-pid not found, draktools package is not built with tools"
        )
    try:
        explorer_pid_s = subprocess.check_output(
            [
                pid_tool.as_posix(),
                domain,
                kernel_profile_path.as_posix(),
                hex(vmi_offsets.kpgd),
            ],
            timeout=timeout,
        ).decode("utf-8", "ignore")
    except subprocess.CalledProcessError as err:
        # Chain the cause so the exit code/stderr aren't lost in tracebacks.
        raise RuntimeError("get-explorer-pid exited with an error") from err
    except subprocess.TimeoutExpired as err:
        raise RuntimeError("get-explorer-pid timed out") from err
    except Exception as err:
        raise RuntimeError("Extracting explorer PID failed") from err

    m = re.search(r"explorer\.exe:([0-9]+)", explorer_pid_s)
    if m is None:
        raise RuntimeError("Explorer PID not found in output")

    return int(m.group(1))
92 |
--------------------------------------------------------------------------------
/docs/whats_changed.rst:
--------------------------------------------------------------------------------
1 | ===============================
2 | What's changed, how to upgrade?
3 | ===============================
4 |
5 | v0.20.0
6 | -------
7 |
8 | This release mostly fixes the bugs found in v0.19.0.
9 |
10 | The new addition is an experimental "Extract archive" option for guest-side archive extraction using Expand-Archive or 7-Zip installed on guest VM. It works well, but it's still WIP so it's not yet documented and may change in the future.
11 |
12 | This version was tested using `DRAKVUF v1.1-f619440 `_.
13 |
14 | Complete changelog can be found here: `v0.20.0 changelog `_.
15 |
16 | v0.19.0
17 | -------
18 |
19 | v0.19.0 is a complete rewrite compared to v0.18.x. That's why it's recommended to start from scratch
20 | and bring up a new instance.
21 |
22 | Not everything changed though and you may still try to reuse your guest disk image or parts of your previous configuration.
Here is the list of the most crucial changes compared to v0.18.x:
24 |
25 | - There is no built-in Karton integration. The main interface for interacting with sandbox is Web UI/API.
26 | - Analyses are by default stored locally in ``/var/lib/drakrun/analyses``. S3 integration is optional.
27 | - There is no ``drakplayground``. Former ``draksetup`` CLI command is now ``drakrun`` and comes with a rich toolset for configuration and debugging.
28 | - Volume structure has not changed, so if you use e.g. qcow2 backend, you will still find ``vm-0.img`` in ``/var/lib/drakrun/volumes``.
29 | ``snapshot.sav`` is still there as well.
30 | - ``/etc/drakrun`` changes:
31 |
32 | - ``config.ini`` is now ``config.toml``. Configuration structure changed significantly, so you can't apply previous configuration file directly.
33 | - XL template is moved from ``scripts/cfg.template`` to ``cfg.template``. There is an additional serial port device that is required for drakshell.
34 | - VNC password was moved from ``cfg.template`` to ``install.json``. ``install.json`` should keep all variables that
35 | are applied on ``cfg.template``
36 | - There is no ``configs`` dir, generated configurations are moved to ``/var/lib/drakrun/configs`` and should not be changed by user.
37 |
38 | - Analysis files structure is a bit different:
39 |
40 | - There are no `apicall` and `index` directories. Per-process logs are indexed using ``log_index`` file. It's a binary file so if you want to check its structure, check the ``drakrun.analyzer.postprocessing.indexer`` module.
41 | - ``dumps.zip`` doesn't contain ``.metadata`` files. More information about dumps can be found in ``metadata.json`` and ``report.json`` files
42 | - S3 directories are additionally prefixed with the first 4 letters of the UUID ``0/f/2/9/0f29ae1f-322a-496a-a79e-92d3a859053d/<...>`` and we call it "hash pathing", because same thing is done in MWDB S3 integration.
43 | Some S3 backends map the object name directly to the file-system hierarchy, so this naming highly increases S3 operation performance.
44 | - Other files should follow the same convention as in previous versions.
45 |
46 | - Drakvuf Sandbox Web UI and API changed a lot, but API is documented in ``http:///openapi/swagger``
47 |
--------------------------------------------------------------------------------
/drakrun/cli/modify_vm0.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import pathlib
3 | import shutil
4 | import tempfile
5 |
6 | import click
7 |
8 | from drakrun.lib.config import load_config
9 | from drakrun.lib.install_info import InstallInfo
10 | from drakrun.lib.paths import INSTALL_INFO_PATH
11 | from drakrun.lib.vm import VirtualMachine
12 | from drakrun.lib.vmi_profile import create_vmi_info, create_vmi_json_profile
13 |
14 | from .banner import banner
15 | from .check_root import check_root
16 |
17 | log = logging.getLogger(__name__)
18 |
19 |
@click.group(name="modify-vm0", help="Modify base VM snapshot (vm-0)")
@check_root
def modify_vm0():
    # Click group container for the begin/commit/rollback subcommands below;
    # check_root enforces that all of them run as root.
    pass
24 |
25 |
@modify_vm0.command(name="begin", help="Safely restore vm-0 for modification")
@click.option(
    "--cold-boot",
    "cold_boot",
    is_flag=True,
    default=False,
    help="Cold-boot vm-0 instead of restoring from snapshot",
)
def begin_modify_vm0(cold_boot):
    """Restore (or cold-boot) vm-0 so the base image can be modified over VNC."""
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)

    vm0 = VirtualMachine(0, install_info, config.network)

    # Internally, it's expected that VM-0 will be restored from vm-modify snapshot
    vm0.restore(cold_boot=cold_boot)
    # NOTE(review): the banner text still instructs users to run 'draksetup',
    # but the CLI appears to have been renamed to 'drakrun' (see
    # docs/whats_changed.rst) — confirm and update the message if so.
    banner(
        f"""
        Initial VM setup is complete and the vm-0 was launched.
        Please now VNC to the port 5900 on this machine to perform Windows installation.
        After you have installed Windows and booted it to the desktop, please execute:
        - 'draksetup modify-vm0 commit' to apply your modification to the base image
        - 'draksetup modify-vm0 rollback' to rollback your changes
        Your configured VNC password is:
        {install_info.vnc_passwd}
        """
    )
53 |
54 |
@modify_vm0.command(name="commit", help="Commit changes made during vm-0 modification")
def commit_modify_vm0():
    """Save the modified vm-0 state as the new base snapshot and rebuild the VMI profile.

    Saves the memory snapshot to a temporary location first, commits the
    modified persistent storage, then atomically moves the snapshot into
    place. Finally a worker VM (vm-1) is restored from the fresh snapshot
    to regenerate the VMI JSON profile.
    """
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)

    vm0 = VirtualMachine(0, install_info, config.network)
    tmp_path = pathlib.Path(tempfile.gettempdir())
    temporary_snapshot_path = tmp_path / "snapshot.sav"
    target_snapshot_path = pathlib.Path(install_info.snapshot_dir) / "snapshot.sav"
    try:
        vmi_info = create_vmi_info(vm0)
        vm0.save(temporary_snapshot_path.as_posix())
        # Fixed typo in log message: "succesfully" -> "successfully"
        log.info("Snapshot was saved successfully.")

        # Memory state is frozen, we can't do any writes to persistent storage
        log.info("Committing persistent memory...")
        vm0.storage.commit_vm0_modify_storage()
        shutil.move(temporary_snapshot_path, target_snapshot_path)
    finally:
        # No-op when the move above succeeded; cleans up the temporary
        # snapshot if any step in the try block failed.
        temporary_snapshot_path.unlink(missing_ok=True)

    vm = VirtualMachine(1, install_info, config.network)
    vm.restore()
    try:
        create_vmi_json_profile(vm, vmi_info)
    finally:
        vm.destroy()
    log.info("Profile successfully created")
83 |
84 |
@modify_vm0.command(
    name="rollback", help="Rollback changes made during vm-0 modification"
)
def rollback_modify_vm0():
    """Discard changes made during a vm-0 modify session.

    Destroys the running vm-0 and deletes the modify-session storage via the
    storage backend; the previously committed snapshot remains in use.
    """
    config = load_config()
    install_info = InstallInfo.load(INSTALL_INFO_PATH)

    vm0 = VirtualMachine(0, install_info, config.network)
    vm0.destroy()
    vm0.storage.delete_vm0_modify_storage()
95 |
--------------------------------------------------------------------------------
/drakrun/analyzer/analysis_options.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any, Dict, List, Optional, Union
3 |
4 | from pydantic import BaseModel
5 |
6 | from drakrun.lib.config import DrakrunConfig
7 |
8 |
class AnalysisOptions(BaseModel):
    """Options controlling a single analysis run.

    Built from user-supplied keyword arguments merged with preset defaults
    taken from the DrakrunConfig (see ``_apply_defaults``): explicitly
    provided (non-None) values win over configured defaults, and
    ``net_enable`` is additionally clamped by the global network setting.
    """

    # Host sample path
    sample_path: Optional[pathlib.Path] = None
    # Target file name on guest VM
    target_filename: Optional[str] = None
    # Target filepath on guest VM
    target_filepath: pathlib.PureWindowsPath = pathlib.PureWindowsPath(
        "%USERPROFILE%\\Desktop\\"
    )
    # Start command to run on the VM
    start_command: Optional[Union[List[str], str]] = None
    # Preset of defaults to be used for analysis
    preset: Optional[str] = None
    # Plugins to enable
    plugins: List[str]
    # Alternative hooks list for apimon
    apimon_hooks_path: Optional[pathlib.Path] = None
    # Alternative syscall list for apimon
    syscall_hooks_path: Optional[pathlib.Path] = None
    # Analysis timeout
    timeout: Optional[int] = None
    # Job timeout leeway for worker
    job_timeout_leeway: Optional[int] = None
    # networking: Enable Internet access
    net_enable: bool
    # extra arguments for Drakvuf command line
    extra_drakvuf_args: Optional[Dict[str, Any]] = None
    # extra directories to create in output dir
    extra_output_subdirs: Optional[List[str]] = None
    # Don't restore/destroy the VM
    no_vm_restore: Optional[bool] = None
    # Don't run a post-restore script
    no_post_restore: Optional[bool] = None
    # Don't make screenshots during analysis
    no_screenshotter: Optional[bool] = None
    # If .zip archive is passed, extract it before analysis
    extract_archive: Optional[bool] = None
    # Archive password for 'extract_archive' function
    archive_password: Optional[str] = None

    @staticmethod
    def _apply_defaults(
        config: DrakrunConfig, options: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Merge explicit options with preset defaults from configuration.

        Non-None values in ``options`` take precedence over the preset
        defaults; ``net_enable`` is forced to False when networking is
        globally disabled.
        """
        defaults = config.get_drakrun_defaults(options.get("preset"))
        if not config.network.net_enable:
            # If network access is globally disabled, enforce net_enable=False
            net_enable = False
        else:
            # If network access is globally enabled, use value from options
            net_enable = options.get("net_enable", defaults.net_enable)
            # If unset, set True
            if net_enable is None:
                net_enable = True
        defaults_dict = dict(defaults)
        return {
            **options,
            **{
                key: (
                    options.get(key)
                    if options.get(key) is not None
                    else defaults_dict[key]
                )
                for key in defaults_dict.keys()
            },
            **dict(net_enable=net_enable),
        }

    def __init__(self, config: DrakrunConfig, **kwargs):
        """Build options from keyword arguments, filling gaps from config defaults."""
        super().__init__(
            **self._apply_defaults(config, kwargs),
        )

    def to_dict(self, exclude_none: bool = True) -> Dict[str, Any]:
        """Serialize options to a JSON-compatible dict.

        NOTE(review): "vm_id" and "output_dir" are excluded here but are not
        declared as fields on this model — presumably set dynamically on
        instances or left over from a refactor; confirm before removing.
        """
        return self.model_dump(
            mode="json",
            exclude={"vm_id", "output_dir"},
            exclude_none=exclude_none,
        )
88 |
--------------------------------------------------------------------------------
/drakrun/analyzer/postprocessing/plugins/get_modified_files_info.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from collections import defaultdict
3 | from typing import Optional
4 |
5 | from .parse_utils import parse_log
6 | from .plugin_base import PostprocessContext
7 |
8 |
def get_modified_files_info(context: PostprocessContext) -> None:
    """Extract modified/deleted file information from filetracer.log.

    Correlates NtCreateFile/NtOpenFile handles (opened with a write or
    delete access right) with subsequent NtWriteFile and
    NtSetInformationFile(FileDispositionInformation) events, then merges
    "modified_files" and "deleted_files" lists into the analysis report.
    """
    analysis_dir = context.analysis_dir
    process_tree = context.process_tree

    def filter_modified_files(data: dict) -> Optional[dict]:
        # Map raw filetracer events onto open/write/delete operations;
        # returns None for events that are irrelevant here.
        if data.get("Method") == "NtSetInformationFile":
            if not data.get("Operation") == "FileDispositionInformation":
                return None
            method = "delete"
        elif data.get("Method") in ["NtCreateFile", "NtOpenFile"]:
            # Robustness fix: use defaults so malformed log entries that
            # lack DesiredAccess/FileName are skipped instead of raising
            # AttributeError on None.
            desired_access = data.get("DesiredAccess", "").split(" | ")
            if not any(
                access
                in ["GENERIC_WRITE", "FILE_WRITE_DATA", "FILE_APPEND_DATA", "DELETE"]
                for access in desired_access
            ):
                return None
            filename = data.get("FileName", "").lower()
            if not filename.startswith("\\??\\"):
                return None
            # Only track absolute paths with a drive letter.
            path = pathlib.PureWindowsPath(filename[len("\\??\\") :])
            if not path.drive:
                return None
            method = "open"
        elif data.get("Method") == "NtWriteFile":
            method = "write"
        else:
            return None

        event_uid = int(data["EventUID"], 16)
        pid = data["PID"]
        process = process_tree.get_process_for_evtid(pid, event_uid)

        return {
            "process": process,
            "handle": data["FileHandle"],
            "file_name": data["FileName"],
            "method": method,
        }

    modified_files_log = parse_log(
        analysis_dir / "filetracer.log", filter_modified_files
    )

    # (seqid, handle) -> filename for handles opened with write/delete access
    opened_files = {}
    modified_files = defaultdict(set)
    deleted_files = defaultdict(set)

    for data in modified_files_log:
        seqid = data["process"].seqid
        key = (seqid, data["handle"])
        if data["method"] == "open":
            opened_files[key] = data["file_name"]
        elif key in opened_files:
            filename = opened_files[key]
            # Prefetch files are constantly touched by Windows: too noisy.
            if filename.lower().startswith("\\??\\c:\\windows\\prefetch"):
                continue
            if filename.startswith("\\??\\"):
                filename = filename[len("\\??\\") :]
            if data["method"] == "delete":
                deleted_files[filename].add(seqid)
            elif data["method"] == "write":
                modified_files[filename].add(seqid)

    context.update_report(
        {
            "modified_files": [
                {
                    "filename": filename,
                    "process_seqids": sorted(modified_files[filename]),
                }
                for filename in sorted(modified_files)
            ],
            "deleted_files": [
                {
                    "filename": filename,
                    "process_seqids": sorted(deleted_files[filename]),
                }
                for filename in sorted(deleted_files)
            ],
        }
    )
91 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: "Build and test Drakvuf Sandbox"
2 | on:
3 | push:
4 | branches:
5 | - master
6 | pull_request:
7 | branches:
8 | - master
9 | jobs:
10 | build_docs:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: Set up Python 3.11
15 | uses: actions/setup-python@v5
16 | with:
17 | python-version: "3.11"
18 | - name: Install dependencies
19 | working-directory: docs
20 | run: pip install -r requirements.txt
21 | - name: Build docs
22 | working-directory: docs
23 | run: make html
24 | lint_drakrun:
25 | runs-on: ubuntu-latest
26 | steps:
27 | - uses: actions/checkout@v3
28 | - uses: CERT-Polska/lint-python-action@v2
29 | with:
30 | python-version: 3.9
31 | build_drakrun_tools:
32 | runs-on: ubuntu-latest
33 | container: "debian:bookworm"
34 | steps:
35 | - name: Install git and wget
36 | run: |
37 | export DEBIAN_FRONTEND=noninteractive
38 | apt-get update && apt-get install -y -q git wget
39 | - uses: actions/checkout@v4
40 | with:
41 | submodules: recursive
42 | - name: Build libvmi
43 | run: |
44 | apt install -y make cmake gcc libglib2.0-dev libjson-c-dev autoconf-archive libtool libxen-dev flex bison nasm
45 | cd drakvuf/libvmi
46 | autoreconf -vif
47 | ./configure --disable-kvm --disable-bareflank --disable-file
48 | make install
49 | - name: Make drakvuf tools
50 | run: |
51 | make -C drakrun/tools
52 | - uses: actions/upload-artifact@v4
53 | with:
54 | name: drakrun-tools
55 | path: |
56 | drakrun/tools/get-explorer-pid
57 | drakrun/tools/drakshell/drakshell
58 | build_drakrun_web:
59 | runs-on: ubuntu-latest
60 | container: "node:18"
61 | steps:
62 | - uses: actions/checkout@v4
63 | - name: Install web dependencies
64 | working-directory: drakrun/web/frontend
65 | run: npm ci
66 | - name: Check with prettier
67 | working-directory: drakrun/web/frontend
68 | run: npx prettier --check src/
69 | - name: Build web bundle
70 | working-directory: drakrun/web/frontend
71 | run: npm run build
72 | - uses: actions/upload-artifact@v4
73 | with:
74 | name: drakrun-web
75 | path: |
76 | drakrun/web/frontend/dist/*
77 | build_drakrun:
78 | needs: [ build_drakrun_tools, build_drakrun_web ]
79 | runs-on: ubuntu-latest
80 | container: "python:3.9"
81 | steps:
82 | - uses: actions/checkout@v4
83 | with:
84 | submodules: recursive
85 | - name: Download tools
86 | uses: actions/download-artifact@v4
87 | with:
88 | name: drakrun-tools
89 | path: drakrun/tools
90 | - name: Download web bundle
91 | uses: actions/download-artifact@v4
92 | with:
93 | name: drakrun-web
94 | path: drakrun/web/frontend/dist
95 | - name: Build package
96 | run: |
97 | chmod +x drakrun/tools/get-explorer-pid # gh artifacts don't keep file permissions
98 | python3 setup.py bdist_wheel
99 | - uses: actions/upload-artifact@v4
100 | with:
101 | name: drakvuf-sandbox-whl
102 | path: ./dist/drakvuf_sandbox-*.whl
103 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/AnalysisView.jsx:
--------------------------------------------------------------------------------
1 | import { useParams } from "react-router-dom";
2 | import { useCallback, useEffect, useRef, useState } from "react";
3 | import { getAnalysisStatus } from "./api.js";
4 | import { CanceledError } from "axios";
5 | import { isStatusPending } from "./analysisStatus.js";
6 | import {
7 | AnalysisPendingView,
8 | AnalysisPendingStatusBox,
9 | } from "./AnalysisPendingView.jsx";
10 | import { AnalysisReport } from "./AnalysisReport.jsx";
11 |
// Polls the analysis status endpoint once per second while the analysis is
// pending, then renders the final report.
// NOTE(review): the JSX markup in this dump appears to have been stripped by
// text extraction (element tags are missing around the return statements
// below) — comments document intent only; verify against the original file.
function AnalysisViewComponent({ analysisId }) {
    // Holds the pending setTimeout id so polling can be cancelled on unmount.
    const checkInterval = useRef(null);
    const [analysisInfo, setAnalysisInfo] = useState();
    const [error, setError] = useState();

    const checkStatus = useCallback(() => {
        getAnalysisStatus({ analysisId })
            .then((response) => {
                if (response && response["time_execution_started"]) {
                    // Inject remaining time here. We're calling this method every second,
                    // while tracking a pending analysis status, so it's very good place
                    // for injecting such information
                    const timeout = response.options?.timeout;
                    const elapsedSeconds =
                        (new Date() -
                            new Date(response["time_execution_started"])) /
                        1000;
                    response["remaining_time"] = Math.max(
                        0,
                        timeout - elapsedSeconds,
                    );
                }
                setAnalysisInfo(response);
                if (isStatusPending(response?.status)) {
                    // Re-arm a single 1s timer instead of setInterval, so a slow
                    // request can't queue overlapping polls.
                    if (!checkInterval.current)
                        checkInterval.current = setTimeout(() => {
                            checkInterval.current = null;
                            checkStatus();
                        }, 1000);
                }
            })
            .catch((error) => {
                // Cancellation is expected on unmount; only surface real errors.
                if (!(error instanceof CanceledError)) {
                    setError(error);
                    console.error(error);
                }
            });
    }, [analysisId]);

    useEffect(() => {
        checkStatus();
        return () => {
            if (checkInterval.current) {
                clearTimeout(checkInterval.current);
                checkInterval.current = null;
            }
        };
    }, [analysisId, checkStatus]);

    if (typeof error !== "undefined") {
        return Error: {error.toString()}
;
    }

    if (typeof analysisInfo === "undefined") {
        return (

    
        Fetching analysis status...
    

        );
    }
    if (isStatusPending(analysisInfo?.status)) {
        return ;
    }
    return ;
}
81 |
// Route component: reads the analysis id from the URL and renders the
// status/report view for it.
// NOTE(review): the surrounding JSX markup appears stripped by text
// extraction — presumably it renders a heading plus
// <AnalysisViewComponent analysisId={jobid} />; verify against the original.
export default function AnalysisView() {
    const { jobid } = useParams();
    return (

Analysis report

    );
}
91 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Contribute to DRAKVUF Sandbox
2 | =============================
3 |
4 | ## Setup development environment
5 |
6 | ### Prerequisites
7 |
8 | Very first thing to consider is to setup and configure your local instance of DRAKVUF Sandbox. There are two basic options in that matter:
9 |
10 | * Develop on local machine: Install Debian Buster in [VMware Workstation 15 Player](https://www.vmware.com/products/workstation-player/workstation-player-evaluation.html).
11 | * Develop on a remote server: Just get some bare-metal or rent a dedicated server (e.g. [Kimsufi](https://www.kimsufi.com/us/en/servers.xml)) with Debian Buster.
12 |
13 | **Caution!** Your host machine must be an Intel processor with VT-x and EPT support, even when using VMware or other nested virtualization. You can check it by executing the following command on your native system (i.e. host system and without hypervisor loaded):
14 |
15 | ```
16 | # should return non-empty output and exit code 0
17 | lscpu | grep -i flags | grep -w ept
18 | ```
19 |
20 | DRAKVUF will not run on incompatible processors, as it directly relies on particular hardware virtualization extensions.
21 |
22 |
23 | ### Clone the repository
24 |
25 | In order to obtain the source code of DRAKVUF Sandbox, you need to execute the following commands:
26 |
27 | ```
28 | git clone --recurse-submodules https://github.com/CERT-Polska/drakvuf-sandbox.git
29 | cd drakvuf-sandbox
30 | ```
31 |
32 | ### Build Debian packages
33 |
34 | #### On local computer
35 |
36 | The DRAKVUF Sandbox distribution packages are built using Docker, in order to make them more reproducible. In order to build the packages by yourself, perform the following steps:
37 |
38 | 1. Obtain and install [Docker](https://docs.docker.com/engine/install/debian/).
39 | 2. Execute:
40 | ```
41 | sh drakcore/package/build.sh
42 | ```
43 | 3. The Debian packages will be produced to the `out/` directory. You can install them similarly as you would install the released packages. See ["Basic installation" section of README.md](https://github.com/CERT-Polska/drakvuf-sandbox/blob/icedevml-patch-1/README.md#basic-installation).
44 |
45 | #### On the Drone CI server
46 |
47 | When you open a pull request to this project, Drone CI will run some tests on your code and Debian packages will be also built during the process. The artifacts produced by the CI are available in S3 bucket at [minio.drakvuf.cert.pl/debs](https://minio.drakvuf.cert.pl/debs).
48 |
49 | Packages are numbered according to the Drone CI job numbers. You can figure out the job number by inspecting the [status checks](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-status-checks) related to your commit and clicking on `Details` link near `continuous-integration/drone/push` label. The build ID is in the URL and also in the breadcrumb, e.g.: `Repositories -> CERT-Polska/drakvuf-sandbox -> #200`.
50 |
51 |
52 | ### Install editable Python packages
53 |
54 | Now you can re-install Python packages from sources, using:
55 |
56 | ```
57 | /opt/venvs/drakcore/bin/pip3 install --editable ./drakcore/
58 | /opt/venvs/drakrun/bin/pip3 install --editable ./drakrun/
59 | ```
60 |
61 | Your changes to the DRAKVUF Sandbox services will be immediately visible after you restart them.
62 |
63 | ### Test local changes
64 |
65 | 1. Open `drakcore/drakcore/app.py`
66 | 2. Add these lines before `def main()`:
67 | ```python
68 | @app.route("/hello-world")
69 | def hello_world():
70 | return 'hello'
71 | ```
72 | 3. Save the file and execute `systemctl restart drak-web`
73 | 4. Navigate to `http://localhost:6300/hello-world`, your new subpage should appear.
74 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/AnalysisList.jsx:
--------------------------------------------------------------------------------
1 | import { Link } from "react-router-dom";
2 | import { useEffect, useState } from "react";
3 | import { getAnalysisList } from "./api";
4 | import { CanceledError } from "axios";
5 | import { AnalysisStatusBadge } from "./AnalysisStatusBadge.jsx";
6 |
7 | function AnalysisListRow({ analysis }) {
8 | return (
9 |
10 |
11 |
12 | {analysis.id}
13 |
14 |
15 |
16 |
SHA256:
17 |
{analysis.file.sha256}
18 |
19 |
20 |
Name:
21 |
{analysis.file.name}
22 |
23 |
24 |
Type:
25 |
{analysis.file.type}
26 |
27 |
28 | {analysis.time_started || "-"}
29 | {analysis.time_finished || "-"}
30 |
31 | );
32 | }
33 |
34 | function AnalysisListTable() {
35 | const [error, setError] = useState();
36 | const [analysisList, setAnalysisList] = useState();
37 |
38 | useEffect(() => {
39 | const abortController = new AbortController();
40 | getAnalysisList({ abortController })
41 | .then((response) => {
42 | setAnalysisList(response);
43 | })
44 | .catch((error) => {
45 | if (!(error instanceof CanceledError)) {
46 | setError(error);
47 | console.error(error);
48 | }
49 | });
50 | return () => {
51 | abortController.abort();
52 | };
53 | }, []);
54 |
55 | if (typeof error !== "undefined") {
56 | return Error: {error.toString()}
;
57 | }
58 |
59 | if (typeof analysisList === "undefined") {
60 | return Loading...
;
61 | }
62 |
63 | if (analysisList.length === 0) {
64 | return (
65 | There are no analyses. Upload sample to create a new one.
66 | );
67 | }
68 |
69 | return (
70 |
71 |
72 |
73 |
74 | Analysis ID
75 | Sample info
76 | Started
77 | Finished
78 |
79 |
80 |
81 | {analysisList.map((analysis) => (
82 |
86 | ))}
87 |
88 |
89 |
90 | );
91 | }
92 |
93 | export default function AnalysisList() {
94 | return (
95 |
99 | );
100 | }
101 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/PluginPicker.jsx:
--------------------------------------------------------------------------------
1 | import { useCallback, useState } from "react";
2 | import CreatableSelect from "react-select/creatable";
3 |
// Wrap a plugin name into the { label, value } shape expected by react-select.
const createOption = (option) => {
    return { label: option, value: option };
};
5 |
// All DRAKVUF plugins known to the sandbox, offered in the picker dropdown.
const plugins = [
    createOption("apimon"),
    createOption("bsodmon"),
    createOption("clipboardmon"),
    createOption("codemon"),
    createOption("delaymon"),
    createOption("exmon"),
    createOption("fileextractor"),
    createOption("filetracer"),
    createOption("hidevm"),
    createOption("hidsim"),
    createOption("ipt"),
    createOption("memdump"),
    createOption("objmon"),
    createOption("procmon"),
    createOption("regmon"),
    createOption("socketmon"),
    createOption("syscalls"),
    createOption("tlsmon"),
    createOption("windowmon"),
];

// Plugins that are pre-selected when submitting a new analysis.
const defaultPlugins = [
    createOption("apimon"),
    createOption("filetracer"),
    createOption("memdump"),
    createOption("procmon"),
    createOption("socketmon"),
    createOption("tlsmon"),
];
36 |
// react-select style overrides: built-in plugins are tinted blue,
// user-typed (custom) entries are tinted red as a visual warning.
const pluginPickerStyles = {
    multiValue: (styles, { data }) => ({
        ...styles,
        backgroundColor: data.__isNew__
            ? "rgba(255, 86, 48, 0.1)"
            : "rgba(0, 82, 204, 0.1)",
    }),
    multiValueLabel: (styles, { data }) => ({
        ...styles,
        color: data.__isNew__ ? "rgb(255, 86, 48)" : "rgb(0, 82, 204)",
    }),
};
48 |
49 | export function PluginList({ plugins }) {
50 | return (
51 |
52 | {plugins.map((plugin) => (
53 |
61 | {plugin}
62 |
63 | ))}
64 |
65 | );
66 | }
67 |
68 | export function PluginPicker({ onChange, name }) {
69 | const [warning, setWarning] = useState(undefined);
70 | const onSelectChange = useCallback(
71 | (currentValue) => {
72 | if (currentValue.some((data) => data.__isNew__)) {
73 | setWarning(
74 | "Picked custom plugin which may be not supported by Drakvuf Sandbox",
75 | );
76 | } else if (
77 | currentValue.length > 0 &&
78 | !currentValue.some((data) => data.value === "procmon")
79 | ) {
80 | setWarning(
81 | "It's recommended to include 'procmon' plugin for complete process information",
82 | );
83 | } else {
84 | setWarning(undefined);
85 | }
86 | if (onChange) onChange(currentValue);
87 | },
88 | [onChange],
89 | );
90 | return (
91 |
92 |
100 | {warning ?
{warning}
: []}
101 |
102 | );
103 | }
104 |
--------------------------------------------------------------------------------
/docs/ipt.rst:
--------------------------------------------------------------------------------
1 | ===================================================
2 | Using Intel Processor Trace Features (Experimental)
3 | ===================================================
4 |
5 | Enable IPT plugin in drakrun
6 | ----------------------------
7 |
8 | 1. In ``/etc/drakrun/scripts/cfg.template`` add a new entry: ``vmtrace_buf_kb = 8192``
9 | 2. Execute ``systemctl restart drakrun-worker@1`` (repeat for each drakrun instance if you have scaled them up).
10 | 3. Use ``ipt`` and ``codemon`` Drakvuf plugins while submitting your analysis.
11 |
12 |
13 |
14 | Install required extra dependencies
15 | -----------------------------------
16 |
17 | In order to analyze IPT data streams, you need to install ``libipt``, ``xed``, ``ptdump`` (modified), ``ptxed`` and ``drak-ipt-blocks`` tools.
18 |
19 | .. code-block :: console
20 |
21 | rm -rf /tmp/iptbuild
22 | mkdir /tmp/iptbuild
23 | cd /tmp/iptbuild
24 |
25 | git clone https://github.com/icedevml/libipt.git
26 | git clone https://github.com/intelxed/xed.git
27 | git clone https://github.com/intelxed/mbuild.git
28 | git clone https://github.com/gabime/spdlog.git
29 | git clone https://github.com/p-ranav/argparse.git -b v2.9
30 | git clone https://github.com/CERT-Polska/drakvuf-sandbox.git
31 |
32 | cd xed
33 | ./mfile.py --share
34 | ./mfile.py --prefix=/usr/local install
35 | ldconfig
36 |
37 | cd ../libipt
38 | git checkout
39 | cmake -D PTDUMP=On -D PTXED=On .
40 | make install
41 |
42 | cd ../spdlog
43 | cmake .
44 | make -j$(nproc) install
45 |
46 | cd ../argparse
47 | cmake .
48 | make -j$(nproc) install
49 |
50 | cd ../drakvuf-sandbox/drakrun/drakrun/tools/ipt
51 | cmake .
52 | make install
53 |
54 |
55 | Generate trace disassembly
56 | --------------------------
57 |
58 | 1. Perform an analysis with IPT plugin enabled
59 | 2. Download the completed analysis from MinIO to your local hard drive
60 | 3. Find CR3 of the target process you want to disassemble (hint: `syscall.log` will contain CR3 values)
4. Execute ``drak-ipt-disasm --analysis . --cr3 <target CR3> --vcpu 0``
5. After a few minutes it should start printing the full trace disassembly of the targeted process
63 | 6. You can also try `--blocks` switch for `drak-ipt-disasm` to get a list of executed basic blocks for this process
64 |
65 | **Example (executed basic blocks):**
66 |
67 | .. code-block :: console
68 |
69 | # drak-ipt-disasm --analysis . --cr3 0x735bb000 --vcpu 0 --blocks
70 | [2021-04-19 23:47:41.717] [console] [info] Decoding
71 | { "event": "block_executed", "data": "0x7feff565088" }
72 | { "event": "block_executed", "data": "0x7feff75450f" }
73 | { "event": "block_executed", "data": "0x7feff754505" }
74 | { "event": "block_executed", "data": "0x7feff75450d" }
75 | { "event": "block_executed", "data": "0x7feff5656ac" }
76 | { "event": "block_executed", "data": "0x7feff5656dc" }
77 | { "event": "block_executed", "data": "0x7feff5656fb" }
78 | { "event": "block_executed", "data": "0x7feff565068" }
79 | { "event": "block_executed", "data": "0x7feff751530" }
80 | { "event": "block_executed", "data": "0x7feff751552" }
81 | ...
82 |
83 |
84 | **Example (full usermode disassembly):**
85 |
86 | .. code-block :: console
87 |
88 | # drak-ipt-disasm --analysis . --cr3 0x735bb000 --vcpu 0 | grep -v ptwrite | grep -v cbr
89 | [enabled]
90 | [exec mode: 64-bit]
91 | 000007feff565088 movdqu xmmword ptr [rip+0x1b2b80], xmm0
92 | 000007feff565090 ret
93 | 000007feff75450f add rbx, 0x8
94 | 000007feff754513 cmp rbx, rdi
95 | 000007feff754516 jb 0x7feff754505
96 | 000007feff754505 mov rax, qword ptr [rbx]
97 | 000007feff754508 test rax, rax
98 | 000007feff75450b jz 0x7feff75450f
99 | ...
100 |
--------------------------------------------------------------------------------
/drakrun/lib/xen.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import subprocess
3 | from typing import Dict, Optional
4 |
5 | log = logging.getLogger(__name__)
6 |
7 |
def xen_is_vm_running(vm_name: str) -> bool:
    """Check whether a Xen domain with the given name currently exists."""
    proc = subprocess.run(["xl", "list", vm_name], capture_output=True)
    if proc.returncode == 0:
        return True
    # xl reports a nonexistent domain with this message on stderr.
    if b"is an invalid domain identifier" in proc.stderr:
        return False
    raise RuntimeError(f"Unexpected xl list output: {proc.stderr}")
16 |
17 |
def xen_create_vm(
    vm_name: str,
    config_path: str,
    pause: bool = False,
    timeout: Optional[float] = None,
) -> None:
    """Create (start) a VM from an xl config file, optionally leaving it paused.

    Raises RuntimeError when xl fails or does not finish within `timeout`.
    """
    cmdline = ["xl", "create"]
    if pause:
        cmdline.append("-p")
    cmdline.append(config_path)
    try:
        subprocess.run(cmdline, check=True, timeout=timeout)
    except subprocess.CalledProcessError:
        raise RuntimeError(f"Failed to launch VM {vm_name}")
    except subprocess.TimeoutExpired:
        raise RuntimeError(f"Failed to launch VM {vm_name} within {timeout} seconds")
34 |
35 |
def xen_unpause_vm(vm_name: str, timeout: Optional[float] = None) -> None:
    """Unpause a paused VM, raising RuntimeError on failure or timeout."""
    try:
        subprocess.run(
            ["xl", "unpause", vm_name],
            check=True,
            timeout=timeout,
        )
    except subprocess.CalledProcessError:
        raise RuntimeError(f"Failed to unpause VM {vm_name}")
    except subprocess.TimeoutExpired:
        raise RuntimeError(f"Failed to unpause VM {vm_name} within {timeout} seconds")
43 |
44 |
def xen_restore_vm(
    vm_name: str,
    config_path: str,
    snapshot_path: str,
    pause: bool = False,
) -> None:
    """Restore a VM from a saved snapshot, optionally leaving it paused."""
    cmdline = ["xl", "restore"]
    if pause:
        cmdline.append("-p")
    cmdline.extend([config_path, snapshot_path])
    try:
        subprocess.run(cmdline, check=True)
    except subprocess.CalledProcessError:
        raise RuntimeError(f"Failed to restore VM {vm_name}")
58 |
59 |
def xen_save_vm(
    vm_name: str,
    snapshot_path: str,
    pause: bool = False,
) -> None:
    """Save the VM state to a snapshot file.

    With pause=True the domain is left paused instead of being destroyed
    after the save (xl's -p flag).
    """
    cmdline = ["xl", "save"]
    if pause:
        cmdline.append("-p")
    cmdline.extend([vm_name, snapshot_path])
    try:
        subprocess.run(cmdline, check=True)
    except subprocess.CalledProcessError:
        raise RuntimeError(f"Failed to save VM {vm_name}")
72 |
73 |
def xen_destroy_vm(vm_name: str) -> None:
    """Forcefully destroy (hard power-off) the named VM.

    Raises:
        RuntimeError: when ``xl destroy`` exits with a non-zero status.
    """
    try:
        subprocess.run(["xl", "destroy", vm_name], check=True)
    except subprocess.CalledProcessError:
        # Fixed copy-paste mistake: previous message said "Failed to pause VM"
        # even though this function destroys the domain.
        raise RuntimeError(f"Failed to destroy VM {vm_name}")
79 |
80 |
def xen_get_domid(vm_name: str) -> int:
    """Return the numeric Xen domain id of the named VM."""
    domid_output = subprocess.check_output(["xl", "domid", vm_name], text=True)
    return int(domid_output.strip())
84 |
85 |
def parse_xen_commandline(xen_commandline: str) -> Dict[str, str]:
    """Parse a Xen boot command line into an option -> value mapping.

    Bare flags without '=' are mapped to the string "1"; for "key=value"
    tokens only the first '=' separates key from value.
    """
    parsed = {}
    for token in xen_commandline.split(" "):
        if not token.strip():
            continue
        key, sep, value = token.partition("=")
        parsed[key] = value if sep else "1"
    return parsed
100 |
101 |
def get_xen_info() -> Dict[str, str]:
    """Return the key-value pairs reported by ``xl info``.

    Lines that do not contain a ``:`` separator are skipped instead of
    raising ValueError, so unexpected output does not break parsing.
    """
    xl_info_out = subprocess.check_output(["xl", "info"], text=True)

    elements = {}
    for line in xl_info_out.strip().split("\n"):
        key, sep, value = line.partition(":")
        if not sep:
            # Not in "key : value" format - previously this crashed
            # with ValueError on unpacking; ignore such lines instead.
            continue
        elements[key.strip()] = value.strip()
    return elements
112 |
113 |
def xen_insert_cd(domain: str, drive: str, iso: str) -> None:
    """Insert an ISO image into the given CD drive of a domain (``xl cd-insert``)."""
    subprocess.run(["xl", "cd-insert", domain, drive, iso], check=True)
116 |
117 |
def xen_eject_cd(domain: str, drive: str) -> None:
    """Eject the medium from the given CD drive of a domain (``xl cd-eject``)."""
    subprocess.run(["xl", "cd-eject", domain, drive], check=True)
120 |
--------------------------------------------------------------------------------
/drakrun/cli/install.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os.path
3 | import secrets
4 | import string
5 |
6 | import click
7 |
8 | from drakrun.lib.config import load_config
9 | from drakrun.lib.install_info import InstallInfo
10 | from drakrun.lib.paths import INSTALL_INFO_PATH, initialize_config_files, make_dirs
11 | from drakrun.lib.storage import REGISTERED_BACKEND_NAMES, get_storage_backend
12 | from drakrun.lib.vm import VirtualMachine
13 |
14 | from .banner import banner
15 | from .check_root import check_root
16 | from .sanity_check import sanity_check
17 |
18 | log = logging.getLogger(__name__)
19 |
20 |
@click.command(help="Install guest Virtual Machine", no_args_is_help=True)
@click.argument("iso_path", type=click.Path(exists=True))
@click.option(
    "--vcpus",
    "vcpus",
    default=2,
    type=int,
    show_default=True,
    help="Number of vCPUs per single VM",
)
@click.option(
    "--memory",
    "memory",
    default=4096,
    type=int,
    show_default=True,
    help="Memory per single VM (in MB)",
)
@click.option(
    "--storage-backend",
    "storage_backend",
    type=click.Choice(REGISTERED_BACKEND_NAMES, case_sensitive=False),
    default="qcow2",
    show_default=True,
    help="Storage backend type",
    is_eager=True,
)
@click.option(
    "--disk-size", "disk_size", default="100G", show_default=True, help="Disk size"
)
@click.option(
    "--zfs-tank-name",
    "zfs_tank_name",
    help="Tank name (only for ZFS storage backend)",
)
@click.option(
    "--lvm-volume-group",
    "lvm_volume_group",
    help="Volume group (only for lvm storage backend)",
)
@check_root
def install(
    vcpus,
    memory,
    storage_backend,
    disk_size,
    iso_path,
    zfs_tank_name,
    lvm_volume_group,
):
    """Prepare vm-0 and launch Windows installation from the given ISO.

    Validates backend-specific options, persists InstallInfo and starts
    vm-0 with the installation ISO attached. The user then completes the
    Windows setup over VNC and runs `drakrun postinstall`.
    """
    # Fail early with a clear message when a backend-specific option is missing.
    # Use the module-level logger for consistency with the rest of this module
    # (previously called logging.error on the root logger).
    if storage_backend == "lvm" and lvm_volume_group is None:
        log.error("lvm storage backend requires --lvm-volume-group")
        raise click.Abort()
    if storage_backend == "zfs" and zfs_tank_name is None:
        log.error("zfs storage backend requires --zfs-tank-name")
        raise click.Abort()

    sanity_check()
    make_dirs()
    initialize_config_files()

    config = load_config()

    log.info("Performing installation...")
    # Random 8-character alphanumeric VNC password (cryptographically secure).
    passwd_characters = string.ascii_letters + string.digits
    vnc_passwd = "".join(secrets.choice(passwd_characters) for _ in range(8))
    install_info = InstallInfo(
        vcpus=vcpus,
        memory=memory,
        storage_backend=storage_backend,
        disk_size=disk_size,
        vnc_passwd=vnc_passwd,
        zfs_tank_name=zfs_tank_name,
        lvm_volume_group=lvm_volume_group,
    )
    install_info.save(INSTALL_INFO_PATH)

    backend = get_storage_backend(install_info)

    vm0 = VirtualMachine(
        vm_id=0, install_info=install_info, network_conf=config.network
    )
    # Ensure VM0 is destroyed
    vm0.destroy()

    backend.initialize_vm0_volume(disk_size)

    iso_path = os.path.abspath(iso_path)
    vm0.create(iso_path=iso_path)

    banner(
        f"""
Initial VM setup is complete and the vm-0 was launched.
Please now VNC to the port 5900 on this machine to perform Windows installation.
After you have installed Windows and booted it to the desktop, please execute:
# drakrun postinstall
Your configured VNC password is:
{vnc_passwd}
Please note that on some machines, system installer may boot for up to 10 minutes
and may look unresponsive during the process. Please be patient.
"""
    )
123 |
--------------------------------------------------------------------------------
/drakrun/lib/injector.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | from typing import List
3 |
4 | from .drakvuf_cmdline import get_base_injector_cmdline
5 | from .libvmi import VmiInfo
6 |
7 |
class Injector:
    """Helper class, simplifying usage of DRAKVUF Injector"""

    def __init__(self, vm_name: str, vmi_info: VmiInfo, kernel_profile_path: str):
        # Target VM name, VMI offsets and kernel profile path are passed
        # verbatim to the injector command line builder.
        self.vm_name = vm_name
        self.kernel_profile_path = kernel_profile_path
        self.vmi_info = vmi_info

    def _run_with_timeout(
        self,
        args: List[str],
        timeout: int,
        check: bool = False,
        capture_output: bool = False,
    ):
        """
        subprocess.run(timeout=...) kills process instead of sending SIGTERM after
        reaching timeout. In our case, we want to let injector do a clean termination.
        """
        kwargs = {}
        if capture_output:
            kwargs["stdout"] = subprocess.PIPE
            kwargs["stderr"] = subprocess.PIPE
        with subprocess.Popen(args, **kwargs) as proc:
            try:
                outs, errs = proc.communicate(timeout=timeout)
            except subprocess.TimeoutExpired:
                # Ask the injector to terminate cleanly (SIGTERM),
                # then wait up to `timeout` again for it to exit.
                proc.terminate()
                proc.wait(timeout)
                raise
            finally:
                # Last resort: force-kill if the process is somehow still alive.
                if proc.poll() is None:
                    proc.kill()
            retcode = proc.poll()
            if check and retcode:
                # Mirror subprocess.run(check=True) behavior.
                raise subprocess.CalledProcessError(
                    retcode, proc.args, output=outs, stderr=errs
                )
            return subprocess.CompletedProcess(proc.args, retcode, outs, errs)

    def get_cmdline_generic(self, method: str, args: List[str]) -> List[str]:
        """Build base command line for all injection methods"""
        return get_base_injector_cmdline(
            self.vm_name, self.kernel_profile_path, self.vmi_info, method, args
        )

    def get_cmdline_writefile(self, local: str, remote: str) -> List[str]:
        """Build injector command line for copying a local file into the VM."""
        return self.get_cmdline_generic("writefile", ["-e", remote, "-B", local])

    def get_cmdline_readfile(self, remote: str, local: str) -> List[str]:
        """Build injector command line for copying a VM file to local disk."""
        return self.get_cmdline_generic("readfile", ["-e", remote, "-B", local])

    def get_cmdline_createproc(self, exec_cmd: str, wait: bool = False) -> List[str]:
        """Build injector command line for spawning a process in the VM.

        With wait=True the injector waits for the process to finish (-w).
        """
        return self.get_cmdline_generic(
            "createproc", ["-e", exec_cmd, *(["-w"] if wait else [])]
        )

    def get_cmdline_shellcode(self, shellcode_path: str) -> List[str]:
        """Build injector command line for injecting shellcode into the VM."""
        return self.get_cmdline_generic("shellcode", ["-e", shellcode_path])

    def write_file(
        self, local_path: str, remote_path: str, timeout: int = 60
    ) -> subprocess.CompletedProcess:
        """
        Copy local file to the VM
        """
        injector_cmd = self.get_cmdline_writefile(local_path, remote_path)
        return self._run_with_timeout(
            injector_cmd, timeout=timeout, check=True, capture_output=True
        )

    def read_file(
        self, remote_path: str, local_path: str, timeout: int = 60
    ) -> subprocess.CompletedProcess:
        """
        Copy VM file to local
        """
        # NOTE: unlike write_file, check=True is not passed here; callers
        # inspect the returned stdout/stderr themselves.
        injector_cmd = self.get_cmdline_readfile(remote_path, local_path)
        return self._run_with_timeout(
            injector_cmd, timeout=timeout, capture_output=True
        )

    def create_process(
        self, cmdline: str, wait: bool = False, timeout: int = 60
    ) -> subprocess.CompletedProcess:
        """
        Create a process inside the VM with given command line
        """
        injector_cmd = self.get_cmdline_createproc(cmdline, wait=wait)
        return self._run_with_timeout(injector_cmd, timeout=timeout, check=True)

    def inject_shellcode(self, shellcode_path: str, timeout: int = 60):
        """Inject and execute shellcode from a local file inside the VM."""
        injector_cmd = self.get_cmdline_shellcode(shellcode_path)
        return self._run_with_timeout(injector_cmd, timeout=timeout, check=True)
102 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/ProcessTree.jsx:
--------------------------------------------------------------------------------
1 | import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
2 | import { faPlusCircle, faMinusCircle } from "@fortawesome/free-solid-svg-icons";
3 | import { useEffect, useRef } from "react";
4 | import { Tooltip } from "bootstrap/js/index.esm.js";
5 |
function trimProcessName(procname) {
    // Keep only the last component of a backslash-separated Windows path.
    const components = procname.split("\\");
    return components[components.length - 1];
}
9 |
10 | function TooltipSpan({ className, tooltip, children }) {
11 | const node = useRef(undefined);
12 | useEffect(() => {
13 | if (node.current) {
14 | const tooltip = new Tooltip(node.current);
15 | return () => tooltip.dispose();
16 | }
17 | }, []);
18 | if (tooltip) {
19 | return (
20 |
27 | {children}
28 |
29 | );
30 | } else {
31 | return (
32 |
33 | {children}
34 |
35 | );
36 | }
37 | }
38 |
39 | export function ProcessNode({ node, onClick }) {
40 | let nodeStyle = "";
41 | if (!node.ts_from) {
42 | nodeStyle = "text-muted";
43 | } else if (!node.ts_to) {
44 | nodeStyle = "text-primary";
45 | }
46 | const commandLine = (node.args || []).join(" ");
47 | return (
48 |
49 |
50 | {trimProcessName(node.procname)}
51 |
52 | ({node.pid})
53 |
54 | );
55 | }
56 |
57 | export function ProcessTree({
58 | processTree,
59 | uncollapsedSeqid,
60 | setCollapse,
61 | selected,
62 | onSelect = () => {},
63 | }) {
64 | return (
65 |
66 | {processTree.map((element) => {
67 | const leaf = element.children.length === 0;
68 | const collapsed = !uncollapsedSeqid.has(element.seqid);
69 | const isSelected = element.seqid === selected;
70 | return (
71 | <>
72 |
73 | {!leaf ? (
74 | {
81 | ev.preventDefault();
82 | setCollapse(element.seqid);
83 | }}
84 | />
85 | ) : (
86 |
91 | )}
92 | onSelect(element.seqid)}
95 | />
96 |
97 | {!leaf && !collapsed ? (
98 |
105 | ) : (
106 | []
107 | )}
108 | >
109 | );
110 | })}
111 |
112 | );
113 | }
114 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/ProcessTreeView.jsx:
--------------------------------------------------------------------------------
1 | import { useEffect, useState } from "react";
2 | import { getAnalysisProcessTree } from "./api.js";
3 | import { ProcessTree } from "./ProcessTree.jsx";
4 |
function isProcessInteresting(process) {
    // Processes whose name ends with explorer.exe are uncollapsed by default.
    const suffix = "explorer.exe";
    return process.procname.slice(-suffix.length) === suffix;
}
8 |
function getProcessParents(processTree, processId) {
    // Depth-first search for processId. Returns the chain of ancestor seqids
    // (root first) or undefined when the id is not present in the tree.
    let pending = processTree.map((element) => ({ parents: [], element }));
    while (pending.length > 0) {
        const { parents, element } = pending.shift();
        if (element.seqid === processId) {
            return parents;
        }
        const children = element.children || [];
        const childEntries = children.map((child) => ({
            parents: parents.concat([element.seqid]),
            element: child,
        }));
        // Children go to the front so the traversal stays depth-first.
        pending = childEntries.concat(pending);
    }
    return undefined;
}
28 |
function getInterestingProcesses(processTree) {
    // Collect seqids that should start uncollapsed: every interesting
    // process plus all of its ancestors, so the tree can be expanded
    // down to each interesting node.
    const interesting = new Set();
    for (const process of processTree) {
        if (isProcessInteresting(process)) {
            interesting.add(process.seqid);
        }
        if (process.children.length > 0) {
            const fromChildren = getInterestingProcesses(process.children);
            if (fromChildren.size) {
                fromChildren.forEach((seqid) => interesting.add(seqid));
                interesting.add(process.seqid);
            }
        }
    }
    return interesting;
}
45 |
46 | export function ProcessTreeView({
47 | analysisId,
48 | selectedProcess,
49 | onProcessSelect,
50 | }) {
51 | const [uncollapsed, setUncollapsed] = useState(new Set());
52 | const [processTree, setProcessTree] = useState();
53 | const [error, setError] = useState();
54 |
55 | useEffect(() => {
56 | getAnalysisProcessTree({ analysisId })
57 | .then((data) => {
58 | setProcessTree(data);
59 | setUncollapsed(getInterestingProcesses(data));
60 | })
61 | .catch((e) => {
62 | console.error(e);
63 | setError(e);
64 | });
65 | }, []);
66 |
67 | useEffect(() => {
68 | if (!processTree) return;
69 | const parents = getProcessParents(processTree, selectedProcess);
70 | if (parents) {
71 | setUncollapsed((currentValue) =>
72 | currentValue.union(new Set(parents)),
73 | );
74 | }
75 | }, [selectedProcess, processTree]);
76 |
77 | return (
78 |
79 |
80 | {typeof processTree === "undefined" ? (
81 |
Loading process tree...
82 | ) : (
83 | []
84 | )}
85 | {typeof error !== "undefined" ? (
86 |
87 | Unable to load process tree
88 |
89 | ) : (
90 | []
91 | )}
92 | {typeof processTree !== "undefined" ? (
93 |
{
97 | const collapse = uncollapsed.has(seqid);
98 | setUncollapsed((currentValue) => {
99 | let newSet = new Set(currentValue);
100 | if (!collapse) {
101 | newSet.add(seqid);
102 | } else {
103 | newSet.delete(seqid);
104 | }
105 | return newSet;
106 | });
107 | }}
108 | selected={selectedProcess}
109 | onSelect={(seqid) => {
110 | onProcessSelect(seqid);
111 | }}
112 | />
113 | ) : (
114 | []
115 | )}
116 |
117 |
118 | );
119 | }
120 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DRAKVUF Sandbox
2 |
3 | > [!WARNING]
4 | > Here be dragons 🐉. Maintaining your own sandbox is a difficult task and this project uses technology that is not user-friendly.
5 | > Be prepared to brush up on your debugging skills as bugs may be reproducible only on your configuration.
6 | > On the other hand, it's not purely an R&D project and it is used in production! Source code and issues section on both
7 | > DRAKVUF Sandbox and [DRAKVUF engine](https://github.com/tklengyel/drakvuf) projects are your best friend.
8 |
9 | DRAKVUF Sandbox is an automated black-box malware analysis system with [DRAKVUF](https://drakvuf.com/) engine under the hood, which does not require an agent on guest OS.
10 |
11 | This project provides you with a friendly web interface that allows you to upload suspicious files to be analyzed. Once the sandboxing job is finished, you can explore the analysis result through the mentioned interface and get an insight on whether the file is truly malicious or not.
12 |
13 | Because it is usually pretty hard to set up a malware sandbox, this project also provides you with an installer app that would guide you through the necessary steps and configure your system using settings that are recommended for beginners. At the same time, experienced users can tweak some settings or even replace some infrastructure parts to better suit their needs.
14 |
15 | ## Quick start
16 | * **[👋 Getting started](https://drakvuf-sandbox.readthedocs.io/en/latest/usage/getting_started.html)**
17 | * [Latest releases](https://github.com/CERT-Polska/drakvuf-sandbox/releases)
18 | * [Latest docs](https://drakvuf-sandbox.readthedocs.io/en/latest/)
19 |
20 | 
21 |
22 | ## Recommended hardware & software
23 |
24 | In order to run DRAKVUF Sandbox, your setup should fulfill all the listed requirements.
25 |
26 | * Processor:
* ✔️ An Intel processor with Intel Virtualization Technology (VT-x) and Extended Page Tables (EPT) features is required
28 | * Host system with at least 2 core CPU and 5 GB RAM, running GRUB as bootloader, one of:
29 | * ✔️ Debian 12 Bookworm
30 | * ✔️ Ubuntu 22.04 Jammy
31 | * Guest system, one of:
32 | * ✔️ Windows 10 build at least 2004 (x64), recommended 22H2
33 | * ✔️ Windows 7 (x64)
34 |
35 | Nested virtualization:
36 |
37 | * ✔️ Xen - works out of the box.
38 | * ✔️ KVM - works, we often use it for development purposes. If you experience any bugs, please report them to us for further investigation.
39 | * ✔️ VMware Workstation Player - works, but you need to check Virtualize EPT option for a VM; Intel processor with EPT still required.
40 | * ❌ AWS, GCP, Azure - due to lack of exposed CPU features, hosting DRAKVUF Sandbox in the cloud is **not** supported (although it might change in the future).
41 | * ❌ Hyper-V - doesn't work.
42 | * ❌ VMWare Fusion (Mac) - doesn't work.
43 |
44 | ## Maintainers/authors
45 |
46 | Feel free to contact us if you have any questions or comments.
47 |
48 | **General contact email: info@cert.pl** (fastest response)
49 |
50 | You can also chat with us about this project on Discord: [https://discord.gg/Q7eTsHnpn4](https://discord.gg/Q7eTsHnpn4)
51 |
52 | This project is authored by:
53 |
54 | * Michał Leszczyński ([@icedevml](https://github.com/icedevml))
55 | * Adam Kliś ([@BonusPlay](https://github.com/BonusPlay))
56 | * Hubert Jasudowicz ([@chivay](https://github.com/chivay))
57 | * Paweł Srokosz ([@psrok1](https://github.com/psrok1))
58 | * Konstanty Cieśliński ([@kscieslinski](https://github.com/kscieslinski))
59 | * Arkadiusz Wróbel ([@catsuryuu](https://github.com/catsuryuu))
60 | * Jarosław Jedynak ([@msm-cert](https://github.com/msm-cert))
61 |
62 | If you have any questions about [DRAKVUF](https://drakvuf.com/) engine itself, contact tamas@tklengyel.com
63 |
64 | ## Acknowledgements
65 |
66 | This project was created and/or upgraded thanks to the following organizations and initiatives:
67 |
68 | ### Connecting Europe Facility of the European Union
69 |
70 |
71 |
72 | ### The Honeynet Project
73 |
74 |
75 |
76 | ### CERT Polska
77 |
78 |
79 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/AnalysisPendingView.jsx:
--------------------------------------------------------------------------------
1 | import { AnalysisLiveInteraction } from "./AnalysisLiveInteraction.jsx";
2 | import { AnalysisStatusBadge } from "./AnalysisStatusBadge.jsx";
3 | import { AnalysisMetadataTable } from "./AnalysisMetadataTable.jsx";
4 | import { useState } from "react";
5 | import { Tab, TabSwitcher } from "./TabSwitcher.jsx";
6 |
7 | export function AnalysisPendingStatusBox({ children }) {
8 | return (
9 |
10 |
11 |
{children}
12 |
22 |
23 |
24 | );
25 | }
26 |
27 | function AnalysisPendingTabs({ analysis }) {
28 | const [activeTab, setActiveTab] = useState("metadata");
29 | const enableLiveInteraction =
30 | analysis["vm_id"] &&
31 | analysis["status"] === "started" &&
32 | analysis["substatus"] !== "starting_vm";
33 | return (
34 | {
36 | if (tabid === "metadata") {
37 | return "Analysis info";
38 | } else if (tabid === "live-interaction") {
39 | return `Live interaction (vm-${analysis["vm_id"]})`;
40 | }
41 | }}
42 | activeTab={activeTab}
43 | onTabSwitch={setActiveTab}
44 | >
45 |
46 |
47 |
48 | {enableLiveInteraction ? (
49 |
50 |
51 |
52 | ) : (
53 | []
54 | )}
55 |
56 | );
57 | }
58 |
function formatTime(tm) {
    // Render a duration given in seconds as "M:SS".
    const wholeSeconds = Math.floor(tm % 60);
    const paddedSeconds = wholeSeconds.toString().padStart(2, "0");
    return `${Math.floor(tm / 60)}:${paddedSeconds}`;
}
66 |
67 | export function AnalysisRemainingTimeBadge({ remainingTime }) {
68 | return (
69 |
70 | Remaining time: {formatTime(remainingTime)}
71 |
72 | );
73 | }
74 |
75 | export function AnalysisPendingView({ analysis }) {
76 | return (
77 | <>
78 |
79 |
80 |
81 | Please wait until analysis is completed...
82 |
83 |
84 | Current status:
85 |
86 |
90 | {analysis["status"] === "started" &&
91 | analysis["substatus"] === "analyzing" &&
92 | analysis["remaining_time"] ? (
93 |
96 | ) : (
97 | []
98 | )}
99 |
100 |
101 |
102 |
103 |
112 | >
113 | );
114 | }
115 |
--------------------------------------------------------------------------------
/drakrun/lib/vmi_profile.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import pathlib
4 | import tempfile
5 | import time
6 |
7 | from drakpdb import make_pdb_profile, pe_codeview_data
8 |
9 | from drakrun.lib.drakshell import get_drakshell
10 | from drakrun.lib.fetch_pdb import vmi_fetch_pdb
11 | from drakrun.lib.injector import Injector
12 | from drakrun.lib.libvmi.dlls import DLL, optional_dll_file_list, required_dll_file_list
13 | from drakrun.lib.libvmi.vmi_info import VmiInfo
14 | from drakrun.lib.paths import VMI_INFO_PATH, VMI_KERNEL_PROFILE_PATH, VMI_PROFILES_DIR
15 | from drakrun.lib.vm import VirtualMachine
16 |
17 | from .libvmi import extract_explorer_pid, extract_vmi_offsets, get_vmi_kernel_guid
18 |
19 | log = logging.getLogger(__name__)
20 |
21 |
def extract_dll_profile(injector: Injector, dll: DLL) -> None:
    """Extract a DRAKVUF JSON profile for a single guest DLL.

    Reads the DLL out of the guest filesystem via the injector, resolves its
    PDB from the symbol store and writes the resulting profile into
    ``VMI_PROFILES_DIR`` as ``<dll.dest>.json``.

    Args:
        injector: Injector bound to the running VM.
        dll: DLL descriptor (guest path and destination name).

    Raises:
        FileNotFoundError: when the DLL does not exist in the guest.
        RuntimeError: when the injector fails for any other reason.
    """
    tempdir = pathlib.Path(tempfile.gettempdir())
    local_dll_path = (tempdir / dll.dest).as_posix()
    guest_dll_path = str(pathlib.PureWindowsPath("C:/", dll.path))

    log.info("Generating VMI profile for %s", local_dll_path)

    proc = injector.read_file(guest_dll_path, local_dll_path)
    out = json.loads(proc.stdout.decode())
    if out["Status"] == "Error" and out["Error"] in (
        "ERROR_FILE_NOT_FOUND",
        "ERROR_PATH_NOT_FOUND",
    ):
        # Include the guest path so callers that log this exception can tell
        # which DLL was missing (previously raised with no message at all).
        raise FileNotFoundError(guest_dll_path)
    if out["Status"] != "Success":
        raise RuntimeError(f"Injector failed with {proc.stderr}")

    codeview_data = pe_codeview_data(local_dll_path)
    pdb_filepath = vmi_fetch_pdb(
        codeview_data["filename"], codeview_data["symstore_hash"]
    )
    profile = make_pdb_profile(
        pdb_filepath,
        dll_origin_path=guest_dll_path,
        dll_path=local_dll_path,
        dll_symstore_hash=codeview_data["symstore_hash"],
    )
    profile_path = VMI_PROFILES_DIR / f"{dll.dest}.json"
    profile_path.write_text(json.dumps(profile, indent=4))
51 |
52 |
def create_vmi_info(vm: VirtualMachine, with_drakshell: bool = True) -> VmiInfo:
    """Build VMI information for a running VM and persist it to disk.

    Fetches the kernel PDB, writes the kernel profile JSON to
    ``VMI_KERNEL_PROFILE_PATH``, extracts VMI offsets and the explorer.exe
    PID, optionally installs drakshell, and finally writes the resulting
    :class:`VmiInfo` to ``VMI_INFO_PATH``.

    Args:
        vm: the running virtual machine to inspect.
        with_drakshell: when True, try to install drakshell in the guest so
            later injections target its process/thread instead of explorer.

    Returns:
        The populated VmiInfo object (also persisted to VMI_INFO_PATH).

    Raises:
        RuntimeError: when the VM is not running.
    """
    if not vm.is_running:
        raise RuntimeError("VM is not running")
    kernel_info = get_vmi_kernel_guid(vm.vm_name)
    log.info(f"Determined PDB GUID: {kernel_info.guid}")
    log.info(f"Determined kernel filename: {kernel_info.filename}")

    pdb_file = vmi_fetch_pdb(kernel_info.filename, kernel_info.guid)
    kernel_profile = make_pdb_profile(pdb_file.as_posix())
    VMI_KERNEL_PROFILE_PATH.write_text(json.dumps(kernel_profile, indent=4))

    vmi_offsets = extract_vmi_offsets(vm.vm_name, VMI_KERNEL_PROFILE_PATH)
    explorer_pid = extract_explorer_pid(
        vm.vm_name, VMI_KERNEL_PROFILE_PATH, vmi_offsets
    )
    # Default injection target is explorer.exe; overridden below if drakshell
    # installs successfully.
    vmi_info = VmiInfo(vmi_offsets, inject_pid=explorer_pid)
    if with_drakshell:
        injector = Injector(vm.vm_name, vmi_info, VMI_KERNEL_PROFILE_PATH.as_posix())
        # Drakshell installation can fail transiently (e.g. while the guest
        # desktop is idle), so retry up to 5 times with a 5 second delay.
        for try_no in range(5):
            try:
                _, drakshell_info = get_drakshell(vm, injector)
                vmi_info.inject_pid = drakshell_info["pid"]
                vmi_info.inject_tid = drakshell_info["tid"]
                break
            except Exception as e:
                log.warning("Failed to install drakshell on the VM", exc_info=e)
                if try_no < 4:
                    log.warning(
                        f"Another try ({try_no+2}/5) in 5 seconds... You can try connecting to the VM using VNC and moving "
                        f"mouse over the desktop while we're trying to setup the drakshell."
                    )
                    time.sleep(5)
                else:
                    # Non-fatal: drakshell is a convenience, injections fall
                    # back to the explorer.exe PID set above.
                    log.warning(
                        "I surrender, drakshell will be inactive. I hope you won't have problems with profile generation."
                    )

    VMI_INFO_PATH.write_text(vmi_info.to_json(indent=4))
    return vmi_info
92 |
93 |
def create_vmi_json_profile(vm: VirtualMachine, vmi_info: VmiInfo) -> None:
    """Generate VMI JSON profiles for the guest DLLs of a running VM.

    Profiles for required DLLs must succeed (exceptions propagate); failures
    for optional DLLs are logged and skipped.

    Args:
        vm: the running virtual machine.
        vmi_info: VMI offsets and injection target, e.g. from create_vmi_info.

    Raises:
        RuntimeError: when the VM is not running.
    """
    if not vm.is_running:
        raise RuntimeError("VM is not running")

    injector = Injector(vm.vm_name, vmi_info, VMI_KERNEL_PROFILE_PATH.as_posix())
    for dll in required_dll_file_list:
        extract_dll_profile(injector, dll)

    for dll in optional_dll_file_list:
        try:
            extract_dll_profile(injector, dll)
        except Exception:
            # Use the module logger (not the root logger via logging.exception)
            # so records are attributed to this module like the rest of the
            # file; lazy %-formatting avoids building the string unless logged.
            log.exception("Failed to get profile for %s", dll.path)
107 |
--------------------------------------------------------------------------------
/drakrun/web/frontend/src/api.js:
--------------------------------------------------------------------------------
1 | import axios from "axios";
2 |
// Point axios at the API server configured at build time (VITE_API_SERVER);
// otherwise fall back to the same-origin "/api" prefix used in deployments
// where the frontend is served next to the backend.
axios.defaults.baseURL = import.meta.env.VITE_API_SERVER
  ? import.meta.env.VITE_API_SERVER
  : "/api";
8 |
// GET /list — fetch the list of analyses known to the backend.
// The abortController is required here and its signal cancels the request.
export async function getAnalysisList({ abortController }) {
  const response = await axios.get("/list", {
    signal: abortController.signal,
  });
  return response.data;
}
15 |
// GET /status/<analysisId> — fetch the current status of an analysis.
// abortController is optional; when omitted the request cannot be cancelled.
export async function getAnalysisStatus({
  analysisId,
  abortController = undefined,
}) {
  const requestConfig = abortController
    ? { signal: abortController.signal }
    : {};
  const response = await axios.get(`/status/${analysisId}`, requestConfig);
  return response.data;
}
30 |
// GET /report/<analysisId> — fetch the analysis report/summary.
// abortController is optional; when omitted the request cannot be cancelled.
export async function getAnalysisSummary({
  analysisId,
  abortController = undefined,
}) {
  const requestConfig = abortController
    ? { signal: abortController.signal }
    : {};
  const response = await axios.get(`/report/${analysisId}`, requestConfig);
  return response.data;
}
45 |
// GET /processed/<analysisId>/process_tree — fetch the postprocessed
// process tree. abortController is optional.
export async function getAnalysisProcessTree({
  analysisId,
  abortController = undefined,
}) {
  const requestConfig = abortController
    ? { signal: abortController.signal }
    : {};
  const response = await axios.get(
    `/processed/${analysisId}/process_tree`,
    requestConfig,
  );
  return response.data;
}
60 |
// Fetch a byte range of a log file as plain text, using an HTTP Range header
// so huge logs can be paged through without downloading them whole.
export async function getLog({ analysisId, logType, rangeStart, rangeEnd }) {
  const response = await axios.get(`/logs/${analysisId}/${logType}`, {
    responseType: "text",
    headers: { Range: `bytes=${rangeStart}-${rangeEnd}` },
  });
  return response.data;
}
70 |
// GET /logs/<analysisId> — list the log types available for an analysis.
export async function getLogList({ analysisId }) {
  const response = await axios.get(`/logs/${analysisId}`);
  return response.data;
}
75 |
// GET /process_info/<analysisId>/<processSeqId> — details of one process
// identified by its sequence id within the analysis.
export async function getProcessInfo({ analysisId, processSeqId }) {
  const url = `/process_info/${analysisId}/${processSeqId}`;
  const response = await axios.get(url);
  return response.data;
}
82 |
// Byte-ranged fetch of a per-process log, optionally filtered to a set of
// API method names (sent as repeated `filter` query parameters).
export async function getProcessLog({
  analysisId,
  logType,
  selectedProcess,
  rangeStart,
  rangeEnd,
  methodsFilter = [],
}) {
  const url = `/logs/${analysisId}/${logType}/process/${selectedProcess}`;
  const response = await axios.get(url, {
    responseType: "text",
    headers: { Range: `bytes=${rangeStart}-${rangeEnd}` },
    params: { filter: methodsFilter },
  });
  return response.data;
}
105 |
// GET /files/<analysisId> — list of files produced by the analysis.
// abortController is optional; when omitted the request cannot be cancelled.
export async function getAnalysisFileList({
  analysisId,
  abortController = undefined,
}) {
  const requestConfig = abortController
    ? { signal: abortController.signal }
    : {};
  const response = await axios.get(`/files/${analysisId}`, requestConfig);
  return response.data;
}
120 |
// POST /upload — submit a sample for analysis as a multipart form.
// `file`, `timeout` and `plugins` (JSON-encoded) are always sent; the string
// options are sent only when non-empty, and the boolean flags are sent as "1"
// only when enabled, matching what the backend expects.
export async function uploadSample({
  file,
  timeout,
  file_name,
  file_path,
  plugins,
  start_command,
  no_internet,
  no_screenshots,
  extract_archive,
  archive_password,
}) {
  const formData = new FormData();
  formData.append("file", file);
  formData.append("timeout", timeout);
  formData.append("plugins", JSON.stringify(plugins));
  const optionalStrings = [
    ["file_name", file_name],
    ["file_path", file_path],
    ["start_command", start_command],
  ];
  for (const [key, value] of optionalStrings) {
    if (value) formData.append(key, value);
  }
  const flags = [
    ["no_internet", no_internet],
    ["no_screenshots", no_screenshots],
    ["extract_archive", extract_archive],
  ];
  for (const [key, enabled] of flags) {
    if (enabled) formData.append(key, "1");
  }
  if (archive_password) formData.append("archive_password", archive_password);
  const response = await axios.post("/upload", formData, {
    headers: { "Content-Type": "multipart/form-data" },
  });
  return response.data;
}
151 |
--------------------------------------------------------------------------------
/drakrun/analyzer/screenshotter.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | import logging
4 | import threading
5 | import time
6 | from pathlib import Path
7 |
8 | import asyncvnc
9 | from perception import hashers
10 | from PIL import Image
11 |
12 | logger = logging.getLogger(__name__)
13 |
14 |
class Screenshotter:
    """Takes periodic VNC screenshots of a running VM on a background thread.

    A dedicated thread runs its own asyncio event loop which connects to the
    VM's VNC endpoint and grabs a frame every ``loop_interval`` seconds. A
    perceptual hash (PHash) of each frame is compared against the hash of the
    previously *stored* frame; near-identical frames are skipped so only
    visibly changed screens are saved, up to ``max_screenshots`` images.

    Output layout (under ``output_dir``):
      * ``screenshots/screenshot_<n>.png`` — stored frames, 1-based index
      * ``screenshots.json`` — one JSON object per line with the capture
        timestamp, perceptual hash and index of each stored frame
    """

    def __init__(
        self,
        output_dir: Path,
        vnc_host: str,
        vnc_port: int,
        vnc_password: str,
        loop_interval=5,
        max_screenshots=30,
        diff_threshold=0.05,
    ):
        # Event loop, worker thread and capture task are created lazily in
        # start(); stop() uses them to shut the worker down.
        self._aioloop = None
        self._thread = None
        self._task = None

        self.output_dir = output_dir
        self.vnc_host = vnc_host
        self.vnc_port = vnc_port
        self.vnc_password = vnc_password
        self.loop_interval = loop_interval  # seconds between captures
        self.max_screenshots = max_screenshots  # stop after this many stored frames
        self.diff_threshold = diff_threshold  # min PHash distance to store a frame

    async def perform(self):
        """Capture loop: connect over VNC and store visibly changed frames."""
        hasher = hashers.PHash()
        prev_image_hash = None
        screenshot_no = 0
        screenshot_log_path = self.output_dir / "screenshots.json"
        screenshot_dir = self.output_dir / "screenshots"
        # No exist_ok: each analysis is expected to provide a fresh output
        # directory — an existing "screenshots" dir raises FileExistsError,
        # which the broad handler below logs.
        screenshot_dir.mkdir()
        try:
            with screenshot_log_path.open("w") as screenshot_log:
                async with asyncvnc.connect(
                    host=self.vnc_host, port=self.vnc_port, password=self.vnc_password
                ) as client:
                    logger.info(f"Connected to VNC {self.vnc_host}:{self.vnc_port}")
                    while screenshot_no < self.max_screenshots:
                        # 30s guard so a stuck VNC server can't hang the task.
                        pixels = await asyncio.wait_for(client.screenshot(), timeout=30)
                        timestamp = time.time()
                        image = Image.fromarray(pixels)
                        image_hash = hasher.compute(image)
                        # Store the frame if it's the first one or differs
                        # enough from the last stored frame.
                        if (
                            not prev_image_hash
                            or hasher.compute_distance(image_hash, prev_image_hash)
                            > self.diff_threshold
                        ):
                            prev_image_hash = image_hash
                            screenshot_no += 1
                            screenshot_name = (
                                screenshot_dir / f"screenshot_{screenshot_no}.png"
                            )
                            image.save(screenshot_name)
                            # One JSON object per line (JSONL) so the log can
                            # be appended to incrementally.
                            screenshot_log.write(
                                json.dumps(
                                    {
                                        "timestamp": timestamp,
                                        "image_hash": image_hash,
                                        "index": screenshot_no,
                                    }
                                )
                                + "\n"
                            )
                            logger.info(f"Got screenshot {screenshot_no}: {image_hash}")
                        await asyncio.sleep(self.loop_interval)
        except asyncio.CancelledError:
            logger.info("Screenshot task was cancelled.")
            # This exception is raised when stop() cancels the task.
        except Exception as e:
            # Best-effort feature: never let screenshot failures propagate
            # into the analysis.
            logger.exception(f"Error in screenshotter task: {e}")
        finally:
            logger.info("Screenshot task finished.")

    def perform_loop(self):
        """Thread entry point: run the capture task on the private loop."""
        asyncio.set_event_loop(self._aioloop)
        try:
            self._task = self._aioloop.create_task(self.perform())
            self._aioloop.run_until_complete(self._task)
        finally:
            # Always release the loop, even if the task raised.
            if not self._aioloop.is_closed():
                self._aioloop.close()
            logger.info("Screenshotter event loop closed.")

    def start(self):
        """Spawn the worker thread with its own asyncio event loop."""
        self._aioloop = asyncio.new_event_loop()
        self._thread = threading.Thread(target=self.perform_loop)
        self._thread.start()

    def stop(self):
        """Cancel the capture task and join the worker thread.

        Safe to call when start() was never called. Waits up to 15 seconds
        for the thread; logs an error instead of blocking forever.
        """
        if not self._thread:
            return

        if self._aioloop and not self._aioloop.is_closed() and self._task:
            logger.info("Stopping screenshotter...")
            # cancel() must run on the loop's own thread.
            self._aioloop.call_soon_threadsafe(self._task.cancel)

        self._thread.join(timeout=15)

        if self._thread.is_alive():
            logger.error("Screenshotter thread did not stop in time!")

        logger.info("Screenshotter stopped.")
116 |
--------------------------------------------------------------------------------
/drakrun/lib/s3_storage.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import pathlib
3 | from typing import BinaryIO, Optional
4 |
5 | import boto3
6 | from botocore.client import BaseClient
7 | from botocore.credentials import (
8 | ContainerProvider,
9 | InstanceMetadataFetcher,
10 | InstanceMetadataProvider,
11 | )
12 | from botocore.exceptions import ClientError
13 | from botocore.session import get_session
14 |
15 | from .config import S3StorageConfigSection
16 |
17 | logger = logging.getLogger(__name__)
18 |
19 |
def is_s3_enabled(s3_config: Optional["S3StorageConfigSection"]) -> bool:
    """Tell whether S3 storage is both configured and switched on."""
    if s3_config is None:
        return False
    return bool(s3_config.enabled)
22 |
23 |
def get_s3_client(s3_config: S3StorageConfigSection) -> BaseClient:
    """Create a boto3 S3 client from the drakrun S3 configuration.

    When ``iam_auth`` is enabled, credentials are resolved from the ECS
    container metadata endpoint or the EC2 instance metadata service;
    otherwise the static access/secret key pair from the config is used.

    Raises:
        RuntimeError: when ``iam_auth`` is set but no provider yields
            credentials.
    """
    if s3_config.iam_auth:
        boto_session = get_session()
        # Providers are tried in order: ECS task role first, then the EC2
        # instance profile.
        iam_providers = [
            ContainerProvider(),
            InstanceMetadataProvider(
                iam_role_fetcher=InstanceMetadataFetcher(timeout=1000, num_attempts=2)
            ),
        ]
        for provider in iam_providers:
            creds = provider.load()
            if creds:
                # NOTE(review): assigns botocore's private ``_credentials``
                # attribute — there is no public API to inject already-resolved
                # credentials into an existing session.
                boto_session._credentials = creds  # type: ignore
                return boto3.Session(botocore_session=boto_session).client(
                    "s3",
                    endpoint_url=s3_config.address,
                )
        else:
            # for/else: reached only when no provider returned credentials.
            raise RuntimeError("Unable to fetch IAM credentials")
    else:
        return boto3.client(
            "s3",
            endpoint_url=s3_config.address,
            aws_access_key_id=s3_config.access_key,
            aws_secret_access_key=s3_config.secret_key,
        )
50 |
51 |
def upload_sample_to_s3(
    analysis_id: str, sample_stream: BinaryIO, s3_client: BaseClient, s3_bucket: str
) -> None:
    """Upload the sample of an analysis to S3.

    The object key is ``<c0>/<c1>/<c2>/<c3>/<analysis_id>.sample``, where the
    ``cN`` are the first four characters of the analysis id — this shards keys
    across prefixes.
    """
    key_parts = list(analysis_id[0:4]) + [f"{analysis_id}.sample"]
    sample_s3_name = "/".join(key_parts)
    logger.info("Uploading sample for analysis %s...", analysis_id)
    s3_client.put_object(Bucket=s3_bucket, Key=sample_s3_name, Body=sample_stream)
    logger.info("Sample for %s uploaded successfully", analysis_id)
59 |
60 |
def download_sample_from_s3(
    analysis_id: str, target_path: pathlib.Path, s3_client: BaseClient, s3_bucket: str
) -> None:
    """Download the original sample of an analysis from S3 to target_path.

    Uses the same sharded key layout as upload_sample_to_s3:
    ``<c0>/<c1>/<c2>/<c3>/<analysis_id>.sample``.
    """
    key_parts = list(analysis_id[0:4]) + [f"{analysis_id}.sample"]
    sample_s3_name = "/".join(key_parts)
    logger.info("Downloading sample for analysis %s...", analysis_id)
    s3_client.download_file(Bucket=s3_bucket, Key=sample_s3_name, Filename=target_path)
    logger.info("Sample for %s downloaded successfully", analysis_id)
68 |
69 |
def upload_analysis(
    analysis_id: str, analysis_path: pathlib.Path, s3_client: BaseClient, s3_bucket: str
) -> None:
    """Upload every file under a local analysis directory to S3.

    Keys mirror the directory structure beneath
    ``<c0>/<c1>/<c2>/<c3>/<analysis_id>/``, with the ``cN`` shard prefix
    taken from the first four characters of the analysis id.
    """
    s3_name_prefix = "/".join([*analysis_id[0:4], analysis_id])

    logger.info("Uploading analysis %s...", analysis_id)
    regular_files = (p for p in analysis_path.rglob("*") if p.is_file())
    for analysis_file in regular_files:
        relative_path = analysis_file.relative_to(analysis_path).as_posix()
        s3_name = f"{s3_name_prefix}/{relative_path}"
        logger.info("Uploading %s/%s...", analysis_id, relative_path)
        with analysis_file.open("rb") as f:
            s3_client.put_object(Bucket=s3_bucket, Key=s3_name, Body=f)
    logger.info("Analysis %s uploaded successfully", analysis_id)
85 |
86 |
def download_analysis(
    analysis_id: str, target_path: pathlib.Path, s3_client: BaseClient, s3_bucket: str
) -> None:
    """Download all files of an analysis from S3 into ``target_path``.

    Mirrors the S3 key structure beneath the sharded
    ``<c0>/<c1>/<c2>/<c3>/<analysis_id>/`` prefix into the local directory,
    creating subdirectories as needed.
    """
    s3_name_prefix = "/".join([*analysis_id[0:4], analysis_id])
    logger.info("Downloading analysis %s...", analysis_id)
    # Bug fix: the previous code iterated the list_objects_v2 *response dict*
    # directly, which yields its top-level keys ("Contents",
    # "ResponseMetadata", ...) instead of the object entries. Use the
    # paginator so listings beyond 1000 keys are also covered.
    paginator = s3_client.get_paginator("list_objects_v2")
    for page in paginator.paginate(Bucket=s3_bucket, Prefix=s3_name_prefix + "/"):
        # "Contents" is absent entirely when the prefix matches no objects.
        for obj in page.get("Contents", []):
            object_key = obj["Key"]
            relative_path = object_key[len(s3_name_prefix + "/") :]
            logger.info("Downloading %s/%s...", analysis_id, relative_path)
            target_file_path = target_path / relative_path
            # Analysis files may live in subdirectories (e.g. screenshots/);
            # download_file does not create parent directories itself.
            target_file_path.parent.mkdir(parents=True, exist_ok=True)
            s3_client.download_file(
                Bucket=s3_bucket, Key=object_key, Filename=str(target_file_path)
            )
    logger.info("Analysis %s downloaded successfully", analysis_id)
102 |
103 |
def is_analysis_on_s3(
    analysis_id: str, s3_client: "BaseClient", s3_bucket: str
) -> bool:
    """Check whether an analysis exists on S3.

    Probes for the ``metadata.json`` object under the sharded analysis
    prefix with a HEAD request.

    Returns:
        True when the metadata object exists, False when it is missing.

    Raises:
        ClientError: for any S3 error other than a missing key.
    """
    s3_name = "/".join([*analysis_id[:4], analysis_id, "metadata.json"])

    try:
        s3_client.head_object(Bucket=s3_bucket, Key=s3_name)
        return True
    except ClientError as e:
        # Bug fix: HEAD responses have no body, so S3 reports a missing key
        # as bare "404" rather than "NoSuchKey" — the old check never matched
        # and a missing analysis raised instead of returning False.
        if e.response["Error"]["Code"] in ("404", "NoSuchKey"):
            return False
        raise
115 |
--------------------------------------------------------------------------------