├── src ├── plugins │ ├── __init__.py │ ├── adtimeline.py │ ├── google.py │ ├── mail.py │ ├── o365.py │ ├── hayabusa.py │ ├── adaudit.py │ └── volatility.py ├── thirdparty │ ├── __init__.py │ ├── ParseMFT │ │ ├── __init__.py │ │ ├── windows_time.py │ │ ├── mft_analyzer.py │ │ └── constants.py │ ├── winactivities │ │ ├── __init__.py │ │ ├── helpers.py │ │ ├── ParseWinactivities.py │ │ ├── logical.py │ │ └── activities.py │ ├── trashparse │ │ ├── _helper.py │ │ ├── _util.py │ │ ├── Trash.py │ │ └── ParseTrash.py │ ├── keycloak │ │ ├── user.py │ │ └── keycloak_utils.py │ ├── AESCipher.py │ ├── ParseTask.py │ ├── ParseLnk.py │ ├── logging.py │ ├── ParsePrefetch.py │ ├── wrapper_docker.py │ ├── ParseWebCache.py │ ├── ParseEVTX.py │ ├── ParseUSNJRNL.py │ ├── ParseJumpList.py │ └── mail │ │ └── mbox_parser.py ├── bin │ ├── unstream │ └── libpff_python-20240826-cp311-cp311-linux_x86_64.whl ├── web │ ├── static │ │ ├── images │ │ │ ├── cert.png │ │ │ └── icon.png │ │ ├── js │ │ │ ├── select.dataTables.js │ │ │ └── dataTables.bootstrap5.js │ │ └── css │ │ │ └── select.dataTables.css │ └── templates │ │ ├── connect.html │ │ ├── collecte_details.html │ │ └── base.html └── __init__.py ├── config ├── certs │ ├── pytriage.crt │ └── pytriage.key └── triage.yaml ├── docs └── images │ ├── parsers.png │ ├── bliimage.png │ ├── pytriage.png │ ├── user_view.png │ ├── zqfimage.png │ ├── plugin_kape.png │ ├── standalone1.png │ ├── standalone2.png │ └── pytriage_blanc.png ├── Docker ├── Dockerfile-volatility ├── Dockerfile └── docker-compose.yml ├── .gitignore ├── requirements.txt └── setup.py /src/plugins/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/thirdparty/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /config/certs/pytriage.crt: -------------------------------------------------------------------------------- 1 | xxx 2 | -------------------------------------------------------------------------------- /config/certs/pytriage.key: -------------------------------------------------------------------------------- 1 | xxx 2 | -------------------------------------------------------------------------------- /src/thirdparty/ParseMFT/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/thirdparty/winactivities/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/bin/unstream: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/src/bin/unstream -------------------------------------------------------------------------------- /docs/images/parsers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/parsers.png -------------------------------------------------------------------------------- /docs/images/bliimage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/bliimage.png 
-------------------------------------------------------------------------------- /docs/images/pytriage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/pytriage.png -------------------------------------------------------------------------------- /docs/images/user_view.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/user_view.png -------------------------------------------------------------------------------- /docs/images/zqfimage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/zqfimage.png -------------------------------------------------------------------------------- /docs/images/plugin_kape.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/plugin_kape.png -------------------------------------------------------------------------------- /docs/images/standalone1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/standalone1.png -------------------------------------------------------------------------------- /docs/images/standalone2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/standalone2.png -------------------------------------------------------------------------------- /docs/images/pytriage_blanc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/docs/images/pytriage_blanc.png -------------------------------------------------------------------------------- /src/web/static/images/cert.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/src/web/static/images/cert.png -------------------------------------------------------------------------------- /src/web/static/images/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/src/web/static/images/icon.png -------------------------------------------------------------------------------- /src/bin/libpff_python-20240826-cp311-cp311-linux_x86_64.whl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CERT-SYNETIS/PyTriage/HEAD/src/bin/libpff_python-20240826-cp311-cp311-linux_x86_64.whl -------------------------------------------------------------------------------- /Docker/Dockerfile-volatility: -------------------------------------------------------------------------------- 1 | FROM python:3.13 2 | LABEL maintainer="SYNETIS " 3 | VOLUME ["/data"] 4 | RUN pip install git+https://github.com/volatilityfoundation/volatility3.git 5 | ENTRYPOINT ["vol"] 6 | #RUN pip install volatility3 -------------------------------------------------------------------------------- /src/thirdparty/trashparse/_helper.py: -------------------------------------------------------------------------------- 1 | import struct 2 | 3 | 4 | def bytes_to_long(byte, format="<q"): 5 | # Unpack the little-endian 4- and 8-byte slices read by Trash.py 6 | if len(byte) == 4: 7 | format = "<l" 8 | return struct.unpack(format, byte)[0] -------------------------------------------------------------------------------- /src/thirdparty/ParseMFT/windows_time.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | 3 | 4 | class WindowsTime: 5 | def __init__(self, low, high) -> None: 6 | 
self.low = int(low) 7 | self.high = int(high) 8 | 9 | if (low == 0) and (high == 0): 10 | self.dt = None 11 | self.dtstr = "Not defined" 12 | self.unixtime = 0 13 | return 14 | 15 | self.unixtime = self.get_unix_time() 16 | 17 | try: 18 | self.dt = datetime.fromtimestamp(self.unixtime, tz=timezone.utc) 19 | self.dtstr = self.dt.isoformat(timespec="milliseconds").replace( 20 | "+00:00", "Z" 21 | ) 22 | except: 23 | self.dt = None 24 | self.dtstr = "Invalid timestamp" 25 | self.unixtime = 0 26 | 27 | def get_unix_time(self) -> float: 28 | t = float(self.high) * 2**32 + self.low 29 | return (t / 10000000) - 11644473600 30 | -------------------------------------------------------------------------------- /src/thirdparty/keycloak/user.py: -------------------------------------------------------------------------------- 1 | from flask_login import UserMixin 2 | import os 3 | 4 | KEYCLOAK_ADMIN_GROUP = os.getenv("KEYCLOAK_ADMIN_GROUP", "admin").lower() 5 | 6 | 7 | class User(UserMixin): 8 | def __init__( 9 | self, 10 | user_id: str, 11 | username: str, 12 | email: str, 13 | first_name: str, 14 | last_name: str, 15 | groups: list, 16 | email_verified: bool = False, 17 | validate_token: bool = False, 18 | token_expires_in: int = 0, 19 | ): 20 | self.id = user_id 21 | self.username = username 22 | self.email = email 23 | self.first_name = first_name 24 | self.last_name = last_name 25 | self.email_verified = email_verified 26 | self.validate_token = validate_token 27 | self.token_expires_in = token_expires_in 28 | self.groups = list(map(lambda x: x.lower(), groups)) 29 | self.admin = KEYCLOAK_ADMIN_GROUP in self.groups 30 | 31 | def get_id(self): 32 | return str(self.id) 33 | -------------------------------------------------------------------------------- /Docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bookworm 2 | LABEL maintainer="SYNETIS " 3 | VOLUME ["/log"] 4 | VOLUME ["/data"] 5 | VOLUME ["/winlogbeat"] 6 | VOLUME ["/hayabusa"] 7 | 8 | WORKDIR "/pytriage" 9 | #This is used for kape vhdx mount 10 | RUN apt update -y && \ 11 | apt upgrade -y && \ 12 | apt install -y apt-transport-https apt-utils && \ 13 | apt install -y build-essential && \ 14 | apt install -y mount && \ 15 | apt install -y python3 && \ 16 | apt install -y python3-pip && \ 17 | apt install -y libguestfs-tools && \ 18 | apt install -y zip && \ 19 | apt install -y openssl && \ 20 | apt install -y pkg-config && \ 21 | #apt install -y pff-tools && \ 22 | apt clean -y && \ 23 | rm -rf /var/cache/apt/* /var/lib/apt/lists/* 24 | 25 | ENV LIBGUESTFS_BACKEND=direct 26 | ENV HOME=/root 27 | #end for kape 28 | COPY . . 
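# Note: the bundled libpff wheel below is built for CPython 3.11 (cp311), which matches the default python3 shipped with Debian bookworm.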
29 | RUN pip3 install --upgrade pip --break-system-packages 30 | RUN pip install -r requirements.txt --break-system-packages 31 | RUN pip install src/bin/libpff_python-20240826-cp311-cp311-linux_x86_64.whl --break-system-packages 32 | EXPOSE 8080 33 | CMD ["python3", "triage.py"] -------------------------------------------------------------------------------- /src/thirdparty/AESCipher.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import hashlib 3 | from Crypto import Random 4 | from Crypto.Cipher import AES 5 | 6 | 7 | class AESCipher(object): 8 | def __init__(self, key): 9 | self.bs = AES.block_size 10 | self.key = hashlib.sha256(key.encode()).digest() 11 | 12 | def encrypt(self, raw): 13 | raw = self._pad(raw) 14 | iv = Random.new().read(AES.block_size) 15 | cipher = AES.new(self.key, AES.MODE_CBC, iv) 16 | return base64.b64encode(iv + cipher.encrypt(raw.encode())) 17 | 18 | def decrypt(self, enc): 19 | enc = base64.b64decode(enc) 20 | iv = enc[: AES.block_size] 21 | cipher = AES.new(self.key, AES.MODE_CBC, iv) 22 | return AESCipher._unpad(cipher.decrypt(enc[AES.block_size :])).decode("utf-8") 23 | 24 | def _pad(self, s): 25 | return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs) 26 | 27 | @staticmethod 28 | def _unpad(s): 29 | return s[: -ord(s[len(s) - 1 :])] 30 | 31 | 32 | if __name__ == "__main__": 33 | try: 34 | _pk_enc = AESCipher(key="e66b37b8-d473-447d-8375-9cef438299df") 35 | key = _pk_enc.encrypt(raw="123456aA") 36 | print(key) 37 | dkey = _pk_enc.decrypt(enc=key) 38 | print(dkey) 39 | except Exception as ex: 40 | print(f"err-- {ex}") 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv*/ 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # Distribution / packaging 9 | .Python 10 | build/ 11 | develop-eggs/ 12 | dist/ 13 | downloads/ 14 | eggs/ 15 | .eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | wheels/ 22 | pip-wheel-metadata/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | config/certs/ 29 | data/ 30 | logs/ 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | 42 | # pyenv 43 | .python-version 44 | 45 | # pipenv 46 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 47 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 48 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 49 | # install all needed dependencies. 50 | #Pipfile.lock 51 | 52 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 53 | __pypackages__/ 54 | 55 | 56 | # Environments 57 | .env 58 | .venv 59 | env/ 60 | venv/ 61 | ENV/ 62 | 63 | # Pyre type checker 64 | .pyre/ 65 | -------------------------------------------------------------------------------- /src/thirdparty/ParseTask.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import xmltodict 3 | from logging import Logger 4 | import json 5 | 6 | 7 | class ParseTask: 8 | """ 9 | Class to parse a Windows Scheduled Task file 10 | """ 11 | 12 | def __init__( 13 | self, 14 | task_file: Path, 15 | result_jsonl_file: Path, 16 | logger: Logger, 17 | ) -> None: 18 | self.task_file = task_file 19 | self.result_jsonl_file = result_jsonl_file 20 | self.logger = logger 21 | 22 | def write_results(self, json_data: dict, output_file: Path): 23 | with open(output_file, "a", encoding="utf-16", errors="ignore") as jsonfile: 24 | json.dump(json_data, jsonfile) 25 | jsonfile.write("\n") 26 | 27 | def analyze(self): 28 | try: 29 | self.logger.info(f"[analyze] Processing Task {self.task_file}") 30 | _tojson = "" 31 | with open( 32 | self.task_file.as_posix(), "r", encoding="utf-16", errors="ignore" 33 | ) as fd: 34 | _tojson = xmltodict.parse(fd.read()) 35 | if _tojson.get("Task", None): 36 | _tojson["Task"]["filename"] = self.task_file.name 37 | self.write_results(json_data=_tojson, output_file=self.result_jsonl_file) 38 | except Exception as ex: 39 | self.logger.error(f"[analyze] --- {ex}") 40 | -------------------------------------------------------------------------------- /src/web/static/js/select.dataTables.js: -------------------------------------------------------------------------------- 1 | /*! DataTables styling wrapper for Select 2 | * © SpryMedia Ltd - datatables.net/license 3 | */ 4 | 5 | (function( factory ){ 6 | if ( typeof define === 'function' && define.amd ) { 7 | // AMD 8 | define( ['jquery', 'datatables.net-dt', 'datatables.net-select'], function ( $ ) { 9 | return factory( $, window, document ); 10 | } ); 11 | } 12 | else if ( typeof exports === 'object' ) { 13 | // CommonJS 14 | var jq = require('jquery'); 15 | var cjsRequires = function (root, $) { 16 | if ( ! $.fn.dataTable ) { 17 | require('datatables.net-dt')(root, $); 18 | } 19 | 20 | if ( ! $.fn.dataTable.select ) { 21 | require('datatables.net-select')(root, $); 22 | } 23 | }; 24 | 25 | if (typeof window === 'undefined') { 26 | module.exports = function (root, $) { 27 | if ( ! root ) { 28 | // CommonJS environments without a window global must pass a 29 | // root. This will give an error otherwise 30 | root = window; 31 | } 32 | 33 | if ( ! 
$ ) { 34 | $ = jq( root ); 35 | } 36 | 37 | cjsRequires( root, $ ); 38 | return factory( $, root, root.document ); 39 | }; 40 | } 41 | else { 42 | cjsRequires( window, jq ); 43 | module.exports = factory( jq, window, window.document ); 44 | } 45 | } 46 | else { 47 | // Browser 48 | factory( jQuery, window, document ); 49 | } 50 | }(function( $, window, document ) { 51 | 'use strict'; 52 | var DataTable = $.fn.dataTable; 53 | 54 | 55 | 56 | 57 | return DataTable; 58 | })); 59 | -------------------------------------------------------------------------------- /src/thirdparty/ParseLnk.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | from .triageutils import file_exists, delete_file 4 | import LnkParse3 5 | 6 | 7 | class ParseLnk: 8 | """ 9 | Class to parse Windows LNK (shortcut) files 10 | """ 11 | 12 | def __init__(self, lnk_file: Path, output: Path, logger) -> None: 13 | """ 14 | The constructor for the ParseLnk class. 15 | """ 16 | self.lnk_file = lnk_file 17 | self.result = output 18 | self.logger = logger 19 | 20 | def parse_file(self, lnk_file: Path) -> dict: 21 | try: 22 | output = dict() 23 | with open(lnk_file, "rb") as indata: 24 | lnk = LnkParse3.lnk_file(indata) 25 | output = lnk.get_json() 26 | except Exception as ex: 27 | self.logger.error(f"[parse_file] {ex}") 28 | return output 29 | 30 | def write_results(self, json_data: dict, output_file: Path): 31 | with open(output_file, "w", encoding="utf-8") as jsonfile: 32 | json.dump(json_data, jsonfile, indent=4, default=str) 33 | 34 | def analyze(self): 35 | try: 36 | self.logger.info(f"[analyze] Processing Lnk {self.lnk_file}") 37 | _res = self.parse_file(lnk_file=self.lnk_file) 38 | if file_exists(file=self.result, logger=self.logger): 39 | delete_file(src=self.result, logger=self.logger) 40 | self.write_results(json_data=_res, output_file=self.result) 41 | except Exception as ex: 42 | self.logger.error(f"[analyze] --- {ex}") 43 | -------------------------------------------------------------------------------- /src/thirdparty/logging.py: -------------------------------------------------------------------------------- 1 | import os, yaml 2 | from logging import basicConfig, getLogger, Logger, FileHandler, Formatter, INFO 3 | 4 | # from .triageutils import INTERNAL_CONFIG 5 | 6 | 7 | def read_config(conf="") -> dict: 8 | """Read the configuration file 9 | 10 | Args: 11 | conf (str): path to the yaml file 12 | Returns: 13 | a dictionary with the contents of the yaml file 14 | """ 15 | with open(conf, "r") as stream: 16 | try: 17 | d = yaml.safe_load(stream) 18 | return d 19 | except yaml.YAMLError as ex: 20 | raise ex 21 | 22 | 23 | INTERNAL_CONFIG = read_config(os.path.join("config", "triage.yaml")) 24 | LOG_FOLDER = INTERNAL_CONFIG["general"]["logfolder"] # /log 25 | 26 | basicConfig( 27 | format="%(asctime)s - %(levelname)s - %(message)s", 28 | datefmt="%d-%m-%Y %H:%M:%S", 29 | level="DEBUG", 30 | ) 31 | 32 | 33 | def get_logger(name: str) -> Logger: 34 | _new_name = f"pytriage_{name}" 35 | file_handler = FileHandler(filename=f"{LOG_FOLDER}/{name}.log", mode="a") 36 | formatter = Formatter("%(asctime)s - %(levelname)s - %(message)s") 37 | _logger = getLogger(_new_name) 38 | file_handler.setLevel(INFO) 39 | file_handler.setFormatter(formatter) 40 | _exists = False 41 | for h in _logger.handlers: 42 | if name in h.baseFilename: 43 | _exists = True 44 | break 45 | # l.removeHandler(h) 46 | if not _exists: 47 | _logger.addHandler(file_handler) 48 | return _logger
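# Usage sketch (illustrative, the logger name is an example): callers fetch a
# shared, file-backed logger by name, and the handler check above keeps
# repeated calls from attaching duplicate file handlers:
#   log = get_logger("hayabusa")
#   log.info("parser started")  # appended to <logfolder>/hayabusa.log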
49 | 50 | 51 | if __name__ == "__main__": 52 | print(LOG_FOLDER) 53 | -------------------------------------------------------------------------------- /src/thirdparty/trashparse/_util.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | from datetime import datetime, timedelta, tzinfo 3 | from calendar import timegm 4 | 5 | EPOCH_AS_FILETIME = 116444736000000000 # January 1, 1970 as MS file time 6 | HUNDREDS_OF_NANOSECONDS = 10000000 7 | HOUR = timedelta(hours=1) 8 | 9 | UNITS = {1000: ["KB", "MB", "GB"], 1024: ["KiB", "MiB", "GiB"]} 10 | 11 | 12 | class UTC(tzinfo): 13 | def utcoffset(self, dt): 14 | return timedelta(0) 15 | 16 | def tzname(self, dt): 17 | return "UTC" 18 | 19 | def dst(self, dt): 20 | return timedelta(0) 21 | 22 | 23 | utc = UTC() 24 | 25 | 26 | class UTCTime(object): 27 | @staticmethod 28 | def time_to_date(ft): 29 | try: 30 | return datetime.utcfromtimestamp( 31 | (ft - EPOCH_AS_FILETIME) / HUNDREDS_OF_NANOSECONDS 32 | ) 33 | except ValueError: 34 | return "year is out of range" 35 | 36 | @staticmethod 37 | def date_to_time(dt): 38 | if (dt.tzinfo is None) or (dt.tzinfo.utcoffset(dt) is None): 39 | dt = dt.replace(tzinfo=utc) 40 | 41 | return EPOCH_AS_FILETIME + (timegm(dt.timetuple()) * HUNDREDS_OF_NANOSECONDS) 42 | 43 | 44 | def approximate_date(timestamp) -> str: 45 | return UTCTime.time_to_date(timestamp).strftime("%d/%m/%Y - %H:%M:%S") 46 | 47 | 48 | def approximate_time(timestamp): 49 | # work on the datetime itself, not the formatted string 50 | dt = UTCTime.time_to_date(timestamp) 51 | td = dt - datetime(1970, 1, 1) 52 | 53 | return (td.microseconds + (td.seconds + td.days * 86400) * 10**6) / 10**6 54 | 55 | 56 | def approximate_size(size, flag_1024_or_1000=True): 57 | mult = 1000 if flag_1024_or_1000 else 1024 58 | for unit in UNITS[mult]: 59 | size = size / mult 60 | if size < mult: 61 | return "{0:.3f} {1}".format(size, unit) 62 | -------------------------------------------------------------------------------- /Docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | services: 3 | pytriage: 4 | build: 5 | context: ..
6 | dockerfile: Docker/Dockerfile 7 | no_cache: true 8 | image: pytriage:4.0.0 9 | container_name: pytriage-preprod 10 | volumes: 11 | - /data/hayabusa:/hayabusa 12 | - /data:/data 13 | - /data/log:/log 14 | environment: 15 | - CELERY_BROKER_URL=redis://redis:6379/0 16 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 17 | - USE_KEYCLOAK=False 18 | - KEYCLOAK_SERVER_URL= 19 | - KEYCLOAK_REALM= 20 | - KEYCLOAK_CLIENT_ID= 21 | - KEYCLOAK_CLIENT_SECRET= 22 | - REDIRECT_URI= 23 | - KEYCLOAK_ADMIN_GROUP=ADMIN 24 | - KEYCLOAK_USERS_GROUP=CERT 25 | ports: 26 | - 443:8080 27 | worker: 28 | image: pytriage:4.0.0 29 | command: celery --app triage.celery worker --loglevel=info 30 | environment: 31 | - CELERY_BROKER_URL=redis://redis:6379/0 32 | - CELERY_RESULT_BACKEND=redis://redis:6379/0 33 | depends_on: 34 | pytriage: 35 | condition: service_started 36 | redis: 37 | condition: service_started 38 | cap_add: 39 | - SYS_ADMIN 40 | devices: 41 | - /dev/fuse 42 | security_opt: 43 | - apparmor:unconfined 44 | volumes: 45 | - /data/hayabusa:/hayabusa 46 | - /data:/data 47 | - /data/log:/log 48 | - /winlogbeat:/winlogbeat 49 | - /var/run/docker.sock:/var/run/docker.sock 50 | redis: 51 | image: redis:8.2.2 52 | plaso: 53 | image: log2timeline/plaso:20240317 # must be the same in triage.yaml 54 | command: ["exit 0"] 55 | 56 | filebeat: 57 | image: elastic/filebeat:8.9.1 #must be the same in triage.yaml 58 | command: ["exit 0"] 59 | 60 | volatility: 61 | build: 62 | context: .. 63 | dockerfile: Docker/Dockerfile-volatility 64 | no_cache: true 65 | image: volatility3:2.26.2 #must be the same in triage.yaml 66 | command: ["exit 0"] -------------------------------------------------------------------------------- /config/triage.yaml: -------------------------------------------------------------------------------- 1 | general: 2 | upload: /data 3 | logfolder: /log 4 | hayabusa_bin_path: "/hayabusa/hayabusa" 5 | 6 | pipelines: 7 | hayabusa: 5057 8 | iis: 5053 9 | evtxparser: 5054 10 | adtimeline: 5056 11 | volatility: 5064 12 | o365: 5049 13 | fortinet: 5065 14 | adaudit: 5066 15 | orc: 5067 16 | filebeat: 5058 17 | selfassessment: 5050 18 | mail: 5061 19 | google: 5051 20 | psort: 5051 21 | 22 | administration: 23 | Timesketch: 24 | active: false 25 | url: "https://xxx" 26 | username: xxx 27 | password: xxx 28 | Elastic: 29 | active: false 30 | url: "https://xxx" 31 | port: 9200 32 | username: xxx 33 | password: xxx 34 | index_patterns: 35 | - "ir-lin-*" 36 | - "ir-orc-*" 37 | - "ir-m365-*" 38 | - "ir-ad-*" 39 | - "secop-ad-*" 40 | - "ir-log-*" 41 | - "dlq-*" 42 | - "ir-evtx-*" 43 | Logstash: 44 | active: false 45 | url: "https://xxx" 46 | Kibana: 47 | url: "https://xxx" 48 | Winlogbeat: 49 | active: false 50 | folder: /winlogbeat 51 | 52 | volumes: 53 | data: /data 54 | 55 | docker_images: 56 | plaso: 57 | image: log2timeline/plaso 58 | tag: "20240317" 59 | filebeat: 60 | image: elastic/filebeat 61 | tag: "8.9.1" 62 | volatility3: 63 | image: volatility3 64 | tag: "2.26.2" 65 | 66 | artifacts: 67 | apache: 68 | - /var/log/apache/*access.log* 69 | - /var/log/apache2/*access.log* 70 | - /var/log/apache/*error.log* 71 | - /var/log/apache2/*error.log* 72 | - /etc/httpd/logs/*access.log* 73 | - /etc/httpd/logs/*error.log* 74 | auditd: 75 | - /var/log/audit/audit.log* 76 | nginx: 77 | - /var/log/nginx/*access.log* 78 | - /var/log/nginx/*error.log* 79 | suricata: 80 | - /var/log/*eve.json* 81 | system: 82 | - /var/log/*auth.log* 83 | - /var/log/syslog* 84 | - /var/log/btmp* 85 | - /var/log/wtmp* 86 | - /var/log/utmp* 87 | 
tomcat: 88 | - /var/log/tomcat6/*catalina* 89 | 90 | plaso_parsers: 91 | - "!mft" 92 | - "!usnjrnl" 93 | - "!filestat" 94 | 95 | -------------------------------------------------------------------------------- /src/thirdparty/ParsePrefetch.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | import pyscca 4 | 5 | 6 | class ParsePrefetch: 7 | """ 8 | Class to parse Windows Prefetch files 9 | """ 10 | 11 | def __init__(self, prefetch: Path, output: Path, logger) -> None: 12 | """ 13 | The constructor for the ParsePrefetch class. 14 | """ 15 | self.prefetch = prefetch 16 | self.result = output 17 | self.logger = logger 18 | 19 | def parse_file(self, pf_file: Path): 20 | try: 21 | output = dict() 22 | scca = pyscca.open(str(pf_file)) 23 | last_run_times = [] 24 | for x in range(8): 25 | if scca.get_last_run_time_as_integer(x) > 0: 26 | last_run_times.append( 27 | scca.get_last_run_time(x).strftime("%Y-%m-%d %H:%M:%S") 28 | ) 29 | else: 30 | last_run_times.append("N/A") 31 | output["executable"] = str(scca.executable_filename) 32 | output["run_count"] = str(scca.run_count) 33 | output["hash"] = str(scca.prefetch_hash) 34 | output["last_runs"] = last_run_times 35 | output["number_of_volumes"] = scca.number_of_volumes 36 | volumes = [] 37 | for i in range(scca.number_of_volumes): 38 | volume = { 39 | "path": str(scca.get_volume_information(i).device_path), 40 | "creation_time": scca.get_volume_information( 41 | i 42 | ).creation_time.strftime("%Y-%m-%d %H:%M:%S"), 43 | "serial_number": format( 44 | scca.get_volume_information(i).serial_number, "x" 45 | ).upper(), 46 | } 47 | volumes.append(volume) 48 | output["volumes"] = volumes 49 | output["files"] = list() 50 | for _i in scca.filenames: 51 | output["files"].append(_i) 52 | except Exception as ex: 53 | self.logger.error(f"[parse_file] {ex}") 54 | 55 | return output 56 | 57 | def write_results(self, json_data: dict, output_file: Path): 58 | with open(output_file, "w", encoding="utf-8") as jsonfile: 59 | json.dump(json_data, jsonfile, indent=4, default=str) 60 | 61 | def analyze(self): 62 | try: 63 | self.logger.info(f"[analyze] Processing prefetch {self.prefetch}") 64 | _res = self.parse_file(pf_file=self.prefetch) 65 | self.write_results(json_data=_res, output_file=self.result) 66 | except Exception as ex: 67 | self.logger.error(f"[analyze] --- {ex}") 68 | -------------------------------------------------------------------------------- /src/thirdparty/trashparse/Trash.py: -------------------------------------------------------------------------------- 1 | from __future__ import unicode_literals 2 | from ._helper import bytes_to_long 3 | from unidecode import unidecode 4 | import os 5 | 6 | 7 | class Trash(object): 8 | def __init__(self, path): 9 | self.handle = open(path, "rb") 10 | self.data = self.handle.read() 11 | 12 | @property 13 | def name(self): 14 | return self.handle.name 15 | 16 | @property 17 | def dirname(self): 18 | return os.path.dirname(self.name) 19 | 20 | @property 21 | def basename(self): 22 | return os.path.basename(self.name) 23 | 24 | @property 25 | def content(self): 26 | path = os.path.join(self.dirname, "$R" + self.basename[2:]) 27 | 28 | with open(path) as handle: 29 | return handle.read() 30 | 31 | 32 | class TrashInfo(Trash): 33 | @property 34 | def index_type(self): 35 | return "$I" 36 | 37 | @property 38 | def version(self): 39 | if bytes_to_long(self.data[:4]) == 2: 40 | return "Win 10" 41 | 42 | return "Win Vista-8.1" 43 | 44 | @property 45 | 
def filesize(self): 46 | return bytes_to_long(self.data[8:16]) 47 | 48 | @property 49 | def deleted_time(self): 50 | return bytes_to_long(self.data[16:24]) 51 | 52 | @property 53 | def original_path_length(self): 54 | if self.version == "Win 10": 55 | return bytes_to_long(self.data[24:28]) 56 | 57 | @property 58 | def original_path(self): 59 | if self.version == "Win 10": 60 | pathname = self.data[28:] 61 | else: 62 | pathname = self.data[24:] 63 | 64 | try: 65 | pathname = pathname[::2][:-1].decode() 66 | except UnicodeError: 67 | pathname = pathname[::2][:-1].decode("ISO-8859-1") 68 | 69 | return unidecode(pathname).strip("\x00") 70 | 71 | @property 72 | def original_name(self): 73 | return os.path.basename(self.original_path.replace("\\", "/")) 74 | 75 | @property 76 | def extension(self): 77 | return os.path.splitext(self.original_name)[-1] 78 | 79 | @property 80 | def type(self): 81 | if self.extension or self.filesize > 0: 82 | return "file" 83 | return "dir" 84 | 85 | 86 | class TrashInfo2(Trash): 87 | pass 88 | 89 | 90 | def identify_type(name): 91 | assert name.startswith("$I") or name.startswith( 92 | "INFO2" 93 | ), "cannot identify artifact file '%s'" % (name) 94 | 95 | return "$I" if name.startswith("$I") else "INFO2" 96 | 97 | 98 | def inspect(path): 99 | basename = os.path.basename(path) 100 | identified_type = identify_type(basename) 101 | 102 | return TrashInfo(path) 103 | -------------------------------------------------------------------------------- /src/web/static/css/select.dataTables.css: -------------------------------------------------------------------------------- 1 | table.dataTable > tbody > tr > .selected { 2 | background-color: rgba(13, 110, 253, 0.9); 3 | color: white; 4 | } 5 | table.dataTable > tbody > tr > .dt-select { 6 | text-align: center; 7 | vertical-align: middle; 8 | } 9 | table.dataTable > thead > tr > .dt-select { 10 | text-align: center; 11 | } 12 | table.dataTable input.dt-select-checkbox { 13 | appearance: none; 14 | position: relative; 15 | display: inline-block; 16 | width: 12px; 17 | height: 12px; 18 | border: 1px solid; 19 | border-radius: 3px; 20 | vertical-align: middle; 21 | margin-top: 1px; 22 | color: inherit; 23 | font-size: 20px; 24 | line-height: 1em; 25 | } 26 | table.dataTable input.dt-select-checkbox:checked:after { 27 | display: block; 28 | content: "✓"; 29 | margin-top: -8px; 30 | } 31 | table.dataTable input.dt-select-checkbox:indeterminate:after { 32 | display: block; 33 | position: absolute; 34 | content: " "; 35 | top: 3px; 36 | left: 3px; 37 | height: 4px; 38 | width: 4px; 39 | background-color: black; 40 | border-radius: 2px; 41 | } 42 | table.dataTable > tbody > tr.selected input.dt-select-checkbox:checked { 43 | border: 1px solid; 44 | } 45 | table.dataTable > tbody > tr > td.select-checkbox, 46 | table.dataTable > tbody > tr > th.select-checkbox { 47 | position: relative; 48 | } 49 | table.dataTable > tbody > tr > td.select-checkbox:before, 50 | table.dataTable > tbody > tr > th.select-checkbox:before { 51 | display: block; 52 | position: absolute; 53 | top: 50%; 54 | left: 50%; 55 | width: 12px; 56 | height: 12px; 57 | box-sizing: border-box; 58 | content: " "; 59 | margin-top: -6px; 60 | margin-left: -6px; 61 | border: 1px solid; 62 | border-radius: 3px; 63 | } 64 | table.dataTable > tbody > tr.selected > td.select-checkbox:before, 65 | table.dataTable > tbody > tr.selected > th.select-checkbox:before { 66 | border: 1px solid; 67 | content: "✓"; 68 | font-size: 20px; 69 | line-height: 6px; 70 | text-align: center; 
71 | } 72 | table.dataTable.compact > tbody > tr > td.select-checkbox:before, 73 | table.dataTable.compact > tbody > tr > th.select-checkbox:before { 74 | margin-top: -12px; 75 | } 76 | table.dataTable.compact > tbody > tr.selected > td.select-checkbox:after, 77 | table.dataTable.compact > tbody > tr.selected > th.select-checkbox:after { 78 | margin-top: -16px; 79 | } 80 | 81 | div.dt-container span.select-info, 82 | div.dt-container span.select-item { 83 | margin-left: 0.5em; 84 | } 85 | 86 | html.dark table.dataTable input.dt-select-checkbox:indeterminate:after, 87 | html[data-bs-theme=dark] table.dataTable input.dt-select-checkbox:indeterminate:after { 88 | background-color: white; 89 | } 90 | 91 | @media screen and (max-width: 640px) { 92 | div.dt-container span.select-info, 93 | div.dt-container span.select-item { 94 | margin-left: 0; 95 | display: block; 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /src/plugins/adtimeline.py: -------------------------------------------------------------------------------- 1 | import os 2 | from src.thirdparty import triageutils as triageutils 3 | from src import BasePlugin, Status 4 | 5 | 6 | class Plugin(BasePlugin): 7 | """ 8 | ADTimeline plugin for triage 9 | """ 10 | 11 | def __init__(self, conf: dict): 12 | super().__init__(config=conf) 13 | self.adtimeline_csv_file = os.path.join( 14 | self.upload_dir, conf["archive"]["name"] 15 | ) 16 | self.adtimeline_dir = os.path.join(self.upload_dir, self.hostname, "ADTimeline") 17 | self.adtimeline_json_file = os.path.join( 18 | self.adtimeline_dir, f"ADTimeline_{self.clientname}.json" 19 | ) 20 | triageutils.create_directory_path(path=self.adtimeline_dir, logger=self.logger) 21 | self.config["general"]["extracted_zip"] = f"{self.adtimeline_dir}" 22 | self.update_config_file(data=self.config) 23 | 24 | @triageutils.LOG 25 | def send_to_elk(self, json_data: list = [], logger=None): 26 | """Send the ADTimeline results to ELK""" 27 | try: 28 | ip = self.logstash_url 29 | if ip.startswith("http"): 30 | ip = self.logstash_url.split("//")[1] 31 | extrafields = dict() 32 | extrafields["csirt"] = dict() 33 | extrafields["csirt"]["client"] = self.clientname.lower() 34 | extrafields["csirt"]["application"] = "adtimeline" 35 | extrafields["csirt"]["hostname"] = self.hostname.lower() 36 | triageutils.send_data_to_elk( 37 | data=json_data, 38 | ip=ip, 39 | port=self.adtimeline_port, 40 | logger=self.logger, 41 | extrafields=extrafields, 42 | ) 43 | except Exception as e: 44 | self.error(f"[send_to_elk] {str(e)}") 45 | raise e 46 | 47 | @triageutils.LOG 48 | def run(self, logger=None): 49 | """Main function that runs the ADTimeline export to ELK 50 | 51 | Args: 52 | 53 | Returns: 54 | 55 | """ 56 | try: 57 | self.update_workflow_status(plugin="adtimeline", status=Status.STARTED) 58 | extrafields = dict() 59 | extrafields["csirt"] = dict() 60 | extrafields["csirt"]["client"] = self.clientname.lower() 61 | extrafields["csirt"]["application"] = "adtimeline" 62 | res = triageutils.csv_to_json( 63 | csvFilePath=self.adtimeline_csv_file, 64 | jsonFilePath=self.adtimeline_json_file, 65 | writeToFile=True, 66 | extrafields=extrafields, 67 | logger=self.logger, 68 | ) 69 | self.send_to_elk(json_data=res, logger=self.logger) 70 | self.update_workflow_status(plugin="adtimeline", status=Status.FINISHED) 71 | except Exception as ex: 72 | self.error(f"[ADTimeline] run {str(ex)}") 73 | self.update_workflow_status(plugin="adtimeline", 
status=Status.ERROR) 74 | raise ex 75 | finally: 76 | self.info("[ADTimeline] End processing") 77 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiocsv==1.3.2 2 | aiofiles==24.1.0 3 | altair==5.5.0 4 | amqp==5.3.1 5 | anyio==4.9.0 6 | asgiref==3.9.1 7 | async-property==0.2.2 8 | async-timeout==5.0.1 9 | attrs==25.3.0 10 | beautifulsoup4==4.13.4 11 | billiard==4.2.1 12 | black==25.1.0 13 | blinker==1.9.0 14 | Brotli==1.2.0 15 | bs4==0.0.2 16 | build==1.2.2.post1 17 | cachetools==5.5.2 18 | celery==5.5.3 19 | certifi==2025.7.14 20 | cffi==1.17.1 21 | chardet==5.2.0 22 | charset-normalizer==3.4.2 23 | click==8.2.1 24 | click-didyoumean==0.3.1 25 | click-plugins==1.1.1.2 26 | click-repl==0.3.0 27 | construct==2.10.70 28 | cryptography==45.0.5 29 | deprecation==2.1.0 30 | docker==7.1.0 31 | elasticsearch==8.19.0 32 | entrypoints==0.4 33 | enum-compat==0.0.3 34 | Flask==3.1.1 35 | Flask-Login==0.6.3 36 | Flask-WTF==1.2.2 37 | flower==2.0.1 38 | frozenlist==1.7.0 39 | google-auth==2.40.3 40 | google-auth-oauthlib==1.2.2 41 | h11==0.16.0 42 | hexdump==3.3 43 | httpcore==1.0.9 44 | httpx==0.28.1 45 | humanize==4.12.3 46 | idna==3.10 47 | importlib_resources==6.5.2 48 | inflate64==1.0.3 49 | inflection==0.5.1 50 | inotify==0.2.12 51 | itsdangerous==2.2.0 52 | Jinja2==3.1.6 53 | jsonschema==4.25.0 54 | jsonschema-specifications==2025.4.1 55 | jwcrypto==1.5.6 56 | kombu==5.5.4 57 | libesedb-python==20240420 58 | libfwps-python==20240417 59 | libfwsi-python==20240423 60 | libpff-python==20231205 61 | libscca-python==20250915 62 | LnkParse3==1.5.2 63 | lxml==6.0.0 64 | mailbox==0.4 65 | markdown-it-py==3.0.0 66 | MarkupSafe==3.0.2 67 | mdurl==0.1.2 68 | multidict==6.6.3 69 | multivolumefile==0.2.3 70 | mypy_extensions==1.1.0 71 | narwhals==2.0.1 72 | nest-asyncio==1.6.0 73 | networkx==3.5 74 | nose==1.3.7 75 | numpy==2.3.2 76 | oauthlib==3.3.1 77 | olefile==0.47 78 | packaging==25.0 79 | pandas==2.3.1 80 | pathspec==0.12.1 81 | pip_search==0.0.14 82 | platformdirs==4.3.8 83 | prometheus_client==0.22.1 84 | prompt_toolkit==3.0.51 85 | propcache==0.3.2 86 | psutil==7.0.0 87 | py7zr==1.0.0 88 | pyasn1==0.6.1 89 | pyasn1_modules==0.4.2 90 | pybcj==1.0.6 91 | pycparser==2.22 92 | pycryptodome==3.23.0 93 | pycryptodomex==3.23.0 94 | Pygments==2.19.2 95 | PyJWT==2.10.1 96 | pylnk3==0.4.3 97 | pyppmd==1.2.0 98 | pyproject_hooks==1.2.0 99 | pyrsistent==0.20.0 100 | python-dateutil==2.9.0.post0 101 | python-evtx==0.8.1 102 | python-keycloak==5.7.0 103 | python-magic==0.4.27 104 | python-registry==1.3.1 105 | python-slugify==8.0.4 106 | pytsk3==20250729 107 | pytz==2025.2 108 | PyYAML==6.0.2 109 | pyzipper==0.3.6 110 | pyzstd==0.17.0 111 | redis==6.2.0 112 | referencing==0.36.2 113 | regipy==5.2.0 114 | regrippy==2.0.2 115 | requests==2.32.4 116 | requests-oauthlib==2.0.0 117 | requests-toolbelt==1.0.0 118 | rich==14.1.0 119 | rpds-py==0.26.0 120 | rsa==4.9.1 121 | six==1.17.0 122 | sniffio==1.3.1 123 | soupsieve==2.7 124 | tabulate==0.9.0 125 | text-unidecode==1.3 126 | texttable==1.7.0 127 | timesketch-api-client==20250521 128 | timesketch-import-client==20241009 129 | tomli==2.2.1 130 | toolz==1.0.0 131 | tornado==6.5.1 132 | typing_extensions==4.14.1 133 | tzdata==2025.2 134 | ujson==5.10.0 135 | unicodecsv==0.14.1 136 | Unidecode==1.4.0 137 | urllib3==2.6.0 138 | vine==5.1.0 139 | wcwidth==0.2.13 140 | websocket-client==1.8.0 141 | Werkzeug==3.1.4 142 | xlrd==2.0.2 143 | 
xmltodict==0.14.2 144 | yarl==1.20.1 145 | zipfile-deflate64==0.2.0 -------------------------------------------------------------------------------- /src/thirdparty/trashparse/ParseTrash.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import division, unicode_literals 3 | 4 | from collections import OrderedDict 5 | from ._util import approximate_date 6 | from .Trash import inspect 7 | from glob import glob 8 | from pathlib import Path 9 | from logging import Logger 10 | import os, csv, json 11 | 12 | FORMAT = { 13 | "csv": "get_csv_string", 14 | "json": "get_json_string", 15 | "html": "get_html_string", 16 | } 17 | 18 | 19 | class TrashParse(object): 20 | def __init__(self, recyclebin_folder: Path, logger: Logger): 21 | self.files = list() 22 | self.path = recyclebin_folder 23 | self._results = OrderedDict() 24 | self._logger = logger 25 | 26 | def listfile(self): 27 | try: 28 | self.files.extend( 29 | glob(os.path.join(self.path, "INFO2*[! ]")) 30 | + glob(os.path.join(self.path, "$I*[! ]")) 31 | ) 32 | except Exception as ex: 33 | self._logger.error(f"[listfile] {ex}") 34 | self.files = list() 35 | 36 | def parsefile(self) -> OrderedDict: 37 | try: 38 | for file in self.files: 39 | fileinfo = inspect(file) 40 | filename = fileinfo.basename 41 | 42 | if fileinfo.index_type == "INFO2": 43 | continue # Not implemented yet 44 | else: 45 | self._results[filename] = fileinfo 46 | return self._results 47 | except Exception as ex: 48 | self._logger.error(f"[parsefile] {ex}") 49 | self._results = OrderedDict() 50 | return self._results 51 | 52 | def write_csv(self, csv_file: Path): 53 | try: 54 | # delete_file(src=csv_file, logger=self._logger) 55 | _data = list() 56 | for name, fileinfo in self._results.items(): 57 | _row = dict() 58 | _row["name"] = name 59 | _row["DeletedTime"] = approximate_date(fileinfo.deleted_time) 60 | _row["filesize"] = fileinfo.filesize 61 | _row["type"] = fileinfo.type 62 | _row["version"] = fileinfo.version 63 | _row["original_path"] = fileinfo.original_path 64 | _data.append(_row) 65 | with open(csv_file, "w", newline="") as csvfile: 66 | fieldnames = [ 67 | "name", 68 | "DeletedTime", 69 | "filesize", 70 | "type", 71 | "version", 72 | "original_path", 73 | ] 74 | writer = csv.DictWriter(csvfile, fieldnames=fieldnames) 75 | writer.writeheader() 76 | writer.writerows(_data) 77 | except Exception as ex: 78 | self._logger.error(f"[write_csv] {ex}") 79 | 80 | def write_jsonl(self, jsonl_file: Path): 81 | try: 82 | # delete_file(src=jsonl_file, logger=self._logger) 83 | _data = list() 84 | for name, fileinfo in self._results.items(): 85 | _row = dict() 86 | _row["name"] = name 87 | _row["datetime"] = approximate_date(fileinfo.deleted_time) 88 | _row["filesize"] = fileinfo.filesize 89 | _row["type"] = fileinfo.type 90 | _row["version"] = fileinfo.version 91 | _row["original_path"] = fileinfo.original_path 92 | _data.append(_row) 93 | with open(jsonl_file, "w", encoding="utf-8") as jsonf: 94 | for _entry in _data: 95 | json.dump(_entry, jsonf) 96 | jsonf.write("\n") 97 | except Exception as ex: 98 | self._logger.error(f"[write_jsonl] {ex}") 99 | -------------------------------------------------------------------------------- /src/web/static/js/dataTables.bootstrap5.js: -------------------------------------------------------------------------------- 1 | /*! 
DataTables Bootstrap 5 integration 2 | * 2020 SpryMedia Ltd - datatables.net/license 3 | */ 4 | 5 | (function( factory ){ 6 | if ( typeof define === 'function' && define.amd ) { 7 | // AMD 8 | define( ['jquery', 'datatables.net'], function ( $ ) { 9 | return factory( $, window, document ); 10 | } ); 11 | } 12 | else if ( typeof exports === 'object' ) { 13 | // CommonJS 14 | var jq = require('jquery'); 15 | var cjsRequires = function (root, $) { 16 | if ( ! $.fn.dataTable ) { 17 | require('datatables.net')(root, $); 18 | } 19 | }; 20 | 21 | if (typeof window === 'undefined') { 22 | module.exports = function (root, $) { 23 | if ( ! root ) { 24 | // CommonJS environments without a window global must pass a 25 | // root. This will give an error otherwise 26 | root = window; 27 | } 28 | 29 | if ( ! $ ) { 30 | $ = jq( root ); 31 | } 32 | 33 | cjsRequires( root, $ ); 34 | return factory( $, root, root.document ); 35 | }; 36 | } 37 | else { 38 | cjsRequires( window, jq ); 39 | module.exports = factory( jq, window, window.document ); 40 | } 41 | } 42 | else { 43 | // Browser 44 | factory( jQuery, window, document ); 45 | } 46 | }(function( $, window, document ) { 47 | 'use strict'; 48 | var DataTable = $.fn.dataTable; 49 | 50 | 51 | 52 | /** 53 | * DataTables integration for Bootstrap 5. This requires Bootstrap 5 and 54 | * DataTables 2 or newer. 55 | * 56 | * This file sets the defaults and adds options to DataTables to style its 57 | * controls using Bootstrap. See https://datatables.net/manual/styling/bootstrap 58 | * for further information. 59 | */ 60 | 61 | /* Set the defaults for DataTables initialisation */ 62 | $.extend( true, DataTable.defaults, { 63 | renderer: 'bootstrap' 64 | } ); 65 | 66 | 67 | /* Default class modification */ 68 | $.extend( true, DataTable.ext.classes, { 69 | container: "dt-container dt-bootstrap5", 70 | search: { 71 | input: "form-control form-control-sm" 72 | }, 73 | length: { 74 | select: "form-select form-select-sm" 75 | }, 76 | processing: { 77 | container: "dt-processing card" 78 | } 79 | } ); 80 | 81 | 82 | /* Bootstrap paging button renderer */ 83 | DataTable.ext.renderer.pagingButton.bootstrap = function (settings, buttonType, content, active, disabled) { 84 | var btnClasses = ['dt-paging-button', 'page-item']; 85 | 86 | if (active) { 87 | btnClasses.push('active'); 88 | } 89 | 90 | if (disabled) { 91 | btnClasses.push('disabled') 92 | } 93 | 94 | var li = $('
<li>').addClass(btnClasses.join(' ')); 95 | var a = $('<a>', { 96 | 'href': disabled ? null : '#', 97 | 'class': 'page-link' 98 | }) 99 | .html(content) 100 | .appendTo(li); 101 | 102 | return { 103 | display: li, 104 | clicker: a 105 | }; 106 | }; 107 | 108 | DataTable.ext.renderer.pagingContainer.bootstrap = function (settings, buttonEls) { 109 | return $('