├── requirements.txt ├── ttps.csv ├── .vscode ├── settings.json └── launch.json ├── modules ├── reporting.py ├── configuration.py ├── sql │ ├── sqlite_model.py │ └── sqlite_func.py ├── ttp.py ├── utils.py └── parser │ ├── br_parser.py │ ├── oc2_parser.py │ └── cs_parser.py ├── README.md ├── .gitignore ├── config_template.yml ├── gimmelogs.py └── LICENSE /requirements.txt: -------------------------------------------------------------------------------- 1 | sqlalchemy 2 | pyyaml -------------------------------------------------------------------------------- /ttps.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Patrick-DE/C2-logparser/HEAD/ttps.csv -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.analysis.extraPaths": [ 3 | "./modules", 4 | "./modules/sql" 5 | ] 6 | } -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python: Dbg-GenDB", 9 | "type": "debugpy", 10 | "request": "launch", 11 | "program": "${workspaceFolder}/gimmelogs.py", 12 | "console": "integratedTerminal", 13 | "args":["-w 1", "-l ./", "-c config.yml", "-m"] 14 | }, 15 | { 16 | "name": "Python: Dbg-Report", 17 | "type": "debugpy", 18 | "request": "launch", 19 | "program": "${workspaceFolder}/gimmelogs.py", 20 | "console": "integratedTerminal", 21 | "args":["-w 1", "-p C:\\Users\\patri\\Downloads\\logs\\cs", "-c config.yml", "-m"] 22 | }, 23 | { 24 | "name": "Python: MultiThread", 25 | "type": "debugpy", 26 | "request": "launch", 27 | "program": "${workspaceFolder}/gimmelogs.py", 28 | "console": "integratedTerminal", 29 | "args": ["-w 16", "-p ./", "-c config.yml", "-m"] 30 | }, 31 | { 32 | "name": "Python: CS DEBUG", 33 | "type": "debugpy", 34 | "request": "launch", 35 | "program": "${workspaceFolder}/gimmelogs.py", 36 | "console": "integratedTerminal", 37 | "args": ["-x cs", "-w 1", "-m", "-l C:\\Users\\patri\\Downloads\\logs\\cs\\sc5", "-c config.yml"] 38 | }, 39 | { 40 | "name": "Python: BR DEBUG", 41 | "type": "debugpy", 42 | "request": "launch", 43 | "program": "${workspaceFolder}/gimmelogs.py", 44 | "console": "integratedTerminal", 45 | "args": ["-x br", "-w 1", "-m", "-l C:\\Users\\patri\\Downloads\\logs\\br", "-c config.yml"] 46 | }, 47 | ] 48 | } -------------------------------------------------------------------------------- /modules/reporting.py: -------------------------------------------------------------------------------- 1 | from modules.sql.sqlite_func import * 2 | from modules.utils import * 3 | from modules.ttp import * 4 | 5 | def sort_on_timestamp(elem: Entry): 6 | return elem.timestamp 7 | 8 | def sort_on_joined(elem: Beacon): 9 | return elem.joined 10 | 11 | def report_input_task(output): 12 | # input report 13 | entries = get_all_entries_filtered(filter=EntryType.input) 14 | entries = entries + get_all_entries_filtered(filter=EntryType.task) 15 | entries.sort(key=sort_on_timestamp) 16 | rows = [] 17 | for entry in entries: 18 | rows.append(entry.to_row()) 19 | header = ["Date", "Time", "Hostname", "Command", "User", "IP"] 20 | write_to_csv(os.path.join(output,"activity-report.csv"), header, rows) 21 | 22 | def report_dl_ul(output): 23 | """ 24 | get download and upload report 25 | """ 26 | entries = get_all_entries_filtered_containing(filter=EntryType.task, cont="Tasked beacon to download") 27 | entries = entries + get_upload_entries() 28 | entries.sort(key=sort_on_timestamp) 29 | rows = [] 30 | for entry in entries: 31 | rows.append(entry.to_row(redacting=False)) 32 | header = ["Date", "Time", "Hostname", "File", "User", "IP"] 33 | write_to_csv(os.path.join(output,"dl-ul-report.csv"), header, rows) 34 | 35 | def report_all_beacons_spawned(output): 36 | beacons = get_all_valid_beacons() 37 | beacons.sort(key=sort_on_joined) 38 | rows = [] 39 | for beacon in beacons: 40 | rows.append(beacon.to_row()) 41 | header = ["Hostname", "IP", "Internet via IP", "User", "Process", "Process ID", "Joined", "Exited"] 42 | write_to_csv(os.path.join(output,"beacon-report.csv"), header, rows) 43 | 44 | def report_all_indicators(output): 45 | """ 46 | get download and upload report 47 | """ 48 | entries = get_all_entries_filtered(filter=EntryType.indicator) 49 | entries.sort(key=sort_on_timestamp) 50 | rows = [] 51 | for entry in entries: 52 | rows.append(entry.to_row(redacting=False)) 53 | header = ["Date", "Time", "Hostname", "File", "User", "IP"] 54 | write_to_csv(os.path.join(output,"ioc-report.csv"), header, rows) 55 | 56 | 57 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # C2 Log-Parser 2 | ## Support for Cobalt Strike and Brute Ratel 3 | 4 | ## Usage 5 | Quick usage: 6 | ``` 7 | python3 gimmelogs.py -l 8 | ``` 9 | Recommended usage: 10 | ``` 11 | python3 gimmelogs.py -l -c config.yml -m 12 | ``` 13 | 1. Download the CobaltStrike "logs" folder to disk and specify this folder as -l logs. 14 | 2. For cleaner reports choose -m 15 | 3. If you are testing your payloads exclude them via the config -c 16 | 4. Specify the -p PATH to generate the reports and DB into a custom folder 17 | 18 | ## Commands 19 | ``` 20 | Parse CobaltStrike logs and store them in a DB to create reports 21 | 22 | optional arguments: 23 | -h, --help show this help message and exit 24 | -w WORKER, --worker WORKER Set amount of workers: default=10 25 | -v, --verbose Activate debugging 26 | -l LOGS, --logs LOGS Directory path to start crawling the logs 27 | -p PATH, --path PATH Output path for the reports and DB 28 | -m, --minimize Remove unnecessary data: keyloggs,beaconbot,sleep,exit,clear 29 | -c CONFIG, --config CONFIG A config file, see config_template.yml 30 | -x PARSER, --parser Select either "cs" (default) or "br" 31 | ``` 32 | ## Reporting 33 | * Report for input and tasks being issued via CobaltStrike 34 | * Contains INPUT (operator input) and TASK (cna + response from input) 35 | * Report for downloaded and uploaded files 36 | * Contains download.log, INDICATOR (hash and filename) and entries containing the following keyphrases: 37 | * Uploading beaconloader: 38 | * Uploading payload file: 39 | * Tasked beacon to upload 40 | * Not really pretty right now 🤷‍♂️ 41 | * Report of the valid beacons. They have the following set: 42 | * Beacon.hostname 43 | * Beacon.joined 44 | 45 | 46 | ## Remarks 47 | * Only beacons with input or tasks are being listed to allow the report to focus on actual actions instead of an complete picture. As a result, beacons which will just be spawned due to persistence and not be used will be ignored. 48 | * Beacons which have not been used (no metadata), thus listed under the unknown folder will be ignored 49 | * Beacons without associated IDs, usually happens from broken .cna scripts will be ignored 50 | 51 | ## Todos 52 | ✔ Make it work 😂 53 | ✔ No support for linux as of now :( 54 | ❌ Create cleaner download / upload report 55 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | #custom 2 | logs/ 3 | log.db 4 | results/ 5 | config.yml 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | pip-wheel-metadata/ 30 | share/python-wheels/ 31 | *.egg-info/ 32 | .installed.cfg 33 | *.egg 34 | MANIFEST 35 | 36 | # PyInstaller 37 | # Usually these files are written by a python script from a template 38 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 39 | *.manifest 40 | *.spec 41 | 42 | # Installer logs 43 | pip-log.txt 44 | pip-delete-this-directory.txt 45 | 46 | # Unit test / coverage reports 47 | htmlcov/ 48 | .tox/ 49 | .nox/ 50 | .coverage 51 | .coverage.* 52 | .cache 53 | nosetests.xml 54 | coverage.xml 55 | *.cover 56 | *.py,cover 57 | .hypothesis/ 58 | .pytest_cache/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | gimmelogs.py 137 | -------------------------------------------------------------------------------- /modules/configuration.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from enum import Enum 3 | from typing import List, Dict 4 | import yaml 5 | import ipaddress 6 | 7 | @dataclass 8 | class Flags: 9 | case_insensitive: bool = True 10 | replacement: str = r"\1[REDACTED]" 11 | 12 | @dataclass 13 | class Pattern: 14 | pattern: str 15 | description: str 16 | 17 | @dataclass 18 | class Redactions: 19 | patterns: Dict[str, Pattern] 20 | flags: Flags 21 | 22 | @dataclass 23 | class AndCommand: 24 | _and: List[str] 25 | 26 | @dataclass 27 | class RegexCommand: 28 | _regex: List[str] 29 | 30 | @dataclass 31 | class Exclusions: 32 | internal: List[str] 33 | external: List[str] 34 | hostnames: List[str] 35 | commands: List[str|AndCommand|RegexCommand] 36 | 37 | @dataclass 38 | class Config: 39 | exclusions: Exclusions 40 | redactions: Redactions 41 | 42 | # Initialize the global config with empty placeholders 43 | config: Config = None 44 | 45 | 46 | def load_config(filename: str) -> Config: 47 | """Load and process configuration from YAML file""" 48 | global config 49 | with open(filename) as f: 50 | try: 51 | data = yaml.safe_load(f) 52 | except yaml.YAMLError as e: 53 | print(f"[!] Failed to load configuration: {e}") 54 | config = None 55 | return 56 | 57 | config = Config( 58 | exclusions=Exclusions(**data['exclusions']), 59 | redactions=Redactions( 60 | patterns={k: Pattern(**v) for k, v in data['redactions']['patterns'].items()}, 61 | flags=Flags(**data['redactions']['flags']) 62 | ) 63 | ) 64 | return config 65 | 66 | 67 | def reload_config(filename: str) -> Config: 68 | """Reload configuration from file""" 69 | return load_config(filename) 70 | 71 | def get_config() -> Config: 72 | """Get the current configuration""" 73 | return config 74 | 75 | def is_ip_excluded(ip: str, excluded_ranges: List[str]) -> bool: 76 | """ 77 | Check if IP is in any excluded range 78 | Args: 79 | ip: String IP address to check 80 | excluded_ranges: List of CIDR ranges as strings 81 | Returns: 82 | bool: True if IP is in any excluded range 83 | """ 84 | # if SMB beacons are used, we dont have an IP 85 | if "beacon_" in ip: return False 86 | try: 87 | for range in excluded_ranges: 88 | if ipaddress.ip_address(ip) in ipaddress.ip_network(range): 89 | return True 90 | return False 91 | except ValueError as ex: 92 | print("[!] Invalid IP:" + ip + " : " + ex.strerror) 93 | return False -------------------------------------------------------------------------------- /config_template.yml: -------------------------------------------------------------------------------- 1 | exclusions: 2 | internal: [] 3 | external: [] 4 | hostnames: [] 5 | commands: 6 | - _regex: 7 | - 'sleep \d+\s*\d*' 8 | - '^\s*exit\s*$' 9 | - 'beacon to exit' 10 | - 'beacon to sleep' 11 | - 'beacon to list' 12 | - 'beacon to back' 13 | - 'to become interactive' 14 | - 'beacon queue' 15 | - 'set_edr' 16 | - 'list_edrs' 17 | - 'rating' 18 | - 'list_SAFE_COMMANDS' 19 | - 'save_edr_data' 20 | - 'load_edr_data' 21 | - 'clear' 22 | - _and: 23 | - 'jobs' 24 | - 'input' 25 | - 'jobkill' 26 | - 'cancel' 27 | - 'received keystrokes' 28 | - '' 29 | - 'beacon is late' 30 | - 'received screenshot' 31 | - 'note' 32 | - 'checkin' 33 | - 'helpx' 34 | redactions: 35 | patterns: 36 | password_params: 37 | pattern: '((?:\/|-+|\s)(?:p|pass|password|pvk)\s*(?:=|\s|:)\s*)\S+' 38 | description: 'Matches password parameters and their values' 39 | 40 | hash_headers: 41 | pattern: '((?:NTLM|SHA1)\s+:\s)\b\w+\b' 42 | description: 'Matches NTLM and SHA1 hash headers' 43 | 44 | logon_passwords: 45 | pattern: '(\w+:\d+:)\w+:\w+:::' 46 | description: 'Matches logonpasswords output format' 47 | 48 | crypto_params: 49 | pattern: '(.*\/(?:aes256|rc4|statekey|ticket)\s*(?:=|:)\s*)(.*?)\s*$' 50 | description: 'Matches cryptographic parameters' 51 | 52 | make_token: 53 | pattern: '(make_token .*\s)(.*)' 54 | description: 'Matches make_token command and arguments' 55 | 56 | ntlm: 57 | pattern: '^(\\$NT\\$)?[a-f0-9]{32}$' 58 | description: 'Matches NTLM hash format' 59 | 60 | hex_strings: 61 | pattern: '\b([A-Fa-f0-9]{64}|[A-Fa-f0-9]{32})\b' 62 | description: 'Matches 32 and 64 bit hex strings' 63 | 64 | run_as: 65 | pattern: '(runas.*(\/user:)*\b\w+\b\s+)\S+' 66 | description: 'Redact runas [/user:] ' 67 | 68 | net_user: 69 | pattern: '(net\suser\s\b\w+\b\s)(.*?)\s' 70 | description: 'redact creating new user "net user nviso PW /add"' 71 | 72 | roadtoken: 73 | pattern: '(roadtoken\s)(\S+)' 74 | description: 'Redact sensitive roadtoken token' 75 | 76 | aadprt: 77 | pattern: '(aadprt\s)(\S+)' 78 | description: 'Redact sensitive aadprt token' 79 | 80 | encryptionKey: 81 | pattern: '(--encryptionKey\s)(\S+)' 82 | description: 'Redact encryptionKey' 83 | 84 | kerberos_ask: 85 | pattern: '(KerberosAsk\sasktgt\s/userName:\S+\s/domainName:\S+\s/userPassword:)(.*)' 86 | description: 'Redact sensitive KerberosAsk password, e.g., "KerberosAsk asktgt /userName:Administrator /domainName:ecorp.local /userPassword:Rain Documentation 3nglish $ymbol /ptt -> KerberosAsk asktgt /userName:Administrator /domainName:ecorp.local /userPassword:***** /ptt"' 87 | 88 | flags: 89 | case_insensitive: true 90 | replacement: '\1[REDACTED]' -------------------------------------------------------------------------------- /modules/sql/sqlite_model.py: -------------------------------------------------------------------------------- 1 | import re, enum 2 | from sqlalchemy import Column, DateTime, Integer, String, Enum, Table 3 | from sqlalchemy.orm import declarative_base, relationship 4 | from sqlalchemy.sql.schema import ForeignKey 5 | from modules.utils import excel_save, redact 6 | 7 | class EntryType(enum.Enum): 8 | metadata = 1 9 | input = 2 10 | task = 3 11 | checkin = 4 12 | output = 5 13 | note = 6 14 | error = 7 15 | indicator = 8 16 | job_registered = 9 17 | job_completed = 10 18 | # custom attributes 19 | download = 11 20 | upload = 12 21 | events = 13 22 | warning = 14 23 | event = 15 24 | # brute ratel 25 | http_request = 16 26 | http_log = 17 27 | access_denied = 18 28 | 29 | 30 | Base = declarative_base() 31 | 32 | class Beacon(Base): 33 | """ 34 | Table definition for the SQLite DB 35 | """ 36 | __tablename__ = "beacon" 37 | id = Column(Integer, primary_key= True) 38 | ip = Column(String, unique=False) 39 | ip_ext = Column(String, nullable=True) 40 | hostname = Column(String, nullable=True) 41 | user = Column(String, nullable=True) 42 | process = Column(String, nullable=True) 43 | pid = Column(Integer, nullable=True) 44 | os = Column(String, nullable=True) 45 | version = Column(String, nullable=True) 46 | build = Column(String, nullable=True) 47 | arch = Column(String, nullable=True) 48 | timestamp = Column(DateTime, nullable=False) 49 | timezone = Column(String) 50 | entries = relationship("Entry", back_populates="parent",lazy='joined', join_depth=1) 51 | # for OST 52 | uid_str = Column(String, nullable=True, unique=True, index=True) # For OST C2 beacon tracking 53 | 54 | @property 55 | def joined(self): 56 | if self.entries: 57 | return self.entries[0].timestamp 58 | return self.timestamp 59 | @property 60 | def exited(self): 61 | if self.entries: 62 | return self.entries[-1].timestamp 63 | return self.timestamp 64 | 65 | def is_high_integrity(self): 66 | if "*" in self.user: 67 | return True 68 | else: 69 | return False 70 | 71 | def to_row(self): 72 | return [self.hostname, \ 73 | self.ip, \ 74 | self.ip_ext, \ 75 | self.user, \ 76 | self.process, \ 77 | self.pid, \ 78 | self.joined, \ 79 | self.exited] 80 | 81 | 82 | class Entry(Base): 83 | """ 84 | Table definition for the SQLite DB 85 | """ 86 | __tablename__ = "entry" 87 | id = Column(Integer, primary_key = True) 88 | timestamp = Column(DateTime) 89 | timezone = Column(String) 90 | type = Column(Enum(EntryType)) 91 | operator = Column(String, nullable=True) 92 | ttp = Column(String, nullable=True) 93 | content = Column(String) 94 | parent_id = Column(Integer, ForeignKey('beacon.id')) 95 | parent = relationship("Beacon", back_populates="entries", lazy="joined", join_depth=1) 96 | # for OST 97 | task_uid = Column(String, nullable=True, index=True) # For OST C2 task tracking 98 | 99 | #def __getattribute__(self, item): 100 | # if item == "content" and self.type == EntryType.input: 101 | # return re.sub(r"\s*<.*>\s*(.*)", "", self.content).group(1) 102 | 103 | # def get_input(self): 104 | # if self.type == EntryType.input or self.type == EntryType.task: 105 | # return re.sub(r"\s*<.*?>\s*(.*)", r"\1", self.content) 106 | # else: 107 | # raise ValueError("This function can only be called with EntryType.input or EntryType.task") 108 | 109 | def to_row(self, redacting=True): 110 | hostname, user, ip = "","","" 111 | if redacting: 112 | content = excel_save(redact(self.content)) 113 | else: 114 | content = excel_save(self.content) 115 | 116 | date = self.timestamp.strftime("%d/%m/%y") 117 | time = self.timestamp.strftime("%H:%M") 118 | b = self.parent 119 | if b: 120 | hostname, user, ip = b.hostname, b.user, b.ip 121 | return [date, time, hostname, content, user, ip] 122 | 123 | 124 | # class Action(Base): 125 | # """ 126 | # Table definition for the SQLite DB 127 | # """ 128 | # __tablename__ = "action" 129 | # id = Column(Integer, primary_key = True) 130 | # input_id = Column(Integer, ForeignKey("entry.id")) 131 | # task_id = Column(Integer, ForeignKey("entry.id")) 132 | # output_id = Column(Integer, ForeignKey("entry.id")) 133 | -------------------------------------------------------------------------------- /gimmelogs.py: -------------------------------------------------------------------------------- 1 | from os import path 2 | import argparse 3 | from concurrent import futures 4 | 5 | from modules.reporting import * 6 | from modules.parser.cs_parser import * 7 | from modules.configuration import get_config, load_config, config 8 | from modules.utils import get_all_files 9 | from modules.parser.cs_parser import CSLogParser 10 | from modules.parser.br_parser import BRLogParser 11 | 12 | 13 | """ 14 | TODO 15 | Map to every task an output 16 | Detect based on the next output if the command was successfull or not 17 | """ 18 | 19 | 20 | def run(args): 21 | global config 22 | start = time.time() 23 | 24 | if args.config: 25 | load_config(args.config) 26 | 27 | config = get_config() 28 | if config is None: 29 | log("No configuration loaded!", LogType.ERROR) 30 | exit(-1) 31 | 32 | parser = CSLogParser if args.parser == 'cs' else BRLogParser 33 | 34 | init_db(args.database, args.verbose) 35 | 36 | if args.logs: 37 | log_files = get_all_files(args.logs, ".log") 38 | with futures.ThreadPoolExecutor(max_workers=args.worker) as executor: 39 | result_futures = list(map(lambda file: executor.submit(parser.parse_beacon_log, file, args.database), log_files)) 40 | for idx, future in enumerate(futures.as_completed(result_futures)): 41 | printProgressBar(idx, len(result_futures), "Process logs") 42 | 43 | 44 | if args.minimize: 45 | remove_clutter() 46 | remove_via_ip(config.exclusions.external, True) 47 | remove_via_ip(config.exclusions.internal, False) 48 | remove_beacons_via_hostname(config.exclusions.hostnames) 49 | 50 | if args.output: 51 | report_input_task(args.output) 52 | report_dl_ul(args.output) 53 | report_all_beacons_spawned(args.output) 54 | report_all_indicators(args.output) 55 | report_tiber(args.output) 56 | 57 | print(time.time() - start) 58 | 59 | 60 | class ValidatePath(argparse.Action): 61 | def __call__(self, parser, namespace, values, option_string=None): 62 | npath = path.abspath(values.strip()) 63 | if not npath: 64 | return 65 | 66 | if not path.isdir(npath): 67 | os.mkdir(npath) 68 | #log(f"Please choose a valid path for {self.dest}!", "e") 69 | #exit(-1) 70 | 71 | setattr(namespace, self.dest, npath) 72 | 73 | class ValidateFile(argparse.Action): 74 | def __call__(self, parser, namespace, values, option_string=None): 75 | npath = path.abspath(values.strip()) 76 | if not npath: 77 | return 78 | 79 | if not path.isfile(npath): 80 | log(f"Please choose a valid file for {self.dest}!", "e") 81 | exit(-1) 82 | 83 | setattr(namespace, self.dest, npath) 84 | 85 | def strip_input(choice) -> str: 86 | return choice.strip() 87 | 88 | if __name__ == "__main__": 89 | curr_path = path.dirname(path.abspath(__file__)) 90 | 91 | parser = argparse.ArgumentParser(description='Parse CobaltStrike logs and store them in a DB to create reports') 92 | parser.add_argument('-w', '--worker', type=int, default=10, help='Set amount of workers: default=10') 93 | parser.add_argument('-v', '--verbose', action='store_true', help='Activate debugging') 94 | parser.add_argument('-l', '--logs', action=ValidatePath, help='Directory path containing the CS logs') 95 | parser.add_argument('-m', '--minimize', action='store_true', help='Remove unnecessary data: keyloggs,beaconbot,sleep,exit,clear') 96 | parser.add_argument('-p', '--path', action=ValidatePath, help='Database and reports path: default=') 97 | parser.add_argument('-c', '--config', required=True, action=ValidateFile, help='A file with one IP-Range per line which should be ignored') 98 | parser.add_argument('-x', '--parser', type=strip_input, default='cs', choices=['cs', 'br'], help='Choose the parser: default=cs') 99 | 100 | try: 101 | args = parser.parse_args() 102 | except SystemExit: 103 | parser.print_help(sys.stderr) 104 | exit() 105 | 106 | # either path and parser or database 107 | if (not args.logs and not args.path): 108 | parser.print_help(sys.stderr) 109 | log("-----Examples-----", LogType.WARNING) 110 | log("Recommended: python3 gimmelogs.py -l -c config.yml -m", LogType.WARNING) 111 | log("Minimum: python3 gimmelogs.py -l ", LogType.WARNING) 112 | log("Full: python3 gimmelogs.py -l -c config.yml -m -p -w 15", LogType.WARNING) 113 | log("Generate reports: python3 gimmelogs.py -p -c config.yml -m", LogType.WARNING) 114 | exit() 115 | 116 | if args.logs and not args.path: 117 | args.path = args.logs 118 | 119 | args.database = os.path.join(args.path, 'log.db') 120 | args.output = os.path.join(args.path, 'reports') 121 | if not path.isdir(args.output): 122 | os.mkdir(args.output) 123 | """TODO 124 | Reports: 125 | input -> done 126 | input - output 127 | file upload - download -> need to change type of upload tasks to upload 128 | """ 129 | if args.verbose: 130 | args.worker = 1 131 | 132 | run(args) 133 | -------------------------------------------------------------------------------- /modules/ttp.py: -------------------------------------------------------------------------------- 1 | from os import path 2 | import string 3 | from typing import Dict, List 4 | from modules.sql.sqlite_model import * 5 | from modules.sql.sqlite_func import * 6 | from modules.utils import log, write_to_csv 7 | 8 | def sort_on_timestamp(elem: Entry): 9 | return elem.timestamp 10 | 11 | def sort_on_joined(elem: Beacon): 12 | return elem.joined 13 | 14 | class TTPSearcher(): 15 | path = "ttps.csv" 16 | seperator = ';' 17 | ready = False 18 | ttps :List[List] = [] 19 | 20 | def __init__(self): 21 | self.ready = self.verify_ttpfile() 22 | if self.ready: 23 | self.read_ttps() 24 | self.ttps.sort(key=lambda x: len(x[0]), reverse=True) 25 | 26 | 27 | def add_ttp(self, tiber :Dict) -> Dict: 28 | for arr in self.ttps: 29 | if len(arr) != 6: 30 | if '"\n' != arr[0]: 31 | log(f"The ttp entry (" + ";".join(arr) + ") is not correct!") 32 | return tiber 33 | try: 34 | if arr[0].lower() in tiber["Operational Guidance"].lower(): 35 | tiber["Phase"] = arr[1] 36 | tiber["Tactic"] = arr[2] 37 | tiber["Technique ID"] = arr[3] 38 | tiber["Technique Name"] = arr[4] 39 | tiber["Goal"] = arr[5] 40 | return tiber 41 | except Exception as e: 42 | log(f"Error while adding TTP from ttp.csv: {e}", "e") 43 | 44 | return tiber 45 | 46 | def read_ttps(self): 47 | a_file = open(self.path) 48 | for line in a_file: 49 | if line.startswith("#"): 50 | continue 51 | 52 | arr = line.split(self.seperator) 53 | self.ttps.append(arr) 54 | 55 | def verify_path(self): 56 | self.path = path.abspath(os.path.dirname(self.path)) 57 | if not self.path: 58 | log(f"Please choose a valid path for the TTP file: {self.path}!", "e") 59 | exit(-1) 60 | 61 | def verify_ttpfile(self): 62 | self.path = path.abspath(self.path.strip()) 63 | if not path.isfile(self.path): 64 | log(f"The TTP file could not be found: {self.path}!", "e") 65 | return False 66 | return True 67 | 68 | 69 | 70 | pre = [ 71 | # ["","Resource Development","T1583.001"," Acquire Infrastructure: Domains","N/A","Registration of for Scenario ","Obtain domain for phishing","Success"], 72 | # ["","Resource Development","T1585.001","Establish Accounts: Social Media Accounts","N/A","The LinkedIn persona sent connection requests to a number of employees","Create a trust relationship","Only 2 people accepted"], 73 | # ["","Resource Development","T1328"," Acquire Infrastructure: Domains","N/A","Registration of for C2","Obtain domain for phishing","Not used"], 74 | # ["","Resource Development","T1328"," Acquire Infrastructure: Domains","N/A","Registration of => static ip of x.x.x.x","Evade proxy defenses using domain fronting","Success"], 77 | # ["","Command And Control","T1104","Multi-Stage Channels","N/A","The DLL retrieves the beacon exe as a second stage to inject into svchost. The beacon is hosted via domain fronting on download.visualstudio.microsoft.com -> vstudio.azureedge.net => static ip of x.x.x.x","Evade defenses by loading the payload in memory as second stage","Success"], 78 | # ["","Defense Evasion","T1027","Obfuscated Files or Information","N/A","The VBA payload was obfuscated to evade detection","Defense evasion with obfuscated payloads","Success"], 79 | # ["","Defense Evasion","T1027","Obfuscated Files or Information","N/A","The VBA code itself was obfuscated, as was the DLL embedded inside the VBA code","Defense evasion with obfuscated payloads","Success"], 80 | # ["","Defense Evasion","T1480.001","Execution Guardrails: Environmental Keying","N/A","The VBA payload would only run on domain-joined targets","Defense evasion using limted execution","Success"], 81 | # ["","Defense Evasion","T1112","Modify Registry","N/A","Registry key written to perform COM hijack: Computer\\HKEY_CURRENT_USER\\Software\\Classes\\CLSID\\","Perform COM hijack for persistance","Success"], 82 | # ["","Execution","T1059.005","Command and Scripting Interpreter: Visual Basic","N/A","Office document (.doc) containing VBA code to perform the COM hijack with embedded DLL.","Evade detection by delaying payload execution with the COM hjiack","Success"] 83 | ] 84 | def report_tiber(output): 85 | ttp = TTPSearcher() 86 | if not ttp.ready: 87 | return 88 | 89 | entries = get_all_entries_filtered(filter=EntryType.input) 90 | entries = entries + get_all_entries_filtered_containing(filter=EntryType.task, cont="Tasked beacon to") 91 | entries.sort(key=sort_on_timestamp) 92 | rows = pre 93 | 94 | for entry in entries: 95 | tiber = {"Phase":"", "Tactic":"", "Technique ID":"", "Technique Name":"", "Executed on":"", "Operational Guidance":"", "Goal":"", "Result":"", "Thread Actor":"", "Related Findings(s)":"", "Date":"", "Time":""} 96 | tiber["Executed on"] = entry.parent.hostname if entry.parent.hostname else "N/A" 97 | tiber["Date"] = entry.timestamp.strftime("%d/%m/%Y") 98 | tiber["Time"] = entry.timestamp.strftime("%H:%M:%S") 99 | tiber["Operational Guidance"] = excel_save(redact(entry.content)) 100 | tiber = ttp.add_ttp(tiber) 101 | rows.append(tiber.values()) 102 | write_to_csv(output+"\\tiber-report.csv", tiber.keys(), rows) 103 | -------------------------------------------------------------------------------- /modules/utils.py: -------------------------------------------------------------------------------- 1 | import enum 2 | import re, sys, os, csv 3 | from typing import List 4 | from sqlalchemy.sql.sqltypes import Boolean, String 5 | from modules.configuration import get_config 6 | 7 | class bcolors (enum.Enum): 8 | HEADER = '\033[94m' 9 | OKGREEN = '\033[92m' 10 | WARNING = '\033[93m' 11 | FAIL = '\033[91m' 12 | ENDC = '\033[0m' 13 | BOLD = '\033[1m' 14 | UNDERLINE = '\033[4m' 15 | 16 | class LogType(enum.Enum): 17 | """ 18 | Error = RED 19 | Warning = YELLOW 20 | Header = BLUE 21 | Success = GREEN 22 | INFO = WHITE (default) 23 | """ 24 | ERROR = 0 25 | WARNING = 1 26 | HEADER = 2 27 | SUCCESS = 3 28 | INFO = 4 29 | 30 | 31 | def log(text: String, status: LogType=LogType.INFO) -> None: 32 | if status == LogType.ERROR: 33 | print(f"{bcolors.FAIL.value}[!!] {text} {bcolors.ENDC.value}") 34 | elif status == LogType.WARNING: 35 | print(f"{bcolors.WARNING.value}[!] {text} {bcolors.ENDC.value}") 36 | elif status == LogType.HEADER: 37 | print(f"{bcolors.HEADER.value}[-] {text} {bcolors.ENDC.value}") 38 | elif status == LogType.INFO: 39 | print(f"{bcolors.OKGREEN.value}[+] {text} {bcolors.ENDC.value}") 40 | else: 41 | print(f"{text}") 42 | 43 | 44 | def store_to_file(filename: String, content: String) -> None: 45 | with open(filename, "w") as text_file: 46 | print(content, file=text_file) 47 | 48 | 49 | def write_to_csv(filename: String, header: List, rows: List) -> None: 50 | filename = os.path.abspath(filename.strip()) 51 | while True: 52 | try: 53 | log("Creating csv: " + filename) 54 | with open(filename, 'w', encoding='UTF8', newline="") as f: 55 | writer = csv.writer(f, dialect="excel",delimiter=";") 56 | # writer.writerow("sep=;") 57 | writer.writerow(header) 58 | writer.writerows(rows) 59 | except Exception as ex: 60 | log("The file is already in use, please close it!","w") 61 | ret = yes_no("Have you closed the file or do you want to stop the execution?") 62 | if ret: 63 | continue 64 | else: 65 | break 66 | break 67 | 68 | 69 | def read_file(path: String) -> String: 70 | """path to file will be read as UTF-8 and throw an error + exit if not possible""" 71 | try: 72 | filename = os.path.abspath(path.strip()) 73 | with open(filename, "r", encoding="utf-8") as f: s = f.read() 74 | except Exception as ex: 75 | log(ex.filename + ": " + ex.strerror, "e") 76 | sys.exit(0) 77 | return s 78 | 79 | def get_all_files(path: String, extension: String, prefix: String = "") -> List: 80 | list_of_files = [] 81 | for (dirpath, dirnames, filenames) in os.walk(path): 82 | for filename in filenames: 83 | if filename.endswith(extension) and filename.startswith(prefix): 84 | list_of_files.append(os.sep.join([dirpath, filename])) 85 | return list_of_files 86 | 87 | 88 | def yes_no(question: String) -> Boolean: 89 | answer = input(question + "(y/n/c): ").lower().strip() 90 | print("") 91 | while not(answer == "y" or answer == "n" or answer == "c"): 92 | print("Input yes, no or cancel") 93 | answer = input(question + "(y/n/c):").lower().strip() 94 | print("") 95 | if answer[0] == "y": 96 | return True 97 | elif answer[0] == "c": 98 | sys.exit(0) 99 | else: 100 | return False 101 | 102 | 103 | # Print iterations progress 104 | def progressBar(iterable, prefix = 'Progress:', suffix = 'Complete', decimals = 1, length = 100, fill = '█', printEnd = "\r"): 105 | """ 106 | Call in a loop to create terminal progress bar 107 | @params: 108 | iterable - Required : iterable object (Iterable) 109 | prefix - Optional : prefix string (Str) 110 | suffix - Optional : suffix string (Str) 111 | decimals - Optional : positive number of decimals in percent complete (Int) 112 | length - Optional : character length of bar (Int) 113 | fill - Optional : bar fill character (Str) 114 | printEnd - Optional : end character (e.g. "\r", "\r\n") (Str) 115 | """ 116 | total = len(iterable) 117 | # Progress Bar Printing Function 118 | def printProgressBar (iteration): 119 | percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total))) 120 | filledLength = int(length * iteration // total) 121 | bar = fill * filledLength + '-' * (length - filledLength) 122 | print(f'\r{prefix} |{bar}| {percent}% {suffix}', end = printEnd) 123 | # Initial Call 124 | printProgressBar(0) 125 | # Update Progress Bar 126 | for i, item in enumerate(iterable): 127 | yield item 128 | printProgressBar(i + 1) 129 | # Print New Line on Complete 130 | print() 131 | 132 | 133 | # Print iterations progress 134 | def printProgressBar (iteration, total, prefix = '', suffix = 'Complete', decimals = 1, length = 100, fill = '█', printEnd = "\r"): 135 | """ 136 | Call in a loop to create terminal progress bar 137 | @params: 138 | iteration - Required : current iteration (Int) 139 | total - Required : total iterations (Int) 140 | prefix - Optional : prefix string (Str) 141 | suffix - Optional : suffix string (Str) 142 | decimals - Optional : positive number of decimals in percent complete (Int) 143 | length - Optional : character length of bar (Int) 144 | fill - Optional : bar fill character (Str) 145 | printEnd - Optional : end character (e.g. "\r", "\r\n") (Str) 146 | """ 147 | percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total))) 148 | filledLength = int(length * iteration // total) 149 | bar = fill * filledLength + '-' * (length - filledLength) 150 | print(f'\r{prefix} |{bar}| {percent}% {suffix}', end = printEnd) 151 | # Print New Line on Complete 152 | if iteration == total: 153 | print() 154 | 155 | 156 | def redact(content: str) -> str: 157 | """ 158 | Redact sensitive information based on global config 159 | Args: 160 | content: String to redact 161 | Returns: 162 | Redacted content string 163 | """ 164 | config = get_config() 165 | 166 | replacement = config.redactions.flags.replacement 167 | case_insensitive = config.redactions.flags.case_insensitive 168 | 169 | # Apply each pattern 170 | for name, pattern_config in config.redactions.patterns.items(): 171 | regex_flags = re.I if case_insensitive else 0 172 | try: 173 | content = re.sub( 174 | pattern_config.pattern.strip(), 175 | replacement, 176 | content, 177 | flags=regex_flags 178 | ) 179 | except re.error as e: 180 | log(f"Redaction failed for pattern {name}: {e}", "e") 181 | 182 | return content 183 | 184 | 185 | def excel_save(content: String) -> String: 186 | """Replaces the csv seperator ',' with ';'""" 187 | if "," in content: 188 | content = content.replace(",", ";") 189 | if "\"" in content: 190 | content = content.replace("\"", "'") 191 | return content 192 | 193 | 194 | def extract_ips(content: String) -> List: 195 | """ 196 | Extracts IP addresses from a string 197 | Args: 198 | content: String to extract IPs from 199 | Returns: 200 | List of IP addresses 201 | """ 202 | return re.findall(r"\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b", content) -------------------------------------------------------------------------------- /modules/sql/sqlite_func.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import time 4 | from typing import Dict, List 5 | 6 | import sqlalchemy 7 | from sqlalchemy.future import select 8 | from sqlalchemy.future.engine import create_engine 9 | from sqlalchemy.orm import sessionmaker 10 | from sqlalchemy import exc, update, delete, text, or_, and_, func 11 | 12 | from modules.sql.sqlite_model import * 13 | from modules.configuration import is_ip_excluded, get_config, AndCommand 14 | from modules.utils import log 15 | 16 | config = get_config() 17 | SESSION = None 18 | 19 | def init_db(db_path, debug): 20 | global SESSION 21 | try: 22 | tmp_path = os.path.dirname(db_path) 23 | if not os.path.isdir(tmp_path): 24 | os.mkdir(tmp_path) 25 | engine = create_engine("sqlite:///"+db_path,echo=debug) 26 | except Exception as ex: 27 | log(f"Please provide a valid DB path: {ex}", "e") 28 | sys.exit(-1) 29 | 30 | SESSION = sessionmaker(engine) 31 | try: 32 | Base.metadata.create_all(engine) 33 | return SESSION 34 | except Exception as ex: 35 | log(f"Please provide a valid DB path: {ex}", "e") 36 | sys.exit(-1) 37 | 38 | 39 | # ========================= 40 | # =========GENERIC========= 41 | # ========================= 42 | def get_element_by_id(cls, id): 43 | """ 44 | Get row of type CLS (generic) with ID id 45 | """ 46 | session = SESSION() 47 | record = None 48 | try: 49 | records = session.execute(select(cls).where(cls.id == id)) 50 | results = records.scalars().first() 51 | return results 52 | except Exception as ex: 53 | log(f"get_element_by_id() Failed: {ex}", "e") 54 | finally: 55 | session.close() 56 | 57 | 58 | def get_all_elements(cls): 59 | session = SESSION() 60 | rec =[] 61 | try: 62 | records = session.execute(select(cls)) 63 | results = records.unique().scalars().fetchall() 64 | return results 65 | except Exception as ex: 66 | log(f"get_all_elements() Failed: {ex}", "e") 67 | finally: 68 | session.close() 69 | 70 | 71 | def update_element(cls, **kwargs): 72 | session = SESSION() 73 | try: 74 | session.execute( 75 | update(cls). 76 | where(cls.id == kwargs["id"]). 77 | values(kwargs) 78 | ) 79 | session.commit() 80 | except exc.IntegrityError: 81 | pass 82 | except Exception as ex: 83 | log(f"update_element() Failed: {ex}", "e") 84 | finally: 85 | session.close() 86 | 87 | return get_element_by_id(cls, kwargs["id"]) 88 | 89 | 90 | def delete_element(cls, id): 91 | session = SESSION() 92 | try: 93 | session.execute(delete(cls).where(cls.id == id)) 94 | session.commit() 95 | except Exception as ex: 96 | log(f"delete_element() Failed: {ex}", "e") 97 | finally: 98 | session.close() 99 | 100 | 101 | def get_element_by_values(cls, **kwargs): 102 | """ 103 | Get one element of type CLS (generic) where values match 104 | """ 105 | session = SESSION() 106 | bindTo = [] 107 | try: 108 | #remove externalIP because beacons can connect to beacons .. 109 | kwargs.pop('ip_ext', None) 110 | 111 | for key, value in kwargs.items(): 112 | bindTo.append(f"{ str(key) }=:{ str(key) }") 113 | 114 | qry = str(" and ".join(bindTo)) 115 | query = f"SELECT * FROM {cls.__tablename__} WHERE {qry}" 116 | records = session.execute(text(query).bindparams(**kwargs)) 117 | result = records.scalar() 118 | return result 119 | except Exception as ex: 120 | log(f"get_element_by_values() Failed: {ex}", "e") 121 | finally: 122 | session.close() 123 | 124 | 125 | def create_element(cls, **kwargs): 126 | elem = get_element_by_values(cls, **kwargs) 127 | if elem: 128 | return elem 129 | 130 | session = SESSION() 131 | try: 132 | # if beacon is unknown drop id so it auto generates one 133 | if "id" in kwargs and kwargs["id"] == '': 134 | kwargs.pop("id") 135 | 136 | record = cls() 137 | for k, v in kwargs.items(): 138 | setattr(record, k, v) 139 | 140 | session.add(record) 141 | session.commit() 142 | return record.id 143 | except exc.IntegrityError: 144 | elem = get_element_by_id(cls, kwargs["id"]) 145 | return elem.id 146 | except Exception as ex: 147 | log(f"create_element({cls}) Failed: {ex}", "e") 148 | except sqlalchemy.sqlite3.OperationalError as ex: 149 | log(f"create_element({cls}) Failed: Database busy! Retrying..{ex}", "w") 150 | time.sleep(1) 151 | create_element(cls, **kwargs) 152 | finally: 153 | session.close() 154 | 155 | # ========================= 156 | # =========BEACONS========= 157 | # ========================= 158 | def get_last_entry_of_beacon(id): 159 | session = SESSION() 160 | record = None 161 | try: 162 | records: Entry = session.execute(select(Entry).where(Entry.parent_id == id ).order_by(Entry.timestamp.desc())) 163 | return records.scalars().first() 164 | except Exception as ex: 165 | log(f"get_last_entry_of_beacon() Failed: {ex}", "e") 166 | finally: 167 | session.close() 168 | 169 | 170 | def get_first_metadata_entry_of_beacon(id): 171 | session = SESSION() 172 | try: 173 | records: Entry = session.execute(select(Entry).where(Entry.parent_id == id ).where(Entry.type == EntryType.metadata)) 174 | result = records.scalars().first() 175 | return result 176 | except Exception as ex: 177 | log(f"get_first_metadata_entry_of_beacon() Failed: {ex}", "e") 178 | finally: 179 | session.close() 180 | 181 | 182 | def get_all_incomplete_beacons(): 183 | session = SESSION() 184 | try: 185 | records: Entry = session.execute( 186 | select(Beacon).filter( 187 | or_( 188 | Beacon.hostname == None, 189 | Beacon.exited == None 190 | ) 191 | ) 192 | ) 193 | result = records.unique().scalars().fetchall() 194 | return result 195 | except Exception as ex: 196 | log(f"get_all_incomplete_beacons() Failed: {ex}", "e") 197 | finally: 198 | session.close() 199 | 200 | 201 | def get_all_valid_beacons() -> List[Beacon]: 202 | session = SESSION() 203 | try: 204 | records: Beacon = session.execute( 205 | select(Beacon).filter( 206 | and_( 207 | Beacon.hostname != None, 208 | Beacon.joined != None 209 | ) 210 | ) 211 | ) 212 | result = records.unique().scalars().fetchall() 213 | return result 214 | except Exception as ex: 215 | log(f"get_all_complete_beacons() Failed: {ex}", "e") 216 | finally: 217 | session.close() 218 | 219 | 220 | def get_last_beacon_entry_time(beacon_id: int): 221 | session = SESSION() 222 | try: 223 | # Query to fetch the latest timestamp of an entry for a specific beacon 224 | last_entry_time = session.query(func.max(Entry.timestamp)).filter(Entry.parent_id == beacon_id).scalar() 225 | return last_entry_time 226 | except Exception as e: 227 | print(f"Failed to fetch last entry time for beacon {beacon_id}: {e}") 228 | return None 229 | finally: 230 | session.close() 231 | # ========================= 232 | # ==========ENTRY========== 233 | # ========================= 234 | def get_entry_by_param(timestamp, timezone, type, content): 235 | session = SESSION() 236 | record = None 237 | try: 238 | records: Entry = session.execute( 239 | select(Entry). 240 | where(Entry.timestamp == timestamp). 241 | where(Entry.timezone == timezone). 242 | where(Entry.type == type) 243 | ) 244 | record = records.scalars().first() 245 | except Exception as ex: 246 | log(f"get_entry_by_param() Failed: {ex}", "e") 247 | finally: 248 | session.close() 249 | 250 | return excel_save(redact(record.content)) 251 | 252 | 253 | def get_all_entries_filtered(filter: EntryType, redacting: bool=True) -> List[Entry]: 254 | session = SESSION() 255 | try: 256 | records: Entry = session.execute(select(Entry).where(Entry.type == filter).order_by(Entry.timestamp.asc())) 257 | results = records.unique().scalars().fetchall() 258 | # reduct will be done at .to_row() level 259 | return results 260 | except Exception as ex: 261 | log(f"get_all_entries_filtered() Failed: {ex}", "e") 262 | finally: 263 | session.close() 264 | 265 | 266 | def get_all_entries_filtered_containing(filter: EntryType, cont: String) -> List[Entry]: 267 | """ 268 | Get all entrytype called filter which contains sttring called cont 269 | """ 270 | session = SESSION() 271 | try: 272 | records: Entry = session.execute( 273 | select(Entry).filter( 274 | and_( 275 | Entry.type == filter, 276 | Entry.content.contains(cont) 277 | ) 278 | ).order_by(Entry.timestamp.asc()) 279 | ) 280 | results = records.unique().scalars().fetchall() 281 | # reduct will be done at .to_row() level 282 | return results 283 | except Exception as ex: 284 | log(f"get_all_entries_filtered_containing() Failed: {ex}", "e") 285 | finally: 286 | session.close() 287 | 288 | 289 | def get_upload_entries(): 290 | """ 291 | Get one element of type CLS (generic) where values match 292 | """ 293 | session = SESSION() 294 | try: 295 | records = session.execute(select(Entry).filter( 296 | or_( 297 | Entry.content.contains('Uploading beaconloader:'), 298 | Entry.content.contains('Uploading payload file:'), 299 | Entry.content.contains('Tasked beacon to upload'), 300 | Entry.type == EntryType.indicator 301 | ) 302 | )) 303 | results = records.unique().scalars().fetchall() 304 | # reduct will be done at .to_row() level 305 | return results 306 | except Exception as ex: 307 | log(f"get_element_by_values() Failed: {ex}", "e") 308 | finally: 309 | session.close() 310 | 311 | 312 | 313 | # ========================= 314 | # =========CLEANUP========= 315 | # ========================= 316 | 317 | 318 | def test_remove_clutter(): 319 | session = SESSION() 320 | try: 321 | records = session.execute(select(Entry).filter( 322 | or_( 323 | Entry.content.contains('clear'), 324 | Entry.content.contains('jobs'), 325 | Entry.content.contains('jobkill'), 326 | Entry.content.contains('cancel'), 327 | )).order_by(Entry.timestamp.asc())) 328 | 329 | result = records.unique().scalars().fetchall() 330 | return result 331 | except Exception as ex: 332 | log(f"get_all_entries_filtered() Failed: {ex}", "e") 333 | finally: 334 | session.close() 335 | 336 | def build_filter_conditions(filters: List[str]): 337 | """Build SQLAlchemy filter conditions from list of strings""" 338 | or_conditions = [] 339 | try: 340 | for filter_item in filters: 341 | if isinstance(filter_item, dict): 342 | if "_and" in filter_item: 343 | and_conditions = [Entry.content.contains(part) for part in filter_item["_and"]] 344 | or_conditions.append(and_(*and_conditions)) 345 | if "_regex" in filter_item: 346 | regex_conditions = [Entry.content.op('REGEXP')(part) for part in filter_item["_regex"]] 347 | for regex in regex_conditions: 348 | or_conditions.append(regex) 349 | else: 350 | or_conditions.append(Entry.content.contains(filter_item)) 351 | except Exception as e: 352 | log(f"Please format your filter correctly in the config.yml: {e}", "e") 353 | 354 | return or_conditions 355 | 356 | def remove_clutter(): 357 | """This function removes the following entries from the DB: 358 | - keylogger output 359 | - sleep commands issues by the operator 360 | - BeaconBot responses 361 | - Screenshot output 362 | https://docs.sqlalchemy.org/en/14/core/expression_api.html""" 363 | config = get_config() 364 | session = SESSION() 365 | try: 366 | # Get filters from config 367 | filters = config.exclusions.commands 368 | conditions = build_filter_conditions(filters) 369 | 370 | entries = session.query(Entry).filter(or_(*conditions)) 371 | count = entries.count() 372 | entries.delete(synchronize_session=False) 373 | session.commit() 374 | log(f"Removed {count} clutter entries") 375 | 376 | except Exception as ex: 377 | log(f"remove_clutter() Failed: {ex}", "e") 378 | session.rollback() 379 | finally: 380 | session.close() 381 | 382 | def remove_via_ip(excluded_ranges, public_ip=False): 383 | """Remove beacons and entries for ip_ext in excluded_ranges: 384 | https://docs.sqlalchemy.org/en/14/core/expression_api.html""" 385 | session = SESSION() 386 | try: 387 | if not excluded_ranges: 388 | return 389 | 390 | # Get all beacons 391 | beacons = session.query(Beacon).all() 392 | 393 | # Filter beacons with excluded IPs 394 | beacon_ids = [] 395 | for beacon in beacons: 396 | if public_ip: 397 | if beacon.ip_ext and is_ip_excluded(beacon.ip_ext, excluded_ranges): 398 | beacon_ids.append(beacon.id) 399 | elif not public_ip: 400 | if beacon.ip and is_ip_excluded(beacon.ip, excluded_ranges): 401 | beacon_ids.append(beacon.id) 402 | else: 403 | log(f"remove_via_ip() Failed: Invalid public_ip value: {public_ip}", "e") 404 | return 405 | 406 | if beacon_ids: 407 | # Remove related entries first 408 | session.query(Entry).filter( 409 | Entry.parent_id.in_(beacon_ids) 410 | ).delete(synchronize_session=False) 411 | 412 | # Remove beacons 413 | session.query(Beacon).filter( 414 | Beacon.id.in_(beacon_ids) 415 | ).delete(synchronize_session=False) 416 | 417 | session.commit() 418 | 419 | except Exception as e: 420 | session.rollback() 421 | raise e 422 | finally: 423 | session.close() 424 | 425 | def remove_beacons_via_hostname(excluded_hostnames): 426 | """Remove beacons and entries where hostname matches excluded hostnames in config""" 427 | session = SESSION() 428 | try: 429 | if not excluded_hostnames: 430 | return 431 | 432 | # Get all beacons 433 | beacons = session.query(Beacon).all() 434 | 435 | # Filter beacons with excluded hostnames 436 | beacon_ids = [ 437 | beacon.id for beacon in beacons 438 | # Check if hostname is in excluded_hostnames check only lower case 439 | if beacon.hostname and beacon.hostname.lower() in [host.lower() for host in excluded_hostnames] 440 | ] 441 | 442 | if beacon_ids: 443 | # Remove related entries first 444 | session.query(Entry).filter( 445 | Entry.parent_id.in_(beacon_ids) 446 | ).delete(synchronize_session=False) 447 | 448 | # Remove beacons 449 | session.query(Beacon).filter( 450 | Beacon.id.in_(beacon_ids) 451 | ).delete(synchronize_session=False) 452 | 453 | session.commit() 454 | 455 | except Exception as e: 456 | session.rollback() 457 | log(f"remove_via_hostname() Failed: {e}", "e") 458 | raise e 459 | finally: 460 | session.close() 461 | 462 | -------------------------------------------------------------------------------- /modules/parser/br_parser.py: -------------------------------------------------------------------------------- 1 | import re 2 | from datetime import datetime, timedelta 3 | from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Enum, create_engine 4 | from sqlalchemy.orm import relationship, sessionmaker 5 | from sqlalchemy.ext.declarative import declarative_base 6 | from concurrent.futures import ThreadPoolExecutor, as_completed 7 | import threading 8 | from typing import Dict 9 | from modules.sql.sqlite_func import init_db 10 | from modules.sql.sqlite_model import Beacon, Entry, EntryType 11 | from modules.utils import extract_ips 12 | 13 | #### 14 | # Disclaimer: There are some issues with BR logs which I tried to work around! 15 | # - There is no mapping between input and output 16 | # - Some of the logs just don't have a beacon reference, especially upload and http_request 17 | # - The timestamp is not always in the same format 18 | # - Autoruns are too fast for different timestamps, so we need to add a microsecond to the timestamp >:/ FFS 19 | # - There are multiple outputs in one, so I have to create the same output object again with new content 20 | # - There are other logs interfering with the output 21 | #### 22 | class BRLogParser: 23 | def __init__(self, filepath: str, db_path: str, debug: bool = False): 24 | self.filepath = filepath 25 | # Extract beacon ID from the filename 26 | self.beacon_id = self.extract_beacon_id_from_filename(filepath) 27 | # Initialize the database session 28 | session_manager = init_db(db_path, debug) 29 | self.session = session_manager() 30 | # Track the current command and its accumulated output 31 | self.current_command = None 32 | self.current_output = "" 33 | self.is_accumulating_output = False 34 | # Lock for thread-safe database access 35 | self.lock = threading.Lock() 36 | 37 | @staticmethod 38 | def extract_beacon_id_from_filename(filename: str) -> int: 39 | match = re.search(r'b-(\d+)', filename) 40 | if match: 41 | return int(match.group(1)) 42 | else: 43 | pass 44 | 45 | @staticmethod 46 | def parse_beacon_log(filepath: str, db_path: str, debug: bool = False): 47 | parser = BRLogParser(filepath, db_path, debug) 48 | parser.parse() 49 | 50 | @staticmethod 51 | def parse_timestamp(timestamp_str: str, format: str = "%Y/%m/%d %H:%M:%S %Z") -> datetime: 52 | # get the current year 53 | try: 54 | return datetime.strptime(timestamp_str, format) 55 | except ValueError: 56 | raise ValueError("Incorrect data format!") 57 | 58 | 59 | def parse(self): 60 | with open(self.filepath, 'r') as file: 61 | for line in file: 62 | parsed_line = self.parse_line(line) 63 | if parsed_line: 64 | # Handle metadata separately to store or update beacon information 65 | if parsed_line['type'] == 'metadata': 66 | self.store_beacon_to_db(parsed_line) 67 | elif parsed_line['type'] == 'output_end': 68 | # store completed output if there is something to write 69 | if self.current_output: 70 | self.store_entry_to_db({'type': self.current_command['type'], 'timestamp': self.current_command['timestamp'], 'timezone': self.current_command["timezone"], 'content': self.current_output.strip()}) 71 | self.current_output = "" 72 | self.is_accumulating_output = False 73 | # handle multiline output 74 | elif parsed_line['type'] == 'output' or parsed_line['type'] == 'access_denied' or parsed_line['type'] == 'http_request' or parsed_line['type'] == 'http_log': 75 | # if a new command is detected, store the previous command and its output and reset current_output 76 | if self.is_accumulating_output: 77 | self.current_command['content'] = self.current_output.strip() 78 | self.store_entry_to_db(self.current_command) 79 | self.current_output = "" 80 | 81 | # Start/restart the accumulate output for the current command 82 | self.is_accumulating_output = True 83 | self.current_command = parsed_line 84 | self.current_output += parsed_line['content'] if 'content' in parsed_line else "" 85 | else: 86 | # handle if there is other stuff between the output start and the first line of the output 87 | # lets hope it does not start randomly in between the output 88 | if self.current_output == "" and self.is_accumulating_output: 89 | self.store_entry_to_db(parsed_line) 90 | continue 91 | 92 | # autoruns are too fast for different timestamps, so we need to add a microsecond to the timestamp >:/ FFS 93 | if self.current_command and \ 94 | "operator" in self.current_command and \ 95 | "autoruns" == self.current_command['operator'] and \ 96 | "autoruns" == parsed_line['operator']: 97 | parsed_line['timestamp'] = self.current_command['timestamp'] + timedelta(microseconds=1) 98 | self.store_entry_to_db(parsed_line) 99 | self.current_command = parsed_line 100 | continue 101 | 102 | # store completed output if there is something to write 103 | if self.current_output: 104 | self.store_entry_to_db({'type': self.current_command['type'], 'timestamp': self.current_command['timestamp'], 'timezone': self.current_command["timezone"], 'content': self.current_output.strip()}) 105 | 106 | self.store_entry_to_db(parsed_line) 107 | # Reset for the new command 108 | self.current_command = parsed_line 109 | self.current_output = "" 110 | self.is_accumulating_output = False 111 | else: 112 | # add the output to the current command 113 | if self.is_accumulating_output: 114 | self.current_output += line 115 | # skip empty lines 116 | elif re.match(r'^\s*$', line): 117 | continue 118 | # there are multiple outputs in one, so create the same output object again with new content 119 | elif self.current_command: 120 | # add a microsecond to the timestamp to avoid duplicates and keep the order 121 | self.current_command['timestamp'] += timedelta(microseconds=1) 122 | self.current_output += line 123 | else: 124 | if "watchlist.log" in self.filepath: 125 | pass 126 | else: 127 | print(f"Could not parse {self.filepath} - {line}") 128 | # Last line of the file: Store the last command of the file and its output if applicable 129 | if self.current_output: 130 | self.store_entry_to_db({'type': self.current_command['type'], 'timestamp': self.current_command['timestamp'], 'timezone': self.current_command["timezone"], 'content': self.current_output.strip()}) 131 | 132 | 133 | def parse_line(self, line: str) -> Dict: 134 | metadata_pattern = re.compile(r"(?P\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[::badger authenticated from (?P.*)\]\[(?P.*?)\]\[b-(?P\d+?).*\]") 135 | watchlist_pattern = re.compile(r'(?P\d{2}-\d{2}-\d{4} \d{2}:\d{2}:\d{2} (?P\w+))\s+\[Initial Access\]\s+b-(?P\d+)\\\\[A-Z0-9]+\s+\((?P.+?)\)\s+from\s+(?P.+?)\s+\[(?P\d+)->(?P\d+)\]') 136 | input_pattern = re.compile(r"^(?P\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[input\] (?P\w+) => (?P.+)") 137 | output_pattern = re.compile(r'(?P\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[sent \d+ bytes\]') 138 | output_end_pattern = re.compile(r'\+-+\+') 139 | upload_pattern = re.compile(r"(?P\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[UPLOAD\] (?PHost: (?P.*) \| File: (?P.*) \| MD5: (?P[a-f0-9]{32}))") 140 | access_denied_pattern = re.compile(r"^(?P\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) (?PAccess denied \(.+\): \[(b-(?P\d+))?.+\])") 141 | http_request_pattern = re.compile(r"(?P\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \n") 142 | http_log_pattern = re.compile(r'(?P\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+))\s+\[\+\] b-(?P\d+)\s+(?P.*: \[(?P.*)\])') 143 | 144 | metadata_match = metadata_pattern.match(line) 145 | watchlist_match = watchlist_pattern.match(line) 146 | input_match = input_pattern.match(line) 147 | output_match = output_pattern.match(line) 148 | output_end_pattern = output_end_pattern.match(line) 149 | upload_match = upload_pattern.match(line) 150 | access_denied_match = access_denied_pattern.match(line) 151 | http_request_match = http_request_pattern.match(line) 152 | http_log_match = http_log_pattern.match(line) 153 | 154 | if metadata_match: 155 | return { 156 | 'type': 'metadata', 157 | 'timestamp': self.parse_timestamp(metadata_match.group('timestamp')), 158 | 'timezone': metadata_match.group('timezone'), 159 | 'ip': extract_ips(metadata_match.group('ips'))[-1], 160 | 'ip_ext': extract_ips(metadata_match.group('ips'))[0], 161 | 'user': metadata_match.group('user').split("\\", 1)[1], 162 | 'hostname': metadata_match.group('user').split("\\", 1)[0], 163 | 'id': metadata_match.group('beacon_id') 164 | } 165 | elif watchlist_match: 166 | return { 167 | 'type': 'metadata', 168 | 'timestamp': self.parse_timestamp(watchlist_match.group('timestamp'), "%m-%d-%Y %H:%M:%S %Z"), # why?? different timestamp format 169 | 'timezone': watchlist_match.group('timezone'), 170 | 'ip': extract_ips(watchlist_match.group('ips'))[-1], 171 | 'ip_ext': extract_ips(watchlist_match.group('ips'))[0], 172 | 'user': watchlist_match.group('user').split("\\", 1)[1], 173 | 'hostname': watchlist_match.group('user').split("\\", 1)[0], 174 | 'id': watchlist_match.group('beacon_id') 175 | } 176 | elif input_match: 177 | return { 178 | 'type': 'input', 179 | 'timestamp': self.parse_timestamp(input_match.group('timestamp')), 180 | 'timezone': input_match.group('timezone'), 181 | 'operator': input_match.group('operator'), 182 | 'content': input_match.group('command') 183 | } 184 | elif output_match: 185 | return { 186 | 'type': 'output', 187 | 'timestamp': self.parse_timestamp(output_match.group('timestamp')), 188 | 'timezone': output_match.group('timezone'), 189 | # 'bytes': output_match.group('bytes'), 190 | } 191 | elif output_end_pattern: 192 | return { 193 | 'type': 'output_end', 194 | } 195 | elif upload_match: 196 | return { 197 | 'type': 'indicator', 198 | 'timestamp': self.parse_timestamp(upload_match.group('timestamp')), 199 | 'timezone': upload_match.group('timezone'), 200 | 'content': upload_match.group('content') 201 | # 'host': upload_match.group('host'), 202 | # 'file': upload_match.group('file'), 203 | # 'md5': upload_match.group('md5'), 204 | } 205 | elif access_denied_match: 206 | return { 207 | 'type': 'access_denied', 208 | 'timestamp': self.parse_timestamp(access_denied_match.group('timestamp')), 209 | 'timezone': access_denied_match.group('timezone'), 210 | 'content': access_denied_match.group('content'), 211 | 'parent_id': access_denied_match.group('beacon_id') 212 | } 213 | elif http_request_match: 214 | return { 215 | 'type': 'http_request', 216 | 'timestamp': self.parse_timestamp(http_request_match.group('timestamp')), 217 | 'timezone': http_request_match.group('timezone'), 218 | } 219 | elif http_log_match: 220 | return { 221 | 'type': 'http_log', 222 | 'timestamp': self.parse_timestamp(http_log_match.group('timestamp')), 223 | 'timezone': http_log_match.group('timezone'), 224 | 'content': http_log_match.group('content'), 225 | 'parent_id': http_log_match.group('beacon_id'), 226 | } 227 | return None 228 | 229 | 230 | def store_entry_to_db(self, entry_data: Dict): 231 | entry_type = EntryType[entry_data['type']] 232 | entry_data['parent_id'] = self.beacon_id if self.beacon_id != None else entry_data.get('parent_id', -1) # why?? upload has no beacon reference 233 | try: 234 | existing_entry = self.session.query(Entry).filter_by( 235 | timestamp=entry_data['timestamp'], 236 | timezone=entry_data['timezone'], 237 | type=entry_type, 238 | parent_id=entry_data['parent_id'], 239 | content=entry_data.get('content') 240 | ).one_or_none() 241 | 242 | if existing_entry is None: 243 | entry = Entry(**entry_data) 244 | self.session.add(entry) 245 | else: 246 | # Update the existing entry 247 | existing_entry.ttp = entry_data.get('ttp') 248 | existing_entry.operator = entry_data.get('operator') 249 | existing_entry.content = entry_data.get('content') 250 | self.session.add(existing_entry) 251 | 252 | self.session.commit() 253 | except Exception as e: 254 | self.session.rollback() 255 | print(f"Failed to insert log entry: {e}") 256 | 257 | def store_beacon_to_db(self, metadata: Dict): 258 | metadata.pop('type', None) 259 | metadata['id'] = self.beacon_id if self.beacon_id else metadata['id'] 260 | if not metadata['id']: 261 | raise ValueError("Beacon ID not found in metadata") 262 | 263 | try: 264 | existing_beacon = self.session.query(Beacon).filter_by( 265 | id=metadata['id'] 266 | ).one_or_none() 267 | 268 | if existing_beacon is None: 269 | beacon = Beacon(**metadata) 270 | self.session.add(beacon) 271 | else: 272 | # Update the existing beacon via **metadata 273 | existing_beacon.timestamp = metadata['timestamp'] 274 | existing_beacon.ip = metadata['ip'] 275 | existing_beacon.ip_ext = metadata['ip_ext'] 276 | existing_beacon.user = metadata['user'] 277 | existing_beacon.hostname = metadata['hostname'] 278 | # existing_beacon.process = metadata['process'] 279 | # existing_beacon.pid = metadata['pid'] 280 | # existing_beacon.os = metadata['os'] 281 | # existing_beacon.version = metadata['version'] 282 | # existing_beacon.build = metadata['build'] 283 | # existing_beacon.arch = metadata['arch'] 284 | self.session.add(existing_beacon) 285 | 286 | self.session.commit() 287 | except Exception as e: 288 | self.session.rollback() 289 | print(f"Failed to insert or update beacon: {e}") 290 | -------------------------------------------------------------------------------- /modules/parser/oc2_parser.py: -------------------------------------------------------------------------------- 1 | # ost_-log_parser_adapted.py 2 | import json 3 | import os 4 | import re 5 | import threading 6 | from datetime import datetime 7 | from typing import Dict, Optional, Tuple 8 | 9 | from pathlib import Path 10 | import sys 11 | 12 | # Ensure project root is on sys.path when executed as a script 13 | if __package__ is None: # pragma: no cover - runtime safety net 14 | project_root = Path(__file__).resolve().parents[2] 15 | if str(project_root) not in sys.path: 16 | sys.path.insert(0, str(project_root)) 17 | 18 | from sqlalchemy import inspect 19 | from sqlalchemy.orm import Session 20 | 21 | from modules.sql.sqlite_func import init_db 22 | from modules.sql.sqlite_model import Beacon, Entry, EntryType 23 | 24 | 25 | class OSTLogParserAdapted: 26 | TIMESTAMP_REGEX = re.compile( 27 | r"^(?P\d{4}-\d{2}-\d{2}\s\d{2}:\d{2}:\d{2}(?:\.\d+)?)\s+UTC\s*(?P\{.*)", 28 | re.DOTALL, 29 | ) 30 | 31 | def __init__(self, filepath: str, db_path: str, debug: bool = False): 32 | self.filepath = filepath 33 | self.filename = os.path.basename(filepath) 34 | session_factory = init_db(db_path, debug) 35 | self.session: Session = session_factory() 36 | self.lock = threading.RLock() 37 | self.implant_uid_to_db_id: Dict[str, int] = {} 38 | self._load_existing_mappings() 39 | 40 | def _load_existing_mappings(self) -> None: 41 | try: 42 | inspector = inspect(self.session.bind) 43 | columns = {col["name"] for col in inspector.get_columns("beacon")} 44 | except AttributeError: 45 | print( 46 | "Warning: Could not inspect database or Beacon model missing expected schema. Parser will create new beacon entries if UIDs are encountered for the first time." 47 | ) 48 | return 49 | except Exception as exc: 50 | print( 51 | f"Warning: Could not pre-load implant mappings (maybe DB connection issue or schema mismatch?): {exc}" 52 | ) 53 | print( 54 | "Parser will create new beacon entries if UIDs are encountered for the first time in this run." 55 | ) 56 | return 57 | 58 | if "uid_str" not in columns: 59 | print( 60 | "Warning: 'uid_str' column not found in 'beacon' table. Cannot pre-load mappings. Parser will create new beacon entries if UIDs are encountered for the first time." 61 | ) 62 | return 63 | 64 | existing = ( 65 | self.session.query(Beacon.id, Beacon.uid_str) 66 | .filter(Beacon.uid_str.isnot(None)) 67 | .all() 68 | ) 69 | count = 0 70 | for db_id, uid in existing: 71 | if uid: 72 | self.implant_uid_to_db_id[uid] = db_id 73 | count += 1 74 | if count: 75 | print(f"Loaded {count} existing implant mappings from database.") 76 | 77 | @classmethod 78 | def parse_ost_log(cls, filepath: str, db_path: str, debug: bool = False) -> None: 79 | parser = cls(filepath, db_path, debug) 80 | parser.parse() 81 | 82 | @staticmethod 83 | def parse_timestamp(timestamp_str: str) -> Optional[datetime]: 84 | if not timestamp_str: 85 | return None 86 | dt_str = ( 87 | timestamp_str.replace("UTC", "") 88 | .replace("Z", "") 89 | .replace("T", " ") 90 | .strip() 91 | ) 92 | for fmt in ("%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"): 93 | try: 94 | return datetime.strptime(dt_str, fmt) 95 | except ValueError: 96 | continue 97 | print(f"Error parsing timestamp '{timestamp_str}': unsupported format.") 98 | return None 99 | 100 | def read_line(self, line: str, line_num: int) -> Optional[Tuple[datetime, Dict]]: 101 | match = self.TIMESTAMP_REGEX.match(line) 102 | if not match: 103 | print( 104 | f"Warning: Skipping line {line_num} due to format mismatch (timestamp/JSON) in file {self.filename}." 105 | ) 106 | return None 107 | 108 | timestamp = self.parse_timestamp(match.group("timestamp")) 109 | if not timestamp: 110 | print( 111 | f"Warning: Skipping line {line_num} due to timestamp parsing error in file {self.filename}." 112 | ) 113 | return None 114 | 115 | json_part = match.group("payload") 116 | try: 117 | log_data = json.loads(json_part) 118 | except json.JSONDecodeError as exc: 119 | snippet = json_part[:100] 120 | if len(json_part) > 100: 121 | snippet += "..." 122 | print( 123 | f"Warning: Skipping line {line_num} due to invalid JSON in file {self.filename}: {exc}: {snippet}" 124 | ) 125 | return None 126 | except Exception as exc: 127 | print( 128 | f"Error during initial parsing of line {line_num} in file {self.filename}: {exc}" 129 | ) 130 | return None 131 | 132 | return timestamp, log_data 133 | 134 | def parse(self) -> None: 135 | print(f"Starting parsing for file: {self.filename}") 136 | line_num = 0 137 | try: 138 | with open(self.filepath, "r", encoding="utf-8") as file: 139 | for raw_line in file: 140 | line_num += 1 141 | line = raw_line.strip() 142 | if not line: 143 | continue 144 | 145 | parsed_line = self.read_line(line, line_num) 146 | if not parsed_line: 147 | continue 148 | 149 | timestamp, log_data = parsed_line 150 | self._process_event(line_num, timestamp, log_data) 151 | except FileNotFoundError: 152 | print(f"Error: File not found at {self.filepath}") 153 | except Exception as exc: 154 | print(f"An unexpected error occurred while reading {self.filepath}: {exc}") 155 | finally: 156 | self.close() 157 | print(f"Finished parsing file: {self.filename}") 158 | 159 | def _process_event( 160 | self, line_num: int, timestamp: datetime, log_data: Dict 161 | ) -> None: 162 | event_type = log_data.get("event_type") 163 | implant_data = log_data.get("implant") 164 | task_data = log_data.get("task") 165 | 166 | db_beacon_id = self.get_beacon_db_id(implant_data, timestamp) 167 | 168 | if implant_data and db_beacon_id is None: 169 | uid = implant_data.get("uid") 170 | print( 171 | f"Warning: Could not get or create beacon for implant UID {uid} on line {line_num} in file {self.filename}. Skipping entry." 172 | ) 173 | return 174 | 175 | if event_type in ("task_request", "task_response"): 176 | if not db_beacon_id: 177 | print( 178 | f"Warning: Cannot store task entry for event '{event_type}' on line {line_num} as implant DB ID is unknown in file {self.filename}." 179 | ) 180 | return 181 | if not task_data or not isinstance(task_data, dict): 182 | print( 183 | f"Warning: Missing task_data for event '{event_type}' on line {line_num} in file {self.filename}." 184 | ) 185 | return 186 | if "uid" not in task_data or "name" not in task_data: 187 | print( 188 | f"Warning: Task data missing 'uid' or 'name' on line {line_num} in file {self.filename}. Skipping task entry." 189 | ) 190 | return 191 | self.store_task_entry_to_db(event_type, timestamp, db_beacon_id, task_data) 192 | elif event_type == "new_implant": 193 | return 194 | else: 195 | return 196 | 197 | def get_beacon_db_id( 198 | self, implant_data: Optional[Dict], timestamp: datetime 199 | ) -> Optional[int]: 200 | if not implant_data or not isinstance(implant_data, dict): 201 | return None 202 | 203 | implant_uid = implant_data.get("uid") 204 | if not implant_uid: 205 | print( 206 | f"Warning: Implant data missing 'uid' in file {self.filename}. Cannot associate entry." 207 | ) 208 | return None 209 | 210 | db_beacon_id = self.implant_uid_to_db_id.get(implant_uid) 211 | if db_beacon_id: 212 | self.update_beacon_details(db_beacon_id, implant_data, timestamp) 213 | return db_beacon_id 214 | 215 | with self.lock: 216 | existing = ( 217 | self.session.query(Beacon) 218 | .filter(Beacon.uid_str == implant_uid) 219 | .one_or_none() 220 | ) 221 | if existing: 222 | self.implant_uid_to_db_id[implant_uid] = existing.id 223 | self.update_beacon_details(existing.id, implant_data, timestamp) 224 | return existing.id 225 | 226 | db_beacon_id = self.get_or_create_beacon(implant_data, timestamp) 227 | if db_beacon_id: 228 | self.implant_uid_to_db_id[implant_uid] = db_beacon_id 229 | return db_beacon_id 230 | 231 | def get_or_create_beacon( 232 | self, implant_data: Dict, timestamp: datetime 233 | ) -> Optional[int]: 234 | implant_uid = implant_data.get("uid") 235 | if not implant_uid: 236 | print("Error: get_or_create_beacon called with missing implant UID.") 237 | return None 238 | 239 | first_seen = self.parse_timestamp(implant_data.get("first_seen", "")) or timestamp 240 | 241 | beacon = Beacon( 242 | uid_str=implant_uid, 243 | timestamp=first_seen, 244 | timezone="UTC", 245 | hostname=implant_data.get("hostname"), 246 | user=implant_data.get("username"), 247 | ip=implant_data.get("ip"), 248 | ip_ext=implant_data.get("transport_ip"), 249 | process=implant_data.get("proc_name"), 250 | pid=implant_data.get("pid"), 251 | os=implant_data.get("os"), 252 | version=implant_data.get("version"), 253 | arch=str(implant_data.get("arch")) if implant_data.get("arch") is not None else None, 254 | ) 255 | 256 | with self.lock: 257 | try: 258 | self.session.add(beacon) 259 | self.session.flush() 260 | db_id = beacon.id 261 | self.session.commit() 262 | print( 263 | f"Info: Created new Beacon DB record ID {db_id} for implant UID {implant_uid}" 264 | ) 265 | return db_id 266 | except Exception as exc: 267 | self.session.rollback() 268 | print( 269 | f"Failed to create Beacon record for implant {implant_uid}: {exc}" 270 | ) 271 | return None 272 | 273 | def update_beacon_details( 274 | self, 275 | db_beacon_id: int, 276 | implant_data: Dict, 277 | fallback_timestamp: Optional[datetime] = None, 278 | ) -> None: 279 | try: 280 | with self.lock: 281 | beacon = ( 282 | self.session.query(Beacon) 283 | .filter_by(id=db_beacon_id) 284 | .one_or_none() 285 | ) 286 | if not beacon: 287 | return 288 | 289 | updated = False 290 | 291 | last_seen_raw = implant_data.get("last_seen") 292 | last_seen_dt = self.parse_timestamp(last_seen_raw) if last_seen_raw else fallback_timestamp 293 | if last_seen_dt and hasattr(beacon, "last_seen"): 294 | if not beacon.last_seen or last_seen_dt > beacon.last_seen: 295 | beacon.last_seen = last_seen_dt 296 | updated = True 297 | 298 | checkin_count = implant_data.get("checkin_count") 299 | if ( 300 | checkin_count is not None 301 | and hasattr(beacon, "checkin_count") 302 | and ( 303 | beacon.checkin_count is None 304 | or checkin_count > beacon.checkin_count 305 | ) 306 | ): 307 | beacon.checkin_count = checkin_count 308 | updated = True 309 | 310 | pid = implant_data.get("pid") 311 | if hasattr(beacon, "pid") and pid is not None and beacon.pid != pid: 312 | beacon.pid = pid 313 | updated = True 314 | 315 | if updated: 316 | self.session.add(beacon) 317 | self.session.commit() 318 | except Exception as exc: 319 | self.session.rollback() 320 | print(f"Failed to update details for Beacon ID {db_beacon_id}: {exc}") 321 | 322 | def store_task_entry_to_db( 323 | self, 324 | event_type: str, 325 | timestamp: datetime, 326 | db_beacon_id: int, 327 | task_data: Dict, 328 | ) -> None: 329 | operator = task_data.get("operator") 330 | task_uid = task_data.get("uid") 331 | task_name = task_data.get("name") 332 | arguments = task_data.get("arguments") 333 | response = task_data.get("response") 334 | 335 | if event_type == "task_request": 336 | entry_type = EntryType.input 337 | content_parts = [task_name] if task_name else [] 338 | if arguments: 339 | if isinstance(arguments, str): 340 | content_parts.append(arguments) 341 | else: 342 | content_parts.append(json.dumps(arguments)) 343 | content = " ".join(part for part in content_parts if part) 344 | elif event_type == "task_response": 345 | entry_type = EntryType.output 346 | if response is None: 347 | content = "[No Response Content]" 348 | elif isinstance(response, str): 349 | content = response 350 | else: 351 | content = json.dumps(response) 352 | else: 353 | return 354 | 355 | entry_data = { 356 | "timestamp": timestamp, 357 | "timezone": "UTC", 358 | "type": entry_type, 359 | "parent_id": db_beacon_id, 360 | "task_uid": task_uid, 361 | "operator": operator, 362 | "content": content.strip() if content else "", 363 | "ttp": None, 364 | } 365 | 366 | try: 367 | with self.lock: 368 | duplicate = ( 369 | self.session.query(Entry) 370 | .filter( 371 | Entry.timestamp == entry_data["timestamp"], 372 | Entry.parent_id == entry_data["parent_id"], 373 | Entry.type == entry_data["type"], 374 | Entry.task_uid == entry_data["task_uid"], 375 | ) 376 | .first() 377 | ) 378 | if duplicate: 379 | return 380 | 381 | db_entry = Entry(**entry_data) 382 | self.session.add(db_entry) 383 | self.session.commit() 384 | except Exception as exc: 385 | self.session.rollback() 386 | print( 387 | f"Failed to insert Entry for task {task_uid} (Beacon ID {db_beacon_id}): {exc}" 388 | ) 389 | 390 | def close(self) -> None: 391 | if self.session: 392 | self.session.close() 393 | 394 | 395 | if __name__ == "__main__": 396 | sample_logs = [ 397 | os.path.join("logs", "1IL8FMDN.json"), 398 | os.path.join("logs", "JTFHSQLN.json"), 399 | ] 400 | database_path = os.path.join("results", "ost_adapted.db") 401 | 402 | print("Ensure your DB models in sqlite_model.py include Entry.task_uid and Beacon.uid_str before running this script.") 403 | 404 | for log_file in sample_logs: 405 | if os.path.exists(log_file): 406 | OSTLogParserAdapted.parse_ost_log(log_file, database_path, debug=True) 407 | else: 408 | print(f"Log file not found: {log_file}") -------------------------------------------------------------------------------- /modules/parser/cs_parser.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | from typing import List, Dict 4 | from sqlalchemy.orm import exc, relationship 5 | from sqlalchemy import select, and_, Column, DateTime, Integer, String, Enum, ForeignKey 6 | from modules.sql.sqlite_func import init_db 7 | from modules.sql.sqlite_model import EntryType, Beacon, Entry 8 | import threading 9 | from datetime import datetime 10 | 11 | class CSLogParser: 12 | def __init__(self, filepath: str, db_path: str, debug: bool = False): 13 | self.filepath = filepath 14 | # Extract beacon ID from the filename 15 | self.beacon_id = self.extract_beacon_id_from_filename(filepath) 16 | # Extract date from the folder name 17 | self.year_prefix = self.extract_year_prefix_from_filepath(filepath) 18 | # Initialize the database session 19 | session_manager = init_db(db_path, debug) 20 | self.session = session_manager() 21 | # Track the current command and its accumulated output 22 | self.last_command = None 23 | self.current_output = "" 24 | self.is_accumulating_output = False 25 | # Lock for thread-safe database access 26 | self.lock = threading.Lock() 27 | 28 | @staticmethod 29 | def extract_beacon_id_from_filename(filename: str) -> int: 30 | match = re.search(r'beacon_(\d+)', filename) 31 | if match: 32 | return int(match.group(1)) 33 | elif "events" in filename or "downloads" in filename: 34 | return 0 35 | else: 36 | #raise ValueError("Beacon ID could not be extracted from the filename.") 37 | print(f"Beacon ID could not be extracted from the filename: {filename}") 38 | return -1 39 | 40 | @staticmethod 41 | def extract_year_prefix_from_filepath(filepath: str) -> str: 42 | match = re.search(r'(\d{6})', os.path.dirname(filepath)) 43 | if match: 44 | return match.group(1)[:2] 45 | else: 46 | raise ValueError("Year prefix could not be extracted from the folder name.") 47 | 48 | @staticmethod 49 | def parse_beacon_log(filepath: str, db_path: str, debug: bool = False): 50 | if filepath.endswith("weblog_443.log"): 51 | return 52 | parser = CSLogParser(filepath, db_path, debug) 53 | parser.parse() 54 | 55 | @staticmethod 56 | def parse_timestamp(year_prefix: str, timestamp_str: str) -> datetime: 57 | # get the current year 58 | return datetime.strptime(year_prefix + "/" + timestamp_str, "%y/%m/%d %H:%M:%S %Z") 59 | 60 | 61 | def parse(self): 62 | with open(self.filepath, 'r') as file: 63 | for line in file: 64 | current_command = self.parse_line(line) 65 | if current_command and self.is_accumulating_output and current_command['type'] != 'output': 66 | # store the output of the previous command 67 | if self.last_command: 68 | self.store_entry_to_db({'type': 'output', 'timestamp': self.last_command['timestamp'], 'timezone': self.last_command["timezone"], 'content': self.current_output.strip()}) 69 | self.current_output = "" 70 | self.is_accumulating_output = False 71 | self.last_command = current_command 72 | if current_command: 73 | # Handle metadata separately to store or update beacon information 74 | if current_command['type'] == 'metadata': 75 | self.store_beacon_to_db(current_command) 76 | # if new command is found, store the new command and the old output 77 | elif current_command['type'] == 'input': 78 | # store finished entry with its output 79 | if self.is_accumulating_output: 80 | self.store_entry_to_db({'type': 'output', 'timestamp': self.last_command['timestamp'], 'timezone': self.last_command["timezone"], 'content': self.current_output.strip()}) 81 | self.current_output = "" 82 | self.is_accumulating_output = False 83 | # if self.current_output: 84 | # self.store_entry_to_db({'type': 'output', 'timestamp': self.current_command['timestamp'], 'timezone': self.current_command["timezone"], 'content': self.current_output.strip()}) 85 | 86 | self.store_entry_to_db(current_command) 87 | # Reset for the new command 88 | self.last_command = current_command 89 | elif current_command['type'] == 'output' or current_command['type'] == 'received_output' or current_command['type'] == 'error': 90 | # Accumulate output for the current command 91 | self.is_accumulating_output = True 92 | self.current_output += current_command['content'] 93 | else: 94 | # Store any other type of entry immediately 95 | if self.last_command and self.current_output: 96 | self.store_entry_to_db({'type': 'output', 'timestamp': self.last_command['timestamp'], 'timezone': self.last_command["timezone"], 'content': self.current_output.strip()}) 97 | self.last_command = None 98 | self.current_output = "" 99 | self.is_accumulating_output = False 100 | self.store_entry_to_db(current_command) 101 | else: 102 | # add the output to the current command 103 | if self.is_accumulating_output: 104 | self.current_output += line 105 | elif re.match(r'^\s*$', line): 106 | continue 107 | elif "events.log" in self.filepath: 108 | pass 109 | else: 110 | print(f"Could not parse {self.filepath} - {line}") 111 | # Last line of the file: Store the last command of the file and its output if applicable 112 | if self.current_output: 113 | if self.last_command: 114 | self.store_entry_to_db({'type': 'output', 'timestamp': self.last_command['timestamp'], 'timezone': self.last_command["timezone"], 'content': self.current_output.strip()}) 115 | if current_command: 116 | self.store_entry_to_db({'type': 'output', 'timestamp': current_command['timestamp'], 'timezone': current_command["timezone"], 'content': self.current_output.strip()}) 117 | 118 | def parse_line(self, line: str) -> Dict: 119 | # Regular expressions for different log formats 120 | metadata_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[metadata\] (?P[\w\.\_]+) (?P<-|->) (?P[\d\.]+); computer: (?P.*?); user: (?P.*?); process: (?P.*?); pid: (?P\d+); os: (?P.*?); version: (?P.*?); build: (?P.*?); beacon arch: (?P.*)') 121 | input_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[input\] <(?P.*?)> (?P.*)') 122 | output_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[output\](?P.*)') 123 | task_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[task\] <(?P.*?)> (?P.*)') 124 | checkin_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[checkin\] host called home, sent: (?P\d+) bytes') 125 | received_output_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[output\]\s*received output:') 126 | download_pattern = re.compile(r'^(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+))\t(?P[\d\.]+)\t(?P\d+)\t(?P\d+)\t(?P[^\t]+)\t(?P[^\t]+)\t(?P[^\t]*)\r?\n') 127 | job_registered_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[job_registered\] job registered with id (?P\d+)') 128 | job_completed_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[job_completed\] job (?P\d+) completed') 129 | indicator_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[indicator\] (?Pfile: (?P\w+) (?P\d+) bytes (?P.+))') 130 | event_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \*\*\* (?P.*)') 131 | error_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[error\] (?P.*)') 132 | note_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[note\] (?P.*)') 133 | warning_pattern = re.compile(r'(?P\d{2}/\d{2} \d{2}:\d{2}:\d{2} (?P\w+)) \[warning\] (?P.*)') 134 | 135 | metadata_match = metadata_pattern.match(line) 136 | input_match = input_pattern.match(line) 137 | output_match = output_pattern.match(line) 138 | task_match = task_pattern.match(line) 139 | checkin_match = checkin_pattern.match(line) 140 | received_output_match = received_output_pattern.match(line) 141 | event_match = event_pattern.match(line) 142 | download_match = download_pattern.match(line) 143 | error_match = error_pattern.match(line) 144 | job_registered_match = job_registered_pattern.match(line) 145 | job_completed_match = job_completed_pattern.match(line) 146 | indicator_match = indicator_pattern.match(line) 147 | note_match = note_pattern.match(line) 148 | warning_match = warning_pattern.match(line) 149 | 150 | if metadata_match: 151 | return { 152 | 'type': 'metadata', 153 | 'timestamp': self.parse_timestamp(self.year_prefix, metadata_match.group('timestamp')), 154 | 'ip': metadata_match.group('ip_int'), 155 | 'ip_ext': metadata_match.group('ip_ext'), 156 | 'hostname': metadata_match.group('hostname'), 157 | 'user': metadata_match.group('user'), 158 | 'process': metadata_match.group('process'), 159 | 'pid': metadata_match.group('pid'), 160 | 'os': metadata_match.group('os'), 161 | 'version': metadata_match.group('version'), 162 | 'build': metadata_match.group('build'), 163 | 'arch': metadata_match.group('arch'), 164 | } 165 | elif input_match: 166 | return { 167 | 'type': 'input', 168 | 'timestamp': self.parse_timestamp(self.year_prefix, input_match.group('timestamp')), 169 | 'timezone': input_match.group('timezone'), 170 | 'operator': input_match.group('operator'), 171 | 'content': input_match.group('command'), 172 | } 173 | elif output_match: 174 | return { 175 | 'type': 'output', 176 | 'timestamp': self.parse_timestamp(self.year_prefix, output_match.group('timestamp')), 177 | 'timezone': output_match.group('timezone'), 178 | 'content': output_match.group('output').strip(), 179 | } 180 | elif task_match: 181 | return { 182 | 'type': 'task', 183 | 'timestamp': self.parse_timestamp(self.year_prefix, task_match.group('timestamp')), 184 | 'timezone': task_match.group('timezone'), 185 | 'ttp': task_match.group('operator'), 186 | 'content': task_match.group('task_description'), 187 | } 188 | elif checkin_match: 189 | return { 190 | 'type': 'checkin', 191 | 'timestamp': self.parse_timestamp(self.year_prefix, checkin_match.group('timestamp')), 192 | 'timezone': checkin_match.group('timezone'), 193 | 'content': checkin_match.group('bytes_sent'), 194 | } 195 | elif received_output_match: 196 | return { 197 | 'type': 'received_output', 198 | 'timestamp': self.parse_timestamp(self.year_prefix, received_output_match.group('timestamp')), 199 | 'timezone': received_output_match.group('timezone'), 200 | } 201 | elif event_match: 202 | return { 203 | 'type': 'event', 204 | 'timestamp': self.parse_timestamp(self.year_prefix, event_match.group('timestamp')), 205 | 'timezone': event_match.group('timezone'), 206 | 'content': event_match.group('event_description').strip(), 207 | } 208 | elif download_match: 209 | return { 210 | 'type': 'download', 211 | 'timestamp': self.parse_timestamp(self.year_prefix, download_match.group('timestamp')), 212 | 'timezone': download_match.group('timezone'), 213 | 'content': "IP: {}, File: {}{}, Size: {}".format(download_match.group('source_ip'), download_match.group('local_path'), download_match.group('file_name'), download_match.group('size')), 214 | #'content': download_match.group('content').strip(), 215 | # 'source_ip': download_match.group('source_ip'), 216 | # 'session_id': download_match.group('session_id'), 217 | # 'size': download_match.group('size'), 218 | # 'server_path': download_match.group('server_path'), 219 | # 'file_name': download_match.group('file_name'), 220 | # 'local_path': download_match.group('local_path'), 221 | } 222 | elif error_match: 223 | return { 224 | 'type': 'error', 225 | 'timestamp': self.parse_timestamp(self.year_prefix, error_match.group('timestamp')), 226 | 'timezone': error_match.group('timezone'), 227 | 'content': error_match.group('error_message').strip(), 228 | } 229 | elif job_registered_match: 230 | return { 231 | 'type': 'job_registered', 232 | 'timestamp': self.parse_timestamp(self.year_prefix, job_registered_match.group('timestamp')), 233 | 'timezone': job_registered_match.group('timezone'), 234 | 'content': job_registered_match.group('job_id').strip(), 235 | } 236 | elif job_completed_match: 237 | return { 238 | 'type': 'job_completed', 239 | 'timestamp': self.parse_timestamp(self.year_prefix, job_completed_match.group('timestamp')), 240 | 'timezone': job_completed_match.group('timezone'), 241 | 'content': job_completed_match.group('job_id').strip(), 242 | } 243 | elif indicator_match: 244 | return { 245 | 'type': 'indicator', 246 | 'timestamp': self.parse_timestamp(self.year_prefix, indicator_match.group('timestamp')), 247 | 'timezone': indicator_match.group('timezone'), 248 | 'content': "MD5: {}, File: {}, Size: {}".format(indicator_match.group('file_hash'), indicator_match.group('file_path'), indicator_match.group('file_size')), 249 | #'content': indicator_match.group('content').strip(), 250 | # 'file_hash': indicator_match.group('file_hash'), 251 | # 'file_size': indicator_match.group('file_size'), 252 | # 'file_path': indicator_match.group('file_path').strip(), 253 | } 254 | elif note_match: 255 | return { 256 | 'type': 'note', 257 | 'timestamp': self.parse_timestamp(self.year_prefix, note_match.group('timestamp')), 258 | 'timezone': note_match.group('timezone'), 259 | 'content': note_match.group('note_message').strip(), 260 | } 261 | elif warning_match: 262 | return { 263 | 'type': 'note', 264 | 'timestamp': self.parse_timestamp(self.year_prefix, warning_match.group('timestamp')), 265 | 'timezone': warning_match.group('timezone'), 266 | 'content': warning_match.group('warning_message').strip(), 267 | } 268 | return None 269 | 270 | def store_entry_to_db(self, entry_data: Dict): 271 | entry_type = EntryType[entry_data['type']] 272 | entry_data['parent_id'] = self.beacon_id 273 | try: 274 | # Sanity check to avoid adding duplicate entries 275 | with self.lock: 276 | existing_entry = self.session.query(Entry).filter_by( 277 | timestamp=entry_data['timestamp'], 278 | timezone=entry_data['timezone'], 279 | type=entry_type, 280 | parent_id=self.beacon_id, 281 | content=entry_data['content'] 282 | ).one_or_none() 283 | 284 | if existing_entry is None: 285 | entry = Entry(**entry_data) 286 | self.session.add(entry) 287 | else: 288 | # update the entry object 289 | existing_entry.ttp = entry_data['ttp'] if 'ttp' in entry_data else None 290 | existing_entry.operator = entry_data['operator'] if 'operator' in entry_data else None 291 | existing_entry.content = entry_data['content'] if 'content' in entry_data else None 292 | self.session.add(existing_entry) 293 | 294 | self.session.commit() 295 | except Exception as e: 296 | self.session.rollback() 297 | print(f"Failed to insert log entry: {e}") 298 | 299 | def store_beacon_to_db(self, metadata: Dict): 300 | #remove type from metadata 301 | metadata.pop('type', None) 302 | try: 303 | # Sanity check to avoid adding duplicate beacons 304 | with self.lock: 305 | existing_beacon = self.session.query(Beacon).filter_by( 306 | id=self.beacon_id 307 | ).one_or_none() 308 | 309 | if existing_beacon is None: 310 | beacon = Beacon(**metadata, id=self.beacon_id) 311 | self.session.add(beacon) 312 | else: 313 | existing_beacon.ip = metadata['ip'] 314 | existing_beacon.ip_ext = metadata['ip_ext'] 315 | existing_beacon.hostname = metadata['hostname'] 316 | existing_beacon.user = metadata['user'] 317 | existing_beacon.process = metadata['process'] 318 | existing_beacon.pid = metadata['pid'] 319 | existing_beacon.os = metadata['os'] 320 | existing_beacon.version = metadata['version'] 321 | existing_beacon.build = metadata['build'] 322 | existing_beacon.arch = metadata['arch'] 323 | existing_beacon.timestamp = metadata['timestamp'] 324 | self.session.add(existing_beacon) 325 | 326 | self.session.commit() 327 | except Exception as e: 328 | self.session.rollback() 329 | print(f"Failed to insert or update beacon: {e}") 330 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | --------------------------------------------------------------------------------