├── scripts ├── __init__.py ├── lib │ ├── __init__.py │ ├── global_vars.py │ ├── state.py │ ├── step_state.py │ ├── alerts.py │ ├── util_user.py │ ├── util_file.py │ ├── util.py │ ├── util_utmp.py │ ├── util_lastlog.py │ └── util_module.py ├── config │ ├── __init__.py │ ├── test_alert.py │ ├── search_dev_shm.py │ ├── monitor_hosts_file.py │ ├── monitor_ld_preload.py │ ├── search_deleted_exe.py │ ├── search_memfd_create.py │ ├── monitor_ssh_authorized_keys.py │ ├── search_ssh_leftover_processes.py │ ├── search_non_kthreads.py │ ├── monitor_modules.py │ ├── search_tainted_modules.py │ ├── verify_deb_packages.py │ ├── search_lastlog_in_utmp.py │ ├── search_utmp_tampering.py │ ├── monitor_cron.py │ ├── monitor_passwd.py │ ├── config.py │ ├── monitor_systemd_units.py │ ├── search_hidden_exe.py │ └── search_immutable_files.py ├── test_alert.py ├── verify_deb_packages.py ├── search_non_kthreads.py ├── search_memfd_create.py ├── search_ssh_leftover_processes.py ├── monitor_ld_preload.py ├── search_dev_shm.py ├── monitor_modules.py ├── search_tainted_modules.py ├── search_deleted_exe.py ├── monitor_hosts_file.py ├── search_hidden_exe.py ├── search_lastlog_in_utmp.py ├── search_immutable_files.py ├── monitor_ssh_authorized_keys.py ├── monitor_systemd_units.py ├── search_utmp_tampering.py └── monitor_cron.py ├── requirements.txt ├── .gitignore ├── .github └── FUNDING.yml ├── test ├── resources │ ├── lastlog │ ├── random │ ├── wtmp_benign │ ├── wtmp_benign2 │ └── passwd ├── test_lib_util_utmp.py ├── test_lib_util_lastlog.py ├── test_lib_util_user.py ├── test_search_deleted_exe.py ├── test_lib_util_module.py ├── test_search_lastlog_in_utmp.py ├── test_search_tainted_modules.py └── test_search_utmp_tampering.py ├── LICENSE ├── README.md └── start_search.py /scripts/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /scripts/lib/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /scripts/config/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | python-dateutil~=2.9.0 2 | -------------------------------------------------------------------------------- /scripts/lib/global_vars.py: -------------------------------------------------------------------------------- 1 | SUPPRESS_OUTPUT = False 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .git 2 | .idea 3 | .venv 4 | **/__pycache__ 5 | scripts/state -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: sqall01 2 | patreon: sqall 3 | custom: ["paypal.me/sqall"] 4 | -------------------------------------------------------------------------------- /scripts/config/test_alert.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 
2 | ACTIVATED = False 3 | -------------------------------------------------------------------------------- /test/resources/lastlog: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqall01/LSMS/HEAD/test/resources/lastlog -------------------------------------------------------------------------------- /test/resources/random: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqall01/LSMS/HEAD/test/resources/random -------------------------------------------------------------------------------- /scripts/config/search_dev_shm.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | -------------------------------------------------------------------------------- /scripts/config/monitor_hosts_file.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | -------------------------------------------------------------------------------- /scripts/config/monitor_ld_preload.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | -------------------------------------------------------------------------------- /scripts/config/search_deleted_exe.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | -------------------------------------------------------------------------------- /scripts/config/search_memfd_create.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 
2 | ACTIVATED = True 3 | -------------------------------------------------------------------------------- /test/resources/wtmp_benign: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqall01/LSMS/HEAD/test/resources/wtmp_benign -------------------------------------------------------------------------------- /test/resources/wtmp_benign2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sqall01/LSMS/HEAD/test/resources/wtmp_benign2 -------------------------------------------------------------------------------- /scripts/config/monitor_ssh_authorized_keys.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | -------------------------------------------------------------------------------- /scripts/config/search_ssh_leftover_processes.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | -------------------------------------------------------------------------------- /scripts/config/search_non_kthreads.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # List of process names that are ignored. 4 | NON_KTHREAD_WHITELIST = [] # type: List[str] 5 | 6 | # Is the script allowed to run or not? 7 | ACTIVATED = True 8 | -------------------------------------------------------------------------------- /scripts/config/monitor_modules.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # List of modules that are loaded and should be ignored. 4 | MODULES_WHITELIST = [] # type: List[str] 5 | 6 | # Is the script allowed to run or not? 
7 | ACTIVATED = True 8 | -------------------------------------------------------------------------------- /scripts/config/search_tainted_modules.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # List of modules that are loaded and should be ignored. 4 | MODULES_WHITELIST = [] # type: List[str] 5 | 6 | # Is the script allowed to run or not? 7 | ACTIVATED = True 8 | -------------------------------------------------------------------------------- /scripts/config/verify_deb_packages.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # Executable of debsums. 4 | DEBSUMS_EXE = "/usr/bin/debsums" 5 | 6 | # List of changed deb package files to ignore. 7 | FILE_WHITELIST = [] # type: List[str] 8 | 9 | # Is the script allowed to run or not? 10 | ACTIVATED = True 11 | -------------------------------------------------------------------------------- /scripts/config/search_lastlog_in_utmp.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | 4 | # File locations of utmp files 5 | UTMP_FILE_LOCATIONS = ["/var/run/utmp", "/var/log/wtmp", "/var/log/wtmp.1"] # type: List[str] 6 | 7 | # File location of lastlog file 8 | LASTLOG_FILE_LOCATION = "/var/log/lastlog" # type: str 9 | 10 | # File location of passwd file 11 | PASSWD_FILE_LOCATION = "/etc/passwd" # type: str 12 | -------------------------------------------------------------------------------- /scripts/config/search_utmp_tampering.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from typing import List 3 | 4 | # Is the script allowed to run or not? 
5 | ACTIVATED = True 6 | 7 | # File locations of utmp files 8 | UTMP_FILE_LOCATIONS = ["/var/run/utmp", "/var/log/wtmp", "/var/log/wtmp.1", "/var/log/btmp"] # type: List[str] 9 | 10 | # Oldest allowed timestamp entry in utmp file 11 | UTMP_OLDEST_ENTRY = datetime.datetime.now() - datetime.timedelta(days=3650) # type: datetime.datetime 12 | -------------------------------------------------------------------------------- /scripts/config/monitor_cron.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # Is the script allowed to run or not? 4 | ACTIVATED = True 5 | 6 | # Directory in which cron stores crontab files for individual users. Following is the default on Ubuntu/Debian. 7 | USER_CRONTAB_DIR = "/var/spool/cron/crontabs/" 8 | 9 | # Directories in which scripts can be placed that are executed by cron. Following list are the defaults on Ubuntu/Debian. 10 | CRON_SCRIPT_DIRS = ["/etc/cron.daily", "/etc/cron.hourly", "/etc/cron.monthly", "/etc/cron.weekly"] # type: List[str] 11 | -------------------------------------------------------------------------------- /scripts/config/monitor_passwd.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Set 2 | 3 | # Is the script allowed to run or not? 
4 | ACTIVATED = True 5 | 6 | # Shells that do not allow interactive login 7 | SHELL_NO_LOGIN = {"/usr/bin/false", 8 | "/bin/false", 9 | "/sbin/nologin", 10 | "/usr/sbin/nologin", 11 | "/usr/bin/nologin", 12 | "/bin/nologin"} # type: Set[str] 13 | 14 | # Service accounts that are allowed to have an interactive shell 15 | SERVICE_ACCOUNT_SHELL_WHITELIST = {"sync": {"/bin/sync"}} # type: Dict[str, Set[str]] -------------------------------------------------------------------------------- /scripts/config/config.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | # NOTE: If no "AlertR alert settings" and "Mail alert settings" are set to 4 | # None, each script will fall back to print its output. 5 | 6 | # AlertR alert settings. 7 | ALERTR_FIFO = None # type: Optional[str] 8 | 9 | # Mail alert settings. 10 | FROM_ADDR = None # type: Optional[str] 11 | TO_ADDR = None # type: Optional[str] 12 | 13 | # Directory to hold states in. Defaults to "/tmp" if not set. 14 | STATE_DIR = "state" 15 | 16 | # If "start_search.py" is used to execute all scripts, this setting configures 17 | # the time in seconds before a script times out. 18 | START_PROCESS_TIMEOUT = 60 19 | -------------------------------------------------------------------------------- /scripts/config/monitor_systemd_units.py: -------------------------------------------------------------------------------- 1 | # Is the script allowed to run or not? 2 | ACTIVATED = True 3 | 4 | # Directories in which systemd unit files can be placed. Following list are the defaults on Ubuntu/Debian. 
5 | SYSTEMD_UNIT_DIRS = ["/etc/systemd/system", 6 | "/etc/systemd/user", 7 | "/etc/systemd/network", 8 | "/usr/lib/systemd/system", 9 | "/usr/lib/systemd/user", 10 | "/usr/lib/systemd/network", 11 | "/usr/local/lib/systemd/system", 12 | "/usr/local/lib/systemd/user", 13 | "/usr/local/lib/systemd/network", 14 | "/lib/systemd/system", 15 | "/lib/systemd/user", 16 | "/lib/systemd/network"] # type: List[str] 17 | -------------------------------------------------------------------------------- /scripts/config/search_hidden_exe.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # List of directories to search for hidden ELF files. Defaults to "/". 4 | SEARCH_LOCATIONS = [] # type: List[str] 5 | 6 | # To prevent a timeout if this script is run regularly for monitoring, 7 | # the search can be done in steps for each location given in SEARCH_LOCATIONS. 8 | # Steps mean if you have location_A, the first execution of this script will 9 | # process location_A non-recursively and terminates, 10 | # the second execution will process location_A/subdir_A recursively and terminates, 11 | # the third execution will process location_A/subdir_B recursively and terminates and so on. 12 | # After all subdirectories were processed, the subsequent execution will begin again with location_A non-recursively. 13 | SEARCH_IN_STEPS = False 14 | 15 | # List of directories to ignore. 16 | HIDDEN_EXE_DIRECTORY_WHITELIST = [] # type: List[str] 17 | 18 | # List of hidden ELF files to ignore. 19 | HIDDEN_EXE_FILE_WHITELIST = [] # type: List[str] 20 | 21 | # Is the script allowed to run or not? 22 | ACTIVATED = True 23 | -------------------------------------------------------------------------------- /scripts/config/search_immutable_files.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | # List of directories to search for immutable files. Defaults to "/". 
4 | SEARCH_LOCATIONS = [] # type: List[str] 5 | 6 | # To prevent a timeout if this script is run regularly for monitoring, 7 | # the search can be done in steps for each location given in SEARCH_LOCATIONS. 8 | # Steps mean if you have location_A, the first execution of this script will 9 | # process location_A non-recursively and terminates, 10 | # the second execution will process location_A/subdir_A recursively and terminates, 11 | # the third execution will process location_A/subdir_B recursively and terminates and so on. 12 | # After all subdirectories were processed, the subsequent execution will begin again with location_A non-recursively. 13 | SEARCH_IN_STEPS = False 14 | 15 | # List of directories to ignore. 16 | IMMUTABLE_DIRECTORY_WHITELIST = [] # type: List[str] 17 | 18 | # List of immutable files to ignore. 19 | IMMUTABLE_FILE_WHITELIST = [] # type: List[str] 20 | 21 | # Is the script allowed to run or not? 22 | ACTIVATED = True 23 | -------------------------------------------------------------------------------- /scripts/test_alert.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | If scripts are executed via cronjob, this script helps to check if the alert functions work. 13 | 14 | Requirements: 15 | None 16 | """ 17 | 18 | import sys 19 | from lib.util import output_finding 20 | 21 | # Read configuration. 
22 | try: 23 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR 24 | from config.test_alert import ACTIVATED 25 | except: 26 | ALERTR_FIFO = None 27 | FROM_ADDR = None 28 | TO_ADDR = None 29 | ACTIVATED = False 30 | 31 | 32 | if __name__ == '__main__': 33 | is_init_run = False 34 | if len(sys.argv) == 2: 35 | if sys.argv[1] == "--init": 36 | is_init_run = True 37 | 38 | # Script does not need to establish a state. 39 | if not is_init_run: 40 | if ACTIVATED: 41 | message = "Alert test." 42 | output_finding(__file__, message) 43 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Andre Pawlowski 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /scripts/lib/state.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import stat 4 | from typing import Dict, Any 5 | 6 | 7 | class StateException(Exception): 8 | pass 9 | 10 | 11 | def load_state(state_dir: str) -> Dict[str, Any]: 12 | state_file = os.path.join(state_dir, "state") 13 | state_data = {} 14 | if os.path.isfile(state_file): 15 | data = None 16 | try: 17 | with open(state_file, 'rt') as fp: 18 | data = fp.read() 19 | if data is None: 20 | raise StateException("Read state data is None.") 21 | 22 | state_data = json.loads(data) 23 | 24 | except Exception as e: 25 | raise StateException("State data: '%s'; Exception: '%s'" % (str(data), str(e))) 26 | 27 | return state_data 28 | 29 | 30 | def store_state(state_dir: str, state_data: Dict[str, Any]): 31 | # Create state dir if it does not exist. 32 | if not os.path.exists(state_dir): 33 | os.makedirs(state_dir) 34 | 35 | state_file = os.path.join(state_dir, "state") 36 | 37 | with open(state_file, 'wt') as fp: 38 | fp.write(json.dumps(state_data)) 39 | 40 | os.chmod(state_file, stat.S_IREAD | stat.S_IWRITE) 41 | 42 | 43 | -------------------------------------------------------------------------------- /scripts/lib/step_state.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import stat 4 | from typing import Dict, Any 5 | 6 | from .state import StateException 7 | from .util_file import FileLocation 8 | 9 | 10 | class StepLocation(FileLocation): 11 | def __init__(self, location: str, search_recursive: bool): 12 | super().__init__(location) 13 | self._search_recursive = search_recursive 14 | 15 | @property 16 | def search_recursive(self) -> bool: 17 | return self._search_recursive 18 | 19 | 20 | class StepStateException(StateException): 21 | def __init__(self, msg: str): 22 | 
super().__init__(msg) 23 | 24 | 25 | def load_step_state(state_dir: str) -> Dict[str, Any]: 26 | state_file = os.path.join(state_dir, "step_state") 27 | state_data = {"next_step": 0} 28 | if os.path.isfile(state_file): 29 | data = None 30 | try: 31 | with open(state_file, 'rt') as fp: 32 | data = fp.read() 33 | if data is None: 34 | raise StepStateException("Read state data is None.") 35 | 36 | state_data = json.loads(data) 37 | 38 | except Exception as e: 39 | raise StepStateException("State data: '%s'; Exception: '%s'" % (str(data), str(e))) 40 | 41 | return state_data 42 | 43 | 44 | def store_step_state(state_dir: str, state_data: Dict[str, Any]): 45 | # Create state dir if it does not exist. 46 | if not os.path.exists(state_dir): 47 | os.makedirs(state_dir) 48 | 49 | state_file = os.path.join(state_dir, "step_state") 50 | 51 | with open(state_file, 'wt') as fp: 52 | fp.write(json.dumps(state_data)) 53 | 54 | os.chmod(state_file, stat.S_IREAD | stat.S_IWRITE) 55 | -------------------------------------------------------------------------------- /scripts/lib/alerts.py: -------------------------------------------------------------------------------- 1 | import json 2 | import smtplib 3 | import os 4 | import time 5 | from typing import Dict, Any 6 | 7 | 8 | def raise_alert_alertr(alertr_fifo: str, 9 | optional_data_dict: Dict[str, Any]): 10 | # Send message to AlertR. 11 | msg_dict = dict() 12 | msg_dict["message"] = "sensoralert" 13 | 14 | payload_dict = dict() 15 | payload_dict["state"] = 1 16 | payload_dict["dataType"] = 0 17 | payload_dict["data"] = {} 18 | payload_dict["hasLatestData"] = False 19 | payload_dict["changeState"] = False 20 | payload_dict["hasOptionalData"] = True 21 | payload_dict["optionalData"] = optional_data_dict 22 | msg_dict["payload"] = payload_dict 23 | 24 | for i in range(10): 25 | try: 26 | # Will throw an exception if FIFO file does not have a reader instead of blocking. 
27 | fd = os.open(alertr_fifo, os.O_WRONLY | os.O_NONBLOCK) 28 | os.write(fd, (json.dumps(msg_dict) + "\n").encode("ascii")) 29 | os.close(fd) 30 | # Give AlertR sensor time to process the data. 31 | # Otherwise, a parsing error might occur on the FIFO sensor when multiple messages were mixed. 32 | time.sleep(2) 33 | break 34 | 35 | except Exception: 36 | time.sleep(5) 37 | 38 | 39 | def raise_alert_mail(from_addr: str, 40 | to_addr: str, 41 | subject: str, 42 | message: str): 43 | 44 | email_header = "From: %s\r\nTo: %s\r\nSubject: %s\r\n" \ 45 | % (from_addr, to_addr, subject) 46 | 47 | for i in range(10): 48 | try: 49 | smtp_server = smtplib.SMTP("127.0.0.1", 25) 50 | smtp_server.sendmail(from_addr, 51 | to_addr, 52 | email_header + message) 53 | smtp_server.quit() 54 | break 55 | 56 | except Exception: 57 | time.sleep(5) 58 | -------------------------------------------------------------------------------- /scripts/verify_deb_packages.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Use `debsums` to verify the integrity of installed deb packages using /var/lib/dpkg/info/*.md5sums. 13 | 14 | Requirements: 15 | `debsums` installed on system 16 | 17 | Reference: 18 | https://www.sandflysecurity.com/blog/detecting-linux-binary-file-poisoning/ 19 | """ 20 | 21 | import os 22 | import sys 23 | from typing import List 24 | 25 | from lib.util import output_finding 26 | 27 | # Read configuration. 
28 | try: 29 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR 30 | from config.verify_deb_packages import ACTIVATED, DEBSUMS_EXE, FILE_WHITELIST 31 | except: 32 | ALERTR_FIFO = None 33 | FROM_ADDR = None 34 | TO_ADDR = None 35 | DEBSUMS_EXE = "debsums" 36 | FILE_WHITELIST = [] 37 | ACTIVATED = True 38 | 39 | 40 | def _process_whitelist(changed_files: List[str]) -> List[str]: 41 | if not FILE_WHITELIST: 42 | return changed_files 43 | 44 | new_changed_files = [] 45 | for changed_file in changed_files: 46 | if changed_file in FILE_WHITELIST: 47 | continue 48 | new_changed_files.append(changed_file) 49 | 50 | return new_changed_files 51 | 52 | 53 | def verify_deb_packages(): 54 | 55 | # Decide where to output results. 56 | print_output = False 57 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 58 | print_output = True 59 | 60 | if not ACTIVATED: 61 | if print_output: 62 | print("Module deactivated.") 63 | return 64 | 65 | fd = os.popen("%s -c 2> /dev/null" % DEBSUMS_EXE) 66 | output_raw = fd.read().strip() 67 | fd.close() 68 | 69 | if output_raw != "": 70 | changed_files = output_raw.split("\n") 71 | 72 | changed_files = _process_whitelist(changed_files) 73 | 74 | if changed_files: 75 | message = "Changed deb package files found.\n\n" 76 | message += "\n".join(["File: %s" % x for x in changed_files]) 77 | 78 | output_finding(__file__, message) 79 | 80 | 81 | if __name__ == '__main__': 82 | is_init_run = False 83 | if len(sys.argv) == 2: 84 | if sys.argv[1] == "--init": 85 | is_init_run = True 86 | 87 | # Script does not need to establish a state. 
88 | if not is_init_run: 89 | verify_deb_packages() 90 | -------------------------------------------------------------------------------- /test/resources/passwd: -------------------------------------------------------------------------------- 1 | root:x:0:0:root:/root:/bin/bash 2 | daemon:x:1:1:daemon:/usr/sbin:/usr/sbin/nologin 3 | bin:x:2:2:bin:/bin:/usr/sbin/nologin 4 | sys:x:3:3:sys:/dev:/usr/sbin/nologin 5 | sync:x:4:65534:sync:/bin:/bin/sync 6 | games:x:5:60:games:/usr/games:/usr/sbin/nologin 7 | man:x:6:12:man:/var/cache/man:/usr/sbin/nologin 8 | lp:x:7:7:lp:/var/spool/lpd:/usr/sbin/nologin 9 | mail:x:8:8:mail:/var/mail:/usr/sbin/nologin 10 | news:x:9:9:news:/var/spool/news:/usr/sbin/nologin 11 | uucp:x:10:10:uucp:/var/spool/uucp:/usr/sbin/nologin 12 | proxy:x:13:13:proxy:/bin:/usr/sbin/nologin 13 | www-data:x:33:33:www-data:/var/www:/usr/sbin/nologin 14 | backup:x:34:34:backup:/var/backups:/usr/sbin/nologin 15 | list:x:38:38:Mailing List Manager:/var/list:/usr/sbin/nologin 16 | irc:x:39:39:ircd:/run/ircd:/usr/sbin/nologin 17 | _apt:x:42:65534::/nonexistent:/usr/sbin/nologin 18 | nobody:x:65534:65534:nobody:/nonexistent:/usr/sbin/nologin 19 | systemd-network:x:998:998:systemd Network Management:/:/usr/sbin/nologin 20 | systemd-timesync:x:996:996:systemd Time Synchronization:/:/usr/sbin/nologin 21 | dhcpcd:x:100:65534:DHCP Client Daemon,,,:/usr/lib/dhcpcd:/bin/false 22 | messagebus:x:101:101::/nonexistent:/usr/sbin/nologin 23 | syslog:x:102:102::/nonexistent:/usr/sbin/nologin 24 | systemd-resolve:x:991:991:systemd Resolver:/:/usr/sbin/nologin 25 | uuidd:x:103:103::/run/uuidd:/usr/sbin/nologin 26 | usbmux:x:104:46:usbmux daemon,,,:/var/lib/usbmux:/usr/sbin/nologin 27 | tss:x:105:105:TPM software stack,,,:/var/lib/tpm:/bin/false 28 | systemd-oom:x:990:990:systemd Userspace OOM Killer:/:/usr/sbin/nologin 29 | kernoops:x:106:65534:Kernel Oops Tracking Daemon,,,:/:/usr/sbin/nologin 30 | whoopsie:x:107:109::/nonexistent:/bin/false 31 | 
dnsmasq:x:999:65534:dnsmasq:/var/lib/misc:/usr/sbin/nologin 32 | avahi:x:108:111:Avahi mDNS daemon,,,:/run/avahi-daemon:/usr/sbin/nologin 33 | tcpdump:x:109:112::/nonexistent:/usr/sbin/nologin 34 | sssd:x:110:113:SSSD system user,,,:/var/lib/sss:/usr/sbin/nologin 35 | speech-dispatcher:x:111:29:Speech Dispatcher,,,:/run/speech-dispatcher:/bin/false 36 | cups-pk-helper:x:112:114:user for cups-pk-helper service,,,:/nonexistent:/usr/sbin/nologin 37 | fwupd-refresh:x:989:989:Firmware update daemon:/var/lib/fwupd:/usr/sbin/nologin 38 | saned:x:113:116::/var/lib/saned:/usr/sbin/nologin 39 | geoclue:x:114:117::/var/lib/geoclue:/usr/sbin/nologin 40 | cups-browsed:x:115:114::/nonexistent:/usr/sbin/nologin 41 | hplip:x:116:7:HPLIP system user,,,:/run/hplip:/bin/false 42 | gnome-remote-desktop:x:988:988:GNOME Remote Desktop:/var/lib/gnome-remote-desktop:/usr/sbin/nologin 43 | polkitd:x:987:987:User for polkitd:/:/usr/sbin/nologin 44 | rtkit:x:117:119:RealtimeKit,,,:/proc:/usr/sbin/nologin 45 | colord:x:118:120:colord colour management daemon,,,:/var/lib/colord:/usr/sbin/nologin 46 | gnome-initial-setup:x:119:65534::/run/gnome-initial-setup/:/bin/false 47 | gdm:x:120:121:Gnome Display Manager:/var/lib/gdm3:/bin/false 48 | nm-openvpn:x:121:122:NetworkManager OpenVPN,,,:/var/lib/openvpn/chroot:/usr/sbin/nologin 49 | sqall:x:1000:1000:sqall:/home/sqall:/bin/bash 50 | sshd:x:122:65534::/run/sshd:/usr/sbin/nologin 51 | xrdp:x:123:125::/run/xrdp:/usr/sbin/nologin 52 | -------------------------------------------------------------------------------- /scripts/lib/util_user.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | 4 | class PasswdException(Exception): 5 | pass 6 | 7 | 8 | class SystemUser: 9 | 10 | def __init__(self, 11 | name: str, 12 | password: str, 13 | uid: int, 14 | gid: int, 15 | info: str, 16 | home: str, 17 | shell: str): 18 | self._name = name 19 | self._password = password 20 | self._uid = uid 21 
| self._gid = gid 22 | self._info = info 23 | self._home = home 24 | self._shell = shell 25 | 26 | def __eq__(self, other): 27 | return (hasattr(other, "name") 28 | and self.name == other.name 29 | and hasattr(other, "password") 30 | and self.password == other.password 31 | and hasattr(other, "uid") 32 | and self.uid == other.uid 33 | and hasattr(other, "gid") 34 | and self.gid == other.gid 35 | and hasattr(other, "info") 36 | and self.info == other.info 37 | and hasattr(other, "home") 38 | and self.home == other.home 39 | and hasattr(other, "shell") 40 | and self.shell == other.shell) 41 | 42 | def __hash__(self): 43 | return hash((self.name, self.password, self.uid, self.gid, self.info, self.home, self.shell)) 44 | 45 | def __str__(self): 46 | return "%s:%s:%d:%d:%s:%s:%s" % (self._name, 47 | self._password, 48 | self._uid, 49 | self._gid, 50 | self._info, 51 | self._home, 52 | self._shell) 53 | 54 | @staticmethod 55 | def from_passwd_line(passwd_line: str): 56 | line_split = passwd_line.split(":") 57 | if len(line_split) != 7: 58 | raise ValueError("Illegal line: %s" % passwd_line) 59 | return SystemUser(line_split[0], 60 | line_split[1], 61 | int(line_split[2]), 62 | int(line_split[3]), 63 | line_split[4], 64 | line_split[5], 65 | line_split[6]) 66 | 67 | @property 68 | def name(self) -> str: 69 | return self._name 70 | 71 | @property 72 | def password(self) -> str: 73 | return self._password 74 | 75 | @property 76 | def uid(self) -> int: 77 | return self._uid 78 | 79 | @property 80 | def gid(self) -> int: 81 | return self._gid 82 | 83 | @property 84 | def info(self) -> str: 85 | return self._info 86 | 87 | @property 88 | def home(self) -> str: 89 | return self._home 90 | 91 | @property 92 | def shell(self) -> str: 93 | return self._shell 94 | 95 | def get_system_users(passwd_file: str = "/etc/passwd") -> List[SystemUser]: 96 | """ 97 | Gets the system's users from /etc/passwd 98 | :return: 99 | """ 100 | user_list = [] 101 | try: 102 | with open(passwd_file, 
'rt') as fp: 103 | for line in fp: 104 | if line.strip() == "": 105 | continue 106 | user_list.append(SystemUser.from_passwd_line(line.strip())) 107 | 108 | except Exception as e: 109 | raise PasswdException(str(e)) 110 | 111 | return user_list 112 | -------------------------------------------------------------------------------- /scripts/search_non_kthreads.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Malware will name itself with [brackets] to impersonate a Linux kernel thread. 13 | Any Linux process that looks like a [kernel thread] should have an empty maps file. 14 | 15 | Site note: 16 | when using ps auxwf | grep "\\[" they are children of [kthreadd] 17 | 18 | Requirements: 19 | None 20 | 21 | Reference: 22 | https://twitter.com/CraigHRowland/status/1232399132632813568 23 | https://www.sandflysecurity.com/blog/detecting-linux-kernel-process-masquerading-with-command-line-forensics/ 24 | """ 25 | 26 | import os 27 | import sys 28 | 29 | from lib.util import output_error, output_finding 30 | 31 | # Read configuration. 32 | try: 33 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR 34 | from config.search_non_kthreads import NON_KTHREAD_WHITELIST, ACTIVATED 35 | except: 36 | ALERTR_FIFO = None 37 | FROM_ADDR = None 38 | TO_ADDR = None 39 | NON_KTHREAD_WHITELIST = [] 40 | ACTIVATED = True 41 | 42 | 43 | def search_suspicious_process(): 44 | 45 | # Decide where to output results. 46 | print_output = False 47 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 48 | print_output = True 49 | 50 | if not ACTIVATED: 51 | if print_output: 52 | print("Module deactivated.") 53 | return 54 | 55 | # Iterate over all processes that have a "[". 
56 | fd = os.popen("ps auxw | grep \\\\[ | awk '{print $2}'") 57 | pids_raw = fd.read().strip() 58 | fd.close() 59 | for pid in pids_raw.split("\n"): 60 | 61 | # Get process name of pid. 62 | fd = os.popen("ps u -p %s" % pid) 63 | ps_output = fd.read().strip() 64 | fd.close() 65 | 66 | fd = os.popen("ps u -p %s | awk '{$1=$2=$3=$4=$5=$6=$7=$8=$9=$10=\"\"; print $0}'" % pid) 67 | process_name_raw = fd.read().strip() 68 | fd.close() 69 | for process_name in process_name_raw.split("\n"): 70 | process_name = process_name.strip() 71 | # Ignore COMMAND since it is part of the headline of ps output. 72 | if process_name == "COMMAND": 73 | continue 74 | 75 | # Check if we have whitelisted the process 76 | # (e.g., [lxc monitor] /var/lib/lxc satellite). 77 | elif process_name in NON_KTHREAD_WHITELIST: 78 | continue 79 | 80 | # Only consider process names that start with a "[" 81 | # (e.g., "avahi-daemon: running [towelie.local]"" does not) 82 | elif process_name.startswith("["): 83 | 84 | file_path = "/proc/%s/maps" % pid 85 | try: 86 | with open(file_path, 'rt') as fp: 87 | data = fp.read() 88 | if data == "": 89 | continue 90 | 91 | except Exception as e: 92 | output_error(__file__, str(e)) 93 | continue 94 | 95 | message = "Process with pid '%s' suspicious.\n\n" % pid 96 | message += ps_output 97 | output_finding(__file__, message) 98 | 99 | 100 | if __name__ == '__main__': 101 | is_init_run = False 102 | if len(sys.argv) == 2: 103 | if sys.argv[1] == "--init": 104 | is_init_run = True 105 | 106 | # Script does not need to establish a state. 
def search_deleted_memfd_files():
    """
    Searches for /proc/*/exe links that point to deleted "memfd:" pseudo
    files, which indicates a process running a fileless executable created
    via memfd_create().

    In monitoring mode (--monitoring) the findings of earlier runs are
    persisted in the state directory and only new findings are reported.
    """

    # Decide where to output results.
    print_output = False
    if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None:
        print_output = True

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    # Load the findings reported during earlier runs (monitoring mode only).
    last_suspicious_exes = []
    if MONITORING_MODE:
        try:
            stored_data = load_state(STATE_DIR)
            if "suspicious_exes" in stored_data.keys():
                last_suspicious_exes = stored_data["suspicious_exes"]

        except Exception as e:
            output_error(__file__, str(e))
            return

    # Get all suspicious ELF files.
    # "ls -laR" resolves the /proc/<pid>/exe symlinks; a target containing
    # "memfd:" and "(deleted)" marks an anonymous in-memory executable.
    fd = os.popen("ls -laR /proc/*/exe 2> /dev/null | grep memfd:.*\\(deleted\\)")
    suspicious_exe_raw = fd.read().strip()
    fd.close()

    current_suspicious_exes = []
    if suspicious_exe_raw.strip():
        current_suspicious_exes.extend(suspicious_exe_raw.strip().split("\n"))

    # Extract new findings.
    new_suspicious_exes = []
    for current_suspicious_exe in current_suspicious_exes:
        if current_suspicious_exe not in last_suspicious_exes:
            new_suspicious_exes.append(current_suspicious_exe)

    # Remove stored findings that do no longer exist.
    for last_suspicious_exe in list(last_suspicious_exes):
        if last_suspicious_exe not in current_suspicious_exes:
            last_suspicious_exes.remove(last_suspicious_exe)

    if new_suspicious_exes:
        message = "Deleted memfd file(s) found:\n\n"
        message += "\n".join(new_suspicious_exes)

        output_finding(__file__, message)

    # Persist the pruned old findings plus the new ones for the next run.
    if MONITORING_MODE:
        try:
            last_suspicious_exes.extend(new_suspicious_exes)
            store_state(STATE_DIR, {"suspicious_exes": last_suspicious_exes})

        except Exception as e:
            output_error(__file__, str(e))
class FileLocation:
    """
    Represents the location of a file or directory on disk.
    """

    def __init__(self, location: str):
        self._location = location

    @property
    def location(self) -> str:
        return self._location


def apply_directory_whitelist(dir_whitelist: List[FileLocation], files: List[FileLocation]) -> List[FileLocation]:
    """
    Filters out every file that resides (directly or nested) below one of
    the whitelisted directories. E.g. whitelisting "/home" removes
    "/home/user/test.txt" from the result.

    :param dir_whitelist: directories considered whitelisted
    :param files: candidate files
    :return: the files that are NOT covered by the whitelist
    """
    if not dir_whitelist:
        return files

    def split_into_components(path: str) -> List[str]:
        # Break a normalized path into its individual path components.
        components = []
        remainder = os.path.normpath(path)
        while True:
            remainder, part = os.path.split(remainder)
            if not part:
                break
            components.insert(0, part)
        return components

    # Pre-compute the component lists of all whitelisted directories once
    # to reduce the per-file processing work.
    whitelist_components = [split_into_components(entry.location) for entry in dir_whitelist]

    remaining = []
    for candidate in files:
        # Only the directory part of the candidate's path is compared.
        candidate_components = split_into_components(os.path.dirname(os.path.normpath(candidate.location)))

        # A candidate is whitelisted when some whitelist entry's components
        # form a prefix of the candidate's directory components. A
        # whitelisted "/" yields an empty component list, which is a prefix
        # of everything and therefore whitelists every file.
        covered = any(candidate_components[:len(entry)] == entry for entry in whitelist_components)
        if not covered:
            remaining.append(candidate)
    return remaining
82 | 83 | :param file_whitelist: 84 | :param files: 85 | :return: list of files that do not match whitelist 86 | """ 87 | if not file_whitelist: 88 | return files 89 | 90 | new_files = [] 91 | for file in files: 92 | is_whitelisted = False 93 | for whitelist_file in file_whitelist: 94 | if os.path.samefile(file.location, whitelist_file.location): 95 | is_whitelisted = True 96 | break 97 | if not is_whitelisted: 98 | new_files.append(file) 99 | return new_files 100 | -------------------------------------------------------------------------------- /scripts/search_ssh_leftover_processes.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Searches for processes that were started by an SSH session that is now disconnected. 13 | 14 | Requirements: 15 | None 16 | 17 | Reference: 18 | https://twitter.com/CraigHRowland/status/1579582776529281026 19 | """ 20 | 21 | import os 22 | import re 23 | import sys 24 | 25 | from lib.util import output_error, output_finding 26 | 27 | # Read configuration. 28 | try: 29 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR 30 | from config.search_ssh_leftover_processes import ACTIVATED 31 | except: 32 | ALERTR_FIFO = None 33 | FROM_ADDR = None 34 | TO_ADDR = None 35 | ACTIVATED = True 36 | 37 | 38 | def search_leftover_ssh_process(): 39 | 40 | # Decide where to output results. 
41 | print_output = False 42 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 43 | print_output = True 44 | 45 | if not ACTIVATED: 46 | if print_output: 47 | print("Module deactivated.") 48 | return 49 | 50 | # Search for SSH_CONNECTION and SSH_CLIENT 51 | fd = os.popen("grep -l SSH_C /proc/*/environ 2> /dev/null") 52 | ssh_processes = fd.read().strip() 53 | fd.close() 54 | 55 | for ssh_process in ssh_processes.split("\n"): 56 | # Example output: /proc/996/environ 57 | # noinspection RegExpRedundantEscape 58 | matches = re.search(r'proc/(\d*)/environ', ssh_process, re.IGNORECASE) 59 | if not matches: 60 | continue 61 | 62 | pid = matches.group(1) 63 | 64 | try: 65 | with open("/proc/" + str(pid) + "/status", "r") as fp: 66 | status_data = fp.read() 67 | 68 | except FileNotFoundError: # Process got terminated while searching 69 | continue 70 | 71 | ppid = None 72 | name = None 73 | for line in status_data.split("\n"): 74 | if line.startswith("PPid:"): 75 | line_split = line.split("\t") 76 | 77 | try: 78 | ppid = int(line_split[-1]) 79 | except Exception as e: 80 | output_error(__file__, "PPid not parsable for pid %d\n\n%s" % (pid, status_data)) 81 | break 82 | 83 | elif line.startswith("Name:"): 84 | line_split = line.split("\t") 85 | name = line_split[-1] 86 | 87 | if ppid is not None and name is not None: 88 | break 89 | 90 | if ppid is not None and name is not None: 91 | if ppid == 1: 92 | 93 | # Get executed file 94 | exe_link = "/proc/" + str(pid) + "/exe" 95 | fd = os.popen("ls -laR %s" % exe_link) 96 | exe_raw = fd.read().strip() 97 | fd.close() 98 | matches = re.search(r'/proc/\d*/exe -> (.*)', exe_raw, re.IGNORECASE) 99 | exe_file = exe_raw 100 | if matches: 101 | exe_file = matches.group(1) 102 | 103 | message = "Leftover process of SSH session found.\n\n" 104 | message += "Name: %s\n" % name 105 | message += "Exe: %s\n" % exe_file 106 | message += "Pid: %s\n" % pid 107 | 108 | output_finding(__file__, message) 109 | 110 | 111 | if 
__name__ == '__main__': 112 | is_init_run = False 113 | if len(sys.argv) == 2: 114 | if sys.argv[1] == "--init": 115 | is_init_run = True 116 | 117 | # Script does not need to establish a state. 118 | if not is_init_run: 119 | search_leftover_ssh_process() 120 | -------------------------------------------------------------------------------- /scripts/lib/util.py: -------------------------------------------------------------------------------- 1 | import difflib 2 | import os 3 | import socket 4 | import traceback 5 | import threading 6 | 7 | from . import global_vars 8 | from .alerts import raise_alert_alertr, raise_alert_mail 9 | 10 | try: 11 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR 12 | 13 | except: 14 | ALERTR_FIFO = None 15 | FROM_ADDR = None 16 | TO_ADDR = None 17 | 18 | 19 | def get_diff_per_line(name1: str, data1: str, name2: str, data2: str) -> str: 20 | # difflib function needs trailing newline for each element to build a usable output string 21 | temp1 = ["%s\n" % x for x in data1.split("\n")] 22 | temp2 = ["%s\n" % x for x in data2.split("\n")] 23 | return "".join(difflib.unified_diff(temp1, temp2, fromfile=name1, tofile=name2)) 24 | 25 | 26 | def output_error(file_name: str, msg: str, output_exception: bool = True): 27 | # Suppresses output, for example, if an initialization run is performed. 28 | if global_vars.SUPPRESS_OUTPUT: 29 | return 30 | 31 | base_name = os.path.basename(file_name) 32 | 33 | # Decide where to output results. 
34 | print_output = False 35 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 36 | print_output = True 37 | 38 | if output_exception: 39 | msg += "\n\n%s" % traceback.format_exc() 40 | 41 | if print_output: 42 | message = "#" * 80 43 | message += "\nError in '%s':\n%s" % (base_name, msg) 44 | print(message) 45 | 46 | else: 47 | hostname = socket.gethostname() 48 | message = "Error in '%s' on host '%s':\n%s" \ 49 | % (base_name, hostname, msg) 50 | 51 | if ALERTR_FIFO: 52 | optional_data = dict() 53 | optional_data["error"] = True 54 | optional_data["script"] = base_name 55 | optional_data["message"] = message 56 | 57 | threading.Thread(target=raise_alert_alertr, 58 | args=(ALERTR_FIFO, optional_data), 59 | daemon=False).start() 60 | 61 | if FROM_ADDR is not None and TO_ADDR is not None: 62 | mail_subject = "[Security] Error in '%s' on host '%s'" % (base_name, socket.gethostname()) 63 | threading.Thread(target=raise_alert_mail, 64 | args=(FROM_ADDR, TO_ADDR, mail_subject, message), 65 | daemon=False).start() 66 | 67 | 68 | def output_finding(file_name: str, msg: str): 69 | # Suppresses output, for example, if an initialization run is performed. 70 | if global_vars.SUPPRESS_OUTPUT: 71 | return 72 | 73 | base_name = os.path.basename(file_name) 74 | 75 | # Decide where to output results. 
76 | print_output = False 77 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 78 | print_output = True 79 | 80 | if print_output: 81 | message = "#" * 80 82 | message += "\nFinding in '%s':\n%s" % (base_name, msg) 83 | 84 | print(message) 85 | 86 | else: 87 | hostname = socket.gethostname() 88 | message = "Finding in '%s' on host '%s':\n%s" \ 89 | % (base_name, hostname, msg) 90 | 91 | if ALERTR_FIFO: 92 | optional_data = dict() 93 | optional_data["finding"] = True 94 | optional_data["script"] = base_name 95 | optional_data["message"] = message 96 | 97 | raise_alert_alertr(ALERTR_FIFO, 98 | optional_data) 99 | 100 | if FROM_ADDR is not None and TO_ADDR is not None: 101 | mail_subject = "[Security] Finding in '%s' on host '%s'" % (base_name, socket.gethostname()) 102 | raise_alert_mail(FROM_ADDR, 103 | TO_ADDR, 104 | mail_subject, 105 | message) 106 | -------------------------------------------------------------------------------- /scripts/monitor_ld_preload.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Monitor /etc/ld.so.preload for changes to detect malicious attempts to alter the control flow of binaries. 13 | 14 | NOTE: The first execution of this script should be done with the argument "--init". 15 | Otherwise, the script will only show you the current state of the environment since no state was established yet. 16 | However, this assumes that the system is uncompromised during the initial execution. 17 | Hence, if you are unsure this is the case you should verify the current state 18 | before monitoring for changes will become an effective security measure. 
def _get_ld_preload() -> Set[str]:
    """
    Reads /etc/ld.so.preload and returns its non-empty lines as a set.
    An empty set is returned when the file does not exist.
    """
    path = "/etc/ld.so.preload"
    ld_data = set()
    if os.path.isfile(path):
        with open(path, 'rt') as fp:
            for line in fp:

                if line.strip() == "":
                    continue

                ld_data.add(line.strip())

    return ld_data


def monitor_ld_preload():
    """
    Compares the current content of /etc/ld.so.preload with the state
    stored by the previous run and reports every deleted or added entry
    via output_finding(). Afterwards the current content is stored as the
    new state for the next run.
    """

    # Decide where to output results.
    print_output = False
    if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None:
        print_output = True

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    # Load the entries seen during the previous run.
    stored_ld_data = set()
    try:
        state_data = load_state(STATE_DIR)

        # Convert list to set.
        if "ld_data" in state_data.keys():
            stored_ld_data = set(state_data["ld_data"])

    except Exception as e:
        output_error(__file__, str(e))
        return

    curr_ld_data = set()
    try:
        curr_ld_data = _get_ld_preload()

    except Exception as e:
        output_error(__file__, str(e))
        return

    # Compare stored data with current one.
    # Report entries that disappeared since the last run.
    for stored_entry in stored_ld_data:
        if stored_entry not in curr_ld_data:
            message = "LD_PRELOAD entry '%s' was deleted." % stored_entry

            output_finding(__file__, message)

            # NOTE(review): this continue is redundant (end of loop body).
            continue

    # Check new data was added.
    for curr_entry in curr_ld_data:
        if curr_entry not in stored_ld_data:
            message = "LD_PRELOAD entry '%s' was added." % curr_entry

            output_finding(__file__, message)

    # Persist the current entries as the new state for the next run.
    try:
        # Convert set to list.
        state_data = {"ld_data": list(curr_ld_data)}

        store_state(STATE_DIR, state_data)

    except Exception as e:
        output_error(__file__, str(e))
def search_suspicious_files():
    """
    Searches /dev/shm for ELF binaries and script files, which are common
    hiding places for malware. In monitoring mode (--monitoring) findings
    from previous runs are persisted and only new ones are reported.
    """
    # Results go to stdout only when no alert channel is configured.
    print_output = ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    # Findings already reported during earlier runs (monitoring mode only).
    last_suspicious_files = []
    if MONITORING_MODE:
        try:
            stored_data = load_state(STATE_DIR)
            last_suspicious_files = stored_data.get("suspicious_files", [])

        except Exception as e:
            output_error(__file__, str(e))
            return

    # Collect ELF binaries first, then scripts, found anywhere in /dev/shm.
    current_suspicious_files = []
    for command in ("find /dev/shm -type f -exec file -p '{}' \\; | grep ELF",
                    "find /dev/shm -type f -exec file -p '{}' \\; | grep script"):
        fd = os.popen(command)
        output = fd.read().strip()
        fd.close()
        if output:
            current_suspicious_files.extend(output.split("\n"))

    # Findings not seen in any earlier run.
    new_suspicious_files = [entry for entry in current_suspicious_files
                            if entry not in last_suspicious_files]

    # Previously reported findings that still exist on the system.
    still_existing_files = [entry for entry in last_suspicious_files
                            if entry in current_suspicious_files]

    if new_suspicious_files:
        message = "File(s) in /dev/shm suspicious:\n\n" + "\n".join(new_suspicious_files)
        output_finding(__file__, message)

    # Persist the still-existing plus the new findings for the next run.
    if MONITORING_MODE:
        try:
            store_state(STATE_DIR, {"suspicious_files": still_existing_files + new_suspicious_files})

        except Exception as e:
            output_error(__file__, str(e))
class UtmpEntry:
    """
    Class that stores an entry of an utmp file.

    An entry is parsed from a single `utmpdump` output line of the form
    "[type] [pid] [id] [user] [line] [host] [addr_v6] [time]".
    """
    def __init__(self,
                 line: str):
        # utmpdump pads the bracketed fields with spaces; they are stripped
        # after matching.
        match = re.fullmatch(r"\[(\d+)\] \[(\d+)\] \[(.+)\] \[(.+)\] \[(.+)\] \[(.+)\] \[(.+)\] \[(.+)\]", line)
        if match is None:
            raise ValueError("Unable to parse line (no match) '%s'" % line)

        self._line = line
        self._ut_type = int(match.group(1))
        self._ut_pid = int(match.group(2))
        self._ut_id = match.group(3).strip()
        self._ut_user = match.group(4).strip()
        self._ut_line = match.group(5).strip()
        self._ut_host = match.group(6).strip()
        self._ut_addr_v6 = match.group(7).strip()
        # dateutil parses the timestamp string emitted by utmpdump.
        self._ut_time = parser.parse(match.group(8))

    def __eq__(self, other):
        # Field-wise comparison; the hasattr() guards allow comparison
        # against arbitrary objects without raising AttributeError.
        return (hasattr(other, "line")
                and self.line == other.line
                and hasattr(other, "ut_type")
                and self.ut_type == other.ut_type
                and hasattr(other, "ut_pid")
                and self.ut_pid == other.ut_pid
                and hasattr(other, "ut_id")
                and self.ut_id == other.ut_id
                and hasattr(other, "ut_user")
                and self.ut_user == other.ut_user
                and hasattr(other, "ut_line")
                and self.ut_line == other.ut_line
                and hasattr(other, "ut_host")
                and self.ut_host == other.ut_host
                and hasattr(other, "ut_addr_v6")
                and self.ut_addr_v6 == other.ut_addr_v6
                and hasattr(other, "ut_time")
                and self.ut_time == other.ut_time)

    def __hash__(self):
        # The raw line fully determines all parsed fields.
        return hash(self._line)

    def __str__(self):
        return self._line

    @property
    def line(self) -> str:
        return self._line

    @property
    def ut_type(self) -> int:
        return self._ut_type

    @property
    def ut_pid(self) -> int:
        return self._ut_pid

    @property
    def ut_id(self) -> str:
        return self._ut_id

    @property
    def ut_user(self) -> str:
        return self._ut_user

    @property
    def ut_line(self) -> str:
        return self._ut_line

    @property
    def ut_host(self) -> str:
        return self._ut_host

    @property
    def ut_addr_v6(self) -> str:
        return self._ut_addr_v6

    @property
    def ut_time(self) -> datetime.datetime:
        return self._ut_time

    def to_dict(self) -> Dict[str, Any]:
        # Only the raw line is stored; all fields can be re-parsed from it.
        return {"line": self.line}

    @staticmethod
    def from_dict(utmp_dict: Dict[str, Any]):
        return UtmpEntry(utmp_dict["line"])


def parse_utmp_dump_line(line: str) -> UtmpEntry:
    """
    Parse an utmp dump line into a UtmpEntry object.

    :raises UtmpException: if the line does not match the expected format
    """
    try:
        return UtmpEntry(line)
    except Exception as e:
        raise UtmpException("Unable to parse line '%s'" % line) from e


def parse_utmp_file(file_location: str) -> List[UtmpEntry]:
    """
    Parse an utmp file into a list of UtmpEntry objects.

    The external `utmpdump` utility does the binary parsing; this function
    only processes its text output.

    :raises UtmpException: if utmpdump reports anything unexpected on stderr
    """
    # NOTE(review): file_location is interpolated into a shell command
    # (shell=True) -- callers must pass trusted paths only.
    p = subprocess.Popen("utmpdump %s" % file_location, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()

    # On success utmpdump prints exactly one status line ("Utmp dump of
    # <file>") to stderr; anything else indicates a failure.
    stderr_str = stderr.decode('utf-8')
    if not (stderr_str.startswith("Utmp dump of") and len(stderr.splitlines()) == 1):
        raise UtmpException("Unable to parse file '%s' with stderr: %s" % (file_location, stderr_str))

    utmp_data = []
    for line in stdout.strip().splitlines():
        utmp_data.append(parse_utmp_dump_line(line.decode("utf-8")))
    return utmp_data
16 | """ 17 | def __init__(self, uid: int, name: str, device: str, host: str, timestamp: int): 18 | self._uid = uid 19 | self._name = name 20 | self._device = device 21 | self._host = host 22 | self._latest_time = datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) 23 | 24 | def __eq__(self, other): 25 | return (hasattr(other, "uid") 26 | and self.uid == other.uid 27 | and hasattr(other, "name") 28 | and self.name == other.name 29 | and hasattr(other, "device") 30 | and self.device == other.device 31 | and hasattr(other, "host") 32 | and self.host == other.host 33 | and hasattr(other, "latest_time") 34 | and self.latest_time == other.latest_time) 35 | 36 | def __hash__(self): 37 | return hash((self.uid, self.name, self.device, self.host, self.latest_time.timestamp())) 38 | 39 | def __str__(self): 40 | return "%d %s %s %s %s" % (self._uid, self._name, self._device, self._host, self._latest_time) 41 | 42 | @property 43 | def uid(self) -> int: 44 | return self._uid 45 | 46 | @property 47 | def name(self) -> str: 48 | return self._name 49 | 50 | @property 51 | def device(self) -> str: 52 | return self._device 53 | 54 | @property 55 | def host(self) -> str: 56 | return self._host 57 | 58 | @property 59 | def latest_time(self) -> datetime.datetime: 60 | return self._latest_time 61 | 62 | def to_dict(self) -> Dict[str, Any]: 63 | return {"uid": self.uid, 64 | "name": self.name, 65 | "device": self.device, 66 | "host": self.host, 67 | "latest_time": self.latest_time.timestamp()} 68 | 69 | @staticmethod 70 | def from_dict(entity_dict: Dict[str, Any]): 71 | return LastlogEntry(entity_dict["uid"], 72 | entity_dict["name"], 73 | entity_dict["device"], 74 | entity_dict["host"], 75 | entity_dict["latest_time"]) 76 | 77 | 78 | def parse_lastlog_file(file_location: str = "/var/log/lastlog", passwd_file: str = "/etc/passwd") -> List[LastlogEntry]: 79 | """ 80 | Parses the given lastlog file and returns a list of LastlogEntry objects. 
81 | :param file_location: location of the lastlog file 82 | :param passwd_file: location of the passwd file to resolve users 83 | :return: List of LastlogEntry objects 84 | """ 85 | result = [] # type: List[LastlogEntry] 86 | 87 | entry_format_str = "I32s256s" 88 | entry_size = struct.calcsize(entry_format_str) 89 | entry_format = struct.Struct(entry_format_str) 90 | 91 | system_users = {} 92 | for system_user in get_system_users(passwd_file): 93 | system_users[system_user.uid] = system_user.name 94 | 95 | if not os.path.isfile(file_location): 96 | raise LastlogException("File '%s' does not exist" % file_location) 97 | 98 | with open(file_location, 'rb') as fp: 99 | try: 100 | uid = 0 101 | entry_raw = fp.read(entry_size) 102 | 103 | while entry_raw: 104 | timestamp, device, host = entry_format.unpack(entry_raw) 105 | if timestamp != 0 and uid in system_users: 106 | result.append(LastlogEntry(uid, 107 | system_users[uid], 108 | "" if device[0] == 0 else device.decode("utf-8").replace("\x00", "").strip(), 109 | "" if host[0] == 0 else host.decode("utf-8").replace("\x00", "").strip(), 110 | timestamp)) 111 | entry_raw = fp.read(entry_size) 112 | uid += 1 113 | except Exception as e: 114 | raise LastlogException("Unable to parse raw entry '%s'" % entry_raw) from e 115 | 116 | return result 117 | -------------------------------------------------------------------------------- /scripts/monitor_modules.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Monitor /proc/modules for changes to detect if a malicious module was loaded. 
The script can run in two different modes: 13 | 1) before running the script, whitelist every module that is allowed to be loaded on the host in the configuration file. 14 | 2) assume all loaded modules are legitimate during the initial execution of the script with "--init" and monitor for changes. 15 | 16 | If using 1), you get fewer false-positives due to the time you spend setting everything up. 17 | If using 2), you assume the host is uncompromised during the initial execution of the script. 18 | If you have a module that is loaded/unloaded frequently, you can still configure the whitelist additionally 19 | to prevent constant alerting. 20 | 21 | Requirements: 22 | None 23 | """ 24 | 25 | import os 26 | import sys 27 | 28 | import lib.global_vars 29 | from lib.state import load_state, store_state 30 | from lib.util import output_error, output_finding 31 | 32 | # Read configuration. 33 | try: 34 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR 35 | from config.monitor_modules import ACTIVATED, MODULES_WHITELIST 36 | STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__)) 37 | except: 38 | ALERTR_FIFO = None 39 | FROM_ADDR = None 40 | TO_ADDR = None 41 | ACTIVATED = True 42 | MODULES_WHITELIST = [] 43 | STATE_DIR = os.path.join("/tmp", os.path.basename(__file__)) 44 | 45 | 46 | def _get_modules(): 47 | """ 48 | Reads all currently loaded modules. 49 | :return: set of loaded modules 50 | """ 51 | loaded_modules = set() 52 | with open("/proc/modules", 'r') as fp: 53 | for line in fp: 54 | line_list = line.split(" ") 55 | loaded_modules.add(line_list[0]) 56 | return loaded_modules 57 | 58 | 59 | def monitor_modules(): 60 | # Decide where to output results. 
def monitor_modules():
    """
    Compares the currently loaded kernel modules against the stored state and
    reports every module that was loaded or unloaded since the last run, then
    persists the current snapshot for the next execution.
    """
    # Results are only printed when no alertr fifo and no mail addresses are configured.
    print_output = ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    # Restore the module names seen during the previous execution
    # (persisted as a list, converted back to a set here).
    known_modules = set()
    try:
        state_data = load_state(STATE_DIR)
        if "modules_data" in state_data.keys():
            known_modules = set(state_data["modules_data"])

    except Exception as e:
        output_error(__file__, str(e))
        return

    try:
        current_modules = _get_modules()

    except Exception as e:
        output_error(__file__, str(e))
        return

    # Whitelisted modules never trigger an alert.
    current_modules -= set(MODULES_WHITELIST)

    # Report modules that appeared since the last run.
    newly_loaded = current_modules - known_modules
    if newly_loaded:
        message = "New modules loaded.\n\nEntries:\n"
        for module_name in newly_loaded:
            message += module_name + "\n"
        output_finding(__file__, message)

    # Report modules that disappeared since the last run.
    newly_unloaded = known_modules - current_modules
    if newly_unloaded:
        message = "Running modules unloaded.\n\nEntries:\n"
        for module_name in newly_unloaded:
            message += module_name + "\n"
        output_finding(__file__, message)

    # Persist the current snapshot (sets are not JSON-serializable, hence the list).
    try:
        store_state(STATE_DIR, {"modules_data": list(current_modules)})

    except Exception as e:
        output_error(__file__, str(e))
128 | if sys.argv[1] == "--init": 129 | lib.global_vars.SUPPRESS_OUTPUT = True 130 | monitor_modules() 131 | -------------------------------------------------------------------------------- /scripts/search_tainted_modules.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Search combinations of taint flags of loaded kernel modules that are an indicator of a malicious module. 13 | A suspicious combination is an unsigned module flag with an out-of-tree build flag. 14 | 15 | Requirements: 16 | None 17 | 18 | Reference: 19 | https://twitter.com/CraigHRowland/status/1642263411437506561 20 | """ 21 | 22 | import os 23 | import sys 24 | from typing import List 25 | 26 | from lib.state import load_state, store_state 27 | from lib.util import output_finding, output_error 28 | from lib.util_module import SystemModule, SystemModuleTaintFlag, get_system_modules 29 | 30 | # Read configuration. 
def _get_suspicious_modules() -> List[SystemModule]:
    """
    Collects loaded kernel modules with a suspicious taint flag combination.

    A module is considered suspicious if it is built out-of-tree (not part of
    the Linux kernel tree) AND unsigned, unless it is whitelisted.

    :return: list of suspicious modules
    """
    return [module for module in get_system_modules()
            if module.name not in MODULES_WHITELIST
            and SystemModuleTaintFlag.OOT_MODULE in module.taint_flags
            and SystemModuleTaintFlag.UNSIGNED_MODULE in module.taint_flags]
def search_tainted_modules():
    """
    Searches the loaded kernel modules for suspicious taint flag combinations
    and reports new findings. In monitoring mode, already reported findings are
    persisted so each suspicious module is only alerted once.
    """
    # Results are only printed when no alertr fifo and no mail addresses are configured.
    print_output = ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    # In monitoring mode, restore the findings reported during earlier runs.
    known_findings = []
    if MONITORING_MODE:
        try:
            stored_data = load_state(STATE_DIR)
            if "suspicious_modules" in stored_data.keys():
                known_findings = [SystemModule.from_dict(entry)
                                  for entry in stored_data["suspicious_modules"]]

        except Exception as e:
            output_error(__file__, str(e))
            return

    current_findings = _get_suspicious_modules()

    # Everything not reported before is a new finding.
    new_findings = [finding for finding in current_findings
                    if finding not in known_findings]

    # Forget stored findings whose module is no longer loaded.
    known_findings = [finding for finding in known_findings
                      if finding in current_findings]

    if new_findings:
        message = "%d suspicious loaded module(s) found:\n\n" % len(new_findings)
        for finding in new_findings:
            message += "%s - State: %s; Dependencies: %s; Taint Flags: %s\n" \
                       % (finding.name,
                          finding.state.name,
                          ",".join(finding.dependencies),
                          ",".join([flag.name for flag in finding.taint_flags]))

        output_finding(__file__, message)

    if MONITORING_MODE:
        try:
            known_findings.extend(new_findings)
            store_state(STATE_DIR,
                        {"suspicious_modules": [finding.to_dict() for finding in known_findings]})

        except Exception as e:
            output_error(__file__, str(e))
class TestUtilUtmp(unittest.TestCase):
    """Tests for UtmpEntry and the utmp parsing helpers."""

    def test_UtmpEntry(self):
        # Parsing well-formed utmpdump lines must not raise.
        for dump_line in (
                "[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]",
                "[6] [659777] [ ] [sqall ] [ssh:notty ] [11.22.33.44 ] [11.22.33.44 ] [2025-09-05T09:38:06,000000+00:00]",
                "[7] [2654257] [ts/0] [sqall ] [pts/0 ] [10.42.42.42 ] [10.42.42.42 ] [2024-01-23T07:46:40,563329+00:00]",
                "[2] [00000] [~~ ] [reboot ] [~ ] [6.1.0-33-amd64 ] [0.0.0.0 ] [2025-04-21T10:58:14,544986+00:00]"):
            UtmpEntry(dump_line)

    def test_UtmpEntry_illegal_line(self):
        # A line with leading garbage must be rejected.
        bad_line = "foo[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"
        self.assertRaises(ValueError, UtmpEntry, bad_line)

    def test_UtmpEntry_eq(self):
        reboot_line = "[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"
        other_line = "[3] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"
        first = UtmpEntry(reboot_line)
        second = UtmpEntry(reboot_line)
        third = UtmpEntry(other_line)

        self.assertEqual(first, second)
        self.assertNotEqual(first, third)
        # Comparing against a foreign type must not be equal either.
        self.assertNotEqual(first, "something")

    def test_UtmpEntry_hash(self):
        reboot_line = "[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"
        other_line = "[3] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"
        first = UtmpEntry(reboot_line)
        duplicate = UtmpEntry(reboot_line)
        third = UtmpEntry(other_line)

        entries = {first, duplicate, third}

        # The duplicate must collapse into one set member.
        self.assertEqual(2, len(entries))
        # Both distinct entries must be locatable in the set.
        self.assertTrue(any(member == first for member in entries))
        self.assertTrue(any(member == third for member in entries))

    def test_UtmpEntry_to_dict(self):
        first = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]")
        second = UtmpEntry("[3] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]")

        # The dict representation must carry the raw dump line.
        self.assertEqual(first.line, first.to_dict()["line"])
        self.assertEqual(second.line, second.to_dict()["line"])

    def test_UtmpEntry_from_dict(self):
        first = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]")
        second = UtmpEntry("[3] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]")

        # Round-tripping through the dict representation must be lossless.
        self.assertEqual(first, UtmpEntry.from_dict(first.to_dict()))
        self.assertEqual(second, UtmpEntry.from_dict(second.to_dict()))

    def test_parse_utmp_dump_line(self):
        dump_line = "[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"

        entry = parse_utmp_dump_line(dump_line)

        self.assertEqual(dump_line, entry.line)

    def test_parse_utmp_dump_line_malformed(self):
        bad_line = "foo[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"
        self.assertRaises(UtmpException, parse_utmp_dump_line, bad_line)

    def test_parse_utmp_file(self):
        wtmp_path = os.path.join(os.path.dirname(__file__), "resources", "wtmp_benign")
        # The benign wtmp fixture contains exactly 551 records.
        self.assertEqual(551, len(parse_utmp_file(wtmp_path)))

    def test_parse_utmp_file_no_file(self):
        self.assertRaises(UtmpException, parse_utmp_file, "/something_that_does/not/exist")
def _get_deleted_exe_files() -> List[str]:
    """
    Collects running processes whose executable file was deleted.

    The Linux kernel appends " (deleted)" to the symlink target if the file was deleted
    https://github.com/torvalds/linux/blob/052d534373b7ed33712a63d5e17b2b6cdbce84fd/fs/d_path.c#L256

    :return: list of "/proc/<pid>/exe -> <target>" strings for suspicious processes
    """
    fd = os.popen("ls -laR /proc/*/exe 2> /dev/null | grep -v memfd: | grep \\(deleted\\)")
    raw_output = fd.read().strip()
    fd.close()

    entry_regex = re.compile(r" (/proc/(\d+)/exe -> .*)$")
    # The Linux kernel can spawn processes that do not point to an executable
    # https://www.uninformativ.de/blog/postings/2022-06-11/0/POSTING-en.html
    kthread_regex = re.compile(r"(/proc/(\d+)/exe -> / \(deleted\))$")

    findings = []
    if raw_output:
        for ls_line in raw_output.split("\n"):
            match = entry_regex.search(ls_line)
            if match is None:
                continue
            candidate = match.group(1)
            # Drop kernel-spawned false positives.
            if kthread_regex.search(candidate) is None:
                findings.append(candidate)

    return findings
def search_deleted_exe_files():
    """
    Searches for running processes whose executable file was deleted and reports
    new findings. In monitoring mode, already reported findings are persisted so
    each finding is only alerted once.
    """
    # Results are only printed when no alertr fifo and no mail addresses are configured.
    print_output = False
    if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None:
        print_output = True

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    # In monitoring mode, restore the findings reported during earlier runs.
    last_suspicious_exes = []
    if MONITORING_MODE:
        try:
            stored_data = load_state(STATE_DIR)
            if "suspicious_exes" in stored_data.keys():
                last_suspicious_exes = stored_data["suspicious_exes"]

        except Exception as e:
            output_error(__file__, str(e))
            return

    current_suspicious_exes = _get_deleted_exe_files()

    # Extract new findings.
    new_suspicious_exes = [exe for exe in current_suspicious_exes
                           if exe not in last_suspicious_exes]

    # Remove stored findings that no longer exist.
    last_suspicious_exes = [exe for exe in last_suspicious_exes
                            if exe in current_suspicious_exes]

    if new_suspicious_exes:
        message = "%d deleted executable file(s) found:\n\n" % len(new_suspicious_exes)
        for suspicious_exe in new_suspicious_exes:
            match = re.search(r"/proc/(\d+)/exe -> .*$", suspicious_exe)
            if not match:
                output_error(__file__, "Unable to parse: %s" % suspicious_exe, False)
                continue
            pid = match.group(1)
            message += "\n%s" % suspicious_exe

            # The process can exit between detection and this read (TOCTOU race);
            # report the finding anyway instead of crashing the whole scan.
            try:
                with open("/proc/%s/cmdline" % pid, "rb") as fp:
                    cmdline = fp.read()
                # Replace 0-bytes with whitespaces for readability. cmdline can
                # contain arbitrary bytes, so decode defensively.
                cmdline = cmdline.replace(b"\x00", b" ")
                message += "\n/proc/%s/cmdline -> %s" % (pid, cmdline.decode("utf-8", errors="replace"))
            except OSError:
                message += "\n/proc/%s/cmdline -> (process no longer exists)" % pid
            message += "\n"

        output_finding(__file__, message)

    if MONITORING_MODE:
        try:
            last_suspicious_exes.extend(new_suspicious_exes)
            store_state(STATE_DIR, {"suspicious_exes": last_suspicious_exes})

        except Exception as e:
            output_error(__file__, str(e))
Exception as e: 128 | output_error(__file__, str(e)) 129 | 130 | 131 | if __name__ == '__main__': 132 | is_init_run = False 133 | if len(sys.argv) > 1: 134 | if "--init" in sys.argv: 135 | is_init_run = True 136 | if "--monitoring" in sys.argv: 137 | MONITORING_MODE = True 138 | 139 | # Script does not need to establish a state. 140 | if not is_init_run: 141 | search_deleted_exe_files() 142 | -------------------------------------------------------------------------------- /scripts/monitor_hosts_file.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Monitor /etc/hosts for changes to detect malicious attempts to divert traffic. 13 | 14 | NOTE: The first execution of this script should be done with the argument "--init". 15 | Otherwise, the script will only show you the current state of the environment since no state was established yet. 16 | However, this assumes that the system is uncompromised during the initial execution. 17 | Hence, if you are unsure this is the case you should verify the current state 18 | before monitoring for changes will become an effective security measure. 19 | 20 | Requirements: 21 | None 22 | """ 23 | 24 | import os 25 | import sys 26 | from typing import Dict, Set 27 | 28 | import lib.global_vars 29 | from lib.state import load_state, store_state 30 | from lib.util import output_error, output_finding 31 | 32 | # Read configuration. 
33 | try: 34 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR 35 | from config.monitor_hosts_file import ACTIVATED 36 | STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__)) 37 | except: 38 | ALERTR_FIFO = None 39 | FROM_ADDR = None 40 | TO_ADDR = None 41 | ACTIVATED = True 42 | STATE_DIR = os.path.join("/tmp", os.path.basename(__file__)) 43 | 44 | 45 | class MonitorHostsException(Exception): 46 | pass 47 | 48 | 49 | def _get_hosts() -> Dict[str, Set[str]]: 50 | 51 | hosts_data = {} 52 | with open("/etc/hosts", 'rt') as fp: 53 | for line in fp: 54 | line = line.strip() 55 | 56 | if line == "": 57 | continue 58 | 59 | # Ignore comments. 60 | if line[0] == "#": 61 | continue 62 | 63 | entry = line.split() 64 | if len(entry) < 2: 65 | raise MonitorHostsException("Not able to parse line: %s" % line) 66 | 67 | ip = entry[0] 68 | hosts = set(entry[1:]) 69 | if ip not in hosts_data.keys(): 70 | hosts_data[ip] = hosts 71 | 72 | else: 73 | for host in hosts: 74 | hosts_data[ip].add(host) 75 | 76 | return hosts_data 77 | 78 | 79 | def monitor_hosts(): 80 | 81 | # Decide where to output results. 82 | print_output = False 83 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 84 | print_output = True 85 | 86 | if not ACTIVATED: 87 | if print_output: 88 | print("Module deactivated.") 89 | return 90 | 91 | stored_hosts_data = {} 92 | try: 93 | state_data = load_state(STATE_DIR) 94 | 95 | # Convert list to set. 96 | for k, v in state_data.items(): 97 | stored_hosts_data[k] = set(v) 98 | 99 | except Exception as e: 100 | output_error(__file__, str(e)) 101 | return 102 | 103 | curr_hosts_data = {} 104 | try: 105 | curr_hosts_data = _get_hosts() 106 | 107 | except Exception as e: 108 | output_error(__file__, str(e)) 109 | return 110 | 111 | # Compare stored data with current one. 112 | for stored_entry_ip in stored_hosts_data.keys(): 113 | 114 | # Extract current entry belonging to the same ip. 
def monitor_hosts():
    """
    Compares the current /etc/hosts content against the stored state and
    reports deleted IP entries, removed/added host names of known IPs, and
    completely new IP entries, then persists the current snapshot.
    """
    # Results are only printed when no alertr fifo and no mail addresses are configured.
    print_output = ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    # Restore the entries seen during the previous execution
    # (persisted as lists, converted back to sets here).
    known_hosts_data = {}
    try:
        for ip, host_list in load_state(STATE_DIR).items():
            known_hosts_data[ip] = set(host_list)

    except Exception as e:
        output_error(__file__, str(e))
        return

    try:
        curr_hosts_data = _get_hosts()

    except Exception as e:
        output_error(__file__, str(e))
        return

    # Compare stored data with the current one.
    for known_ip, known_hosts in known_hosts_data.items():

        # The whole IP entry vanished.
        if known_ip not in curr_hosts_data.keys():
            output_finding(__file__, "Host name for IP '%s' was deleted." % known_ip)
            continue

        curr_hosts = curr_hosts_data[known_ip]

        # Report host names removed from an existing IP entry.
        for removed_host in known_hosts - curr_hosts:
            message = "Host name entry for IP '%s' was removed.\n\n" % known_ip
            message += "Entry: %s" % removed_host
            output_finding(__file__, message)

        # Report host names added to an existing IP entry.
        for added_host in curr_hosts - known_hosts:
            message = "Host name entry for IP '%s' was added.\n\n" % known_ip
            message += "Entry: %s" % added_host
            output_finding(__file__, message)

    # Report completely new IP entries.
    for curr_ip, curr_hosts in curr_hosts_data.items():
        if curr_ip not in known_hosts_data.keys():
            message = "New host name was added for IP '%s'.\n\n" % curr_ip
            message += "Entries:\n"
            for host in curr_hosts:
                message += host + "\n"
            output_finding(__file__, message)

    # Persist the current snapshot (sets are not JSON-serializable, hence lists).
    try:
        store_state(STATE_DIR, {ip: list(hosts) for ip, hosts in curr_hosts_data.items()})

    except Exception as e:
        output_error(__file__, str(e))
class TestUtilLastlog(unittest.TestCase):
    """Tests for LastlogEntry and the lastlog parsing helpers."""

    def test_LastlogEntry_eq(self):
        now = int(datetime.datetime.now().timestamp())
        first = LastlogEntry(0, "root", "pts/1", "127.0.0.1", now)
        second = LastlogEntry(0, "root", "pts/1", "127.0.0.1", now)
        third = LastlogEntry(1, "toor", "pts/1", "127.0.0.1", now)

        self.assertEqual(first, second)
        self.assertNotEqual(first, third)
        # Comparing against a foreign type must not be equal either.
        self.assertNotEqual(first, "something")

    def test_LastlogEntry_hash(self):
        now = int(datetime.datetime.now().timestamp())
        first = LastlogEntry(0, "root", "pts/1", "127.0.0.1", now)
        duplicate = LastlogEntry(0, "root", "pts/1", "127.0.0.1", now)
        third = LastlogEntry(1, "toor", "pts/1", "127.0.0.1", now)

        entries = {first, duplicate, third}

        # The duplicate must collapse into one set member.
        self.assertEqual(2, len(entries))
        # Both distinct entries must be locatable in the set.
        self.assertTrue(any(member == first for member in entries))
        self.assertTrue(any(member == third for member in entries))

    def test_LastlogEntry_to_dict(self):
        now = int(datetime.datetime.now().timestamp())
        for entry in (LastlogEntry(0, "root", "pts/1", "127.0.0.1", now),
                      LastlogEntry(1, "toor", "pts/1", "127.0.0.1", now)):
            entry_dict = entry.to_dict()

            self.assertEqual(entry.uid, entry_dict["uid"])
            self.assertEqual(entry.name, entry_dict["name"])
            self.assertEqual(entry.device, entry_dict["device"])
            self.assertEqual(entry.host, entry_dict["host"])
            self.assertEqual(entry.latest_time.timestamp(), entry_dict["latest_time"])

    def test_LastlogEntry_from_dict(self):
        now = int(datetime.datetime.now().timestamp())
        for entry in (LastlogEntry(0, "root", "pts/1", "127.0.0.1", now),
                      LastlogEntry(1, "toor", "pts/1", "127.0.0.1", now)):
            # Round-tripping through the dict representation must be lossless.
            self.assertEqual(entry, LastlogEntry.from_dict(entry.to_dict()))

    @patch("scripts.lib.util_lastlog.get_system_users")
    def test_parse_lastlog_file(self, get_system_users_mock):
        get_system_users_mock.return_value = [SystemUser("sqall",
                                                         "",
                                                         1000,
                                                         1000,
                                                         "",
                                                         "/home/sqall",
                                                         "/bin/bash")]

        lastlog_entries = parse_lastlog_file(os.path.join(os.path.dirname(__file__), "resources", "lastlog"))

        # The fixture contains exactly one login record for uid 1000.
        self.assertEqual(1, len(lastlog_entries))
        entry = lastlog_entries[0]
        self.assertEqual(1000, entry.uid)
        self.assertEqual("sqall", entry.name)
        self.assertEqual("pts/1", entry.device)
        self.assertEqual("172.19.80.1", entry.host)
        self.assertEqual(datetime.datetime(2025, 1, 9, 8, 10, 23, tzinfo=datetime.timezone.utc),
                         entry.latest_time)

    def test_parse_lastlog_file_no_file(self):
        self.assertRaises(LastlogException, parse_lastlog_file, "/something_that_does/not/exist")

    @patch("scripts.lib.util_lastlog.get_system_users")
    def test_parse_lastlog_file_parsing_error(self, get_system_users_mock):
        get_system_users_mock.return_value = []
        broken_file = tempfile.NamedTemporaryFile(mode='w+t')
        broken_file.write("\x12\x13\x14\x15\x00\x00")
        broken_file.flush()
        self.assertRaises(LastlogException, parse_lastlog_file, broken_file.name)

    @patch("scripts.lib.util_lastlog.get_system_users")
    def test_parse_lastlog_file_missing_user(self, get_system_users_mock):
        # Entries whose uid has no matching passwd user must be skipped silently.
        get_system_users_mock.return_value = []
        parse_lastlog_file(os.path.join(os.path.dirname(__file__), "resources", "lastlog"))
def _build_search_locations() -> List[StepLocation]:
    """
    Builds the list of search locations from the configuration.

    With SEARCH_IN_STEPS enabled, each configured directory itself is searched
    non-recursively and each of its direct subdirectories becomes a separate
    recursive search step. Otherwise each configured directory is one recursive
    search location.
    """
    search_locations = []  # type: List[StepLocation]
    if SEARCH_IN_STEPS:
        for base_location in SEARCH_LOCATIONS:

            # Add parent directory as non-recursive search location in order to
            # search in it without going deeper.
            search_locations.append(StepLocation(base_location, False))

            # Add all containing subdirectories as recursive search locations.
            entries = os.listdir(base_location)
            entries.sort()
            for entry in entries:
                candidate = os.path.join(base_location, entry)
                if os.path.isdir(candidate):
                    search_locations.append(StepLocation(candidate, True))

    else:
        for base_location in SEARCH_LOCATIONS:
            search_locations.append(StepLocation(base_location, True))

    return search_locations


def search_hidden_exe_files():
    """
    Searches the configured locations for hidden ELF files (dotfiles starting
    with the ELF magic number) and reports everything not covered by the
    whitelists. Optionally processes one search location per execution
    (SEARCH_IN_STEPS) and stores the next step in the state directory.
    """
    # Results are only printed when no alertr fifo and no mail addresses are configured.
    print_output = ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    try:
        step_state_data = load_step_state(STATE_DIR)

    except Exception as e:
        output_error(__file__, str(e))
        return

    # Without step-wise searching everything is processed in one go.
    if not SEARCH_IN_STEPS:
        step_state_data["next_step"] = 0

    if not SEARCH_LOCATIONS:
        SEARCH_LOCATIONS.append("/")

    search_locations = _build_search_locations()

    # Reset the index if it points outside the search locations.
    if step_state_data["next_step"] >= len(search_locations):
        step_state_data["next_step"] = 0

    while True:
        location_obj = search_locations[step_state_data["next_step"]]

        # find prints "<path> <first 4 bytes>" per hidden file and grep keeps
        # the ones starting with the ELF magic number.
        if location_obj.search_recursive:
            fd = os.popen("find %s -type f -iname \".*\" -exec echo -n \"{} \" \\; -exec head -c 4 {} \\; -exec echo \"\" \\; | grep -P \"\\x7fELF\""
                          % location_obj.location)

        else:
            fd = os.popen("find %s -maxdepth 1 -type f -iname \".*\" -exec echo -n \"{} \" \\; -exec head -c 4 {} \\; -exec echo \"\" \\; | grep -P \"\\x7fELF\""
                          % location_obj.location)
        output_raw = fd.read().strip()
        fd.close()

        if output_raw != "":

            # Strip the separator and the 4 magic bytes to recover the path.
            hidden_files = [FileLocation(output_entry[:-5])
                            for output_entry in output_raw.split("\n")]  # type: List[FileLocation]

            dir_whitelist = [FileLocation(x) for x in HIDDEN_EXE_DIRECTORY_WHITELIST]
            file_whitelist = [FileLocation(x) for x in HIDDEN_EXE_FILE_WHITELIST]

            hidden_files = apply_directory_whitelist(dir_whitelist, hidden_files)
            hidden_files = apply_file_whitelist(file_whitelist, hidden_files)

            if hidden_files:
                message = "Hidden ELF file(s) found:\n\n"
                message += "\n".join(["File: %s" % x.location for x in hidden_files])

                output_finding(__file__, message)

        step_state_data["next_step"] += 1

        # In step mode only one location is processed per run.
        if SEARCH_IN_STEPS or step_state_data["next_step"] >= len(search_locations):
            break

    try:
        store_step_state(STATE_DIR, step_state_data)

    except Exception as e:
        output_error(__file__, str(e))
self.assertEqual(0, users[0].gid) 36 | self.assertEqual("root", users[0].info) 37 | self.assertEqual("/root", users[0].home) 38 | self.assertEqual("/bin/bash", users[0].shell) 39 | 40 | def test_get_system_users_multiple_lines(self): 41 | passwd_tmp_file = tempfile.NamedTemporaryFile(mode='w+t') 42 | passwd_tmp_file.writelines(["root:x:0:0:root:/root:/bin/bash\nsystemd-coredump:x:999:999:systemd Core Dumper:/:/usr/sbin/nologin\nfwupd-refresh:x:129:138:fwupd-refresh user,,,:/run/systemd:/usr/sbin/nologin"]) 43 | passwd_tmp_file.flush() 44 | 45 | users = get_system_users(passwd_tmp_file.name) 46 | 47 | self.assertEqual(3, len(users)) 48 | self.assertEqual("root", users[0].name) 49 | self.assertEqual("x", users[0].password) 50 | self.assertEqual(0, users[0].uid) 51 | self.assertEqual(0, users[0].gid) 52 | self.assertEqual("root", users[0].info) 53 | self.assertEqual("/root", users[0].home) 54 | self.assertEqual("/bin/bash", users[0].shell) 55 | 56 | self.assertEqual("systemd-coredump", users[1].name) 57 | self.assertEqual("x", users[1].password) 58 | self.assertEqual(999, users[1].uid) 59 | self.assertEqual(999, users[1].gid) 60 | self.assertEqual("systemd Core Dumper", users[1].info) 61 | self.assertEqual("/", users[1].home) 62 | self.assertEqual("/usr/sbin/nologin", users[1].shell) 63 | 64 | self.assertEqual("fwupd-refresh", users[2].name) 65 | self.assertEqual("x", users[2].password) 66 | self.assertEqual(129, users[2].uid) 67 | self.assertEqual(138, users[2].gid) 68 | self.assertEqual("fwupd-refresh user,,,", users[2].info) 69 | self.assertEqual("/run/systemd", users[2].home) 70 | self.assertEqual("/usr/sbin/nologin", users[2].shell) 71 | 72 | def test_get_system_users_illegal_line(self): 73 | passwd_tmp_file = tempfile.NamedTemporaryFile(mode='w+t') 74 | passwd_tmp_file.writelines(["root:x:0:0:root"]) 75 | passwd_tmp_file.flush() 76 | 77 | self.assertRaises(PasswdException, get_system_users, passwd_tmp_file.name) 78 | 79 | def 
test_SystemUser_from_passwd_line(self): 80 | user = SystemUser.from_passwd_line("root:x:0:0:root:/root:/bin/bash") 81 | 82 | self.assertEqual("root", user.name) 83 | self.assertEqual("x", user.password) 84 | self.assertEqual(0, user.uid) 85 | self.assertEqual(0, user.gid) 86 | self.assertEqual("root", user.info) 87 | self.assertEqual("/root", user.home) 88 | self.assertEqual("/bin/bash", user.shell) 89 | 90 | def test_SystemUser_from_passwd_line_illegal(self): 91 | self.assertRaises(ValueError, SystemUser.from_passwd_line, "root:x:0:0:root/root:/bin/bash") 92 | self.assertRaises(ValueError, SystemUser.from_passwd_line, "root:x:0:0:root:/root:/bin/bash:some") 93 | self.assertRaises(ValueError, SystemUser.from_passwd_line, "root:x:a:0:root:/root:/bin/bash") 94 | self.assertRaises(ValueError, SystemUser.from_passwd_line, "root:x:0:a:root:/root:/bin/bash") 95 | 96 | def test_SystemUser_eq(self): 97 | user1 = SystemUser.from_passwd_line("root:x:0:0:root:/root:/bin/bash") 98 | user2 = SystemUser.from_passwd_line("root:x:0:0:root:/root:/bin/bash") 99 | user3 = SystemUser.from_passwd_line("toor:x:0:0:root:/root:/bin/bash") 100 | 101 | something_else = "something" 102 | self.assertEqual(user1, user2) 103 | self.assertNotEqual(user1, user3) 104 | self.assertNotEqual(user1, something_else) 105 | 106 | def test_SystemUser_hash(self): 107 | user1 = SystemUser.from_passwd_line("root:x:0:0:root:/root:/bin/bash") 108 | user2 = SystemUser.from_passwd_line("root:x:0:0:root:/root:/bin/bash") 109 | user3 = SystemUser.from_passwd_line("toor:x:0:0:root:/root:/bin/bash") 110 | 111 | hash_set = set() 112 | hash_set.add(user1) 113 | hash_set.add(user2) 114 | hash_set.add(user3) 115 | 116 | self.assertEqual(2, len(hash_set)) 117 | 118 | found_user1 = False 119 | found_user3 = False 120 | for temp_user in hash_set: 121 | if temp_user == user1: 122 | found_user1 = True 123 | elif temp_user == user3: 124 | found_user3 = True 125 | 126 | self.assertTrue(found_user1) 127 | 
self.assertTrue(found_user3) 128 | 129 | def test_SystemUser_str(self): 130 | line = "root:x:0:0:root:/root:/bin/bash" 131 | user = SystemUser.from_passwd_line(line) 132 | 133 | self.assertEqual(line, str(user)) 134 | -------------------------------------------------------------------------------- /test/test_search_deleted_exe.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | # Fix to workaround importing issues from test cases 4 | sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts")) 5 | 6 | import shutil 7 | import unittest 8 | from unittest.mock import patch 9 | 10 | from scripts.search_deleted_exe import search_deleted_exe_files 11 | 12 | 13 | class TestSearchDeletedExe(unittest.TestCase): 14 | 15 | TempDirectory = "/tmp/TestSearchDeletedExe" 16 | 17 | def setUp(self): 18 | os.makedirs(TestSearchDeletedExe.TempDirectory) 19 | 20 | def tearDown(self): 21 | shutil.rmtree(TestSearchDeletedExe.TempDirectory) 22 | 23 | @patch("scripts.search_deleted_exe._get_deleted_exe_files") 24 | @patch("scripts.search_deleted_exe.output_error") 25 | @patch("scripts.search_deleted_exe.output_finding") 26 | def test_search_deleted_exe_files_no_result(self, output_finding_mock, output_error_mock, get_deleted_exe_files_mock): 27 | get_deleted_exe_files_mock.return_value = [] 28 | 29 | search_deleted_exe_files() 30 | 31 | output_error_mock.assert_not_called() 32 | output_finding_mock.assert_not_called() 33 | 34 | @patch("scripts.search_deleted_exe._get_deleted_exe_files") 35 | @patch("scripts.search_deleted_exe.output_error") 36 | @patch("scripts.search_deleted_exe.output_finding") 37 | def test_search_deleted_exe_files_illegal_line(self, output_finding_mock, output_error_mock, get_deleted_exe_files_mock): 38 | get_deleted_exe_files_mock.return_value = ["something unexpected"] 39 | 40 | search_deleted_exe_files() 41 | 42 | output_error_mock.assert_called_once() 43 | self.assertEqual("Unable to 
parse: something unexpected", output_error_mock.call_args.args[1]) 44 | output_finding_mock.assert_called_once() 45 | self.assertFalse("something unexpected" in output_finding_mock.call_args.args[1]) 46 | 47 | @patch("scripts.search_deleted_exe._get_deleted_exe_files") 48 | @patch("scripts.search_deleted_exe.output_error") 49 | @patch("scripts.search_deleted_exe.output_finding") 50 | def test_search_deleted_exe_files_correct_line(self, output_finding_mock, output_error_mock, get_deleted_exe_files_mock): 51 | # Use PID of this process for tested routine to gather further information about the process 52 | pid = os.getpid() 53 | 54 | get_deleted_exe_files_mock.return_value = ["/proc/%d/exe -> /some/bogus/file (deleted)" % pid] 55 | 56 | search_deleted_exe_files() 57 | 58 | output_error_mock.assert_not_called() 59 | output_finding_mock.assert_called_once() 60 | self.assertTrue("1 deleted executable file(s) found:" in output_finding_mock.call_args.args[1]) 61 | self.assertTrue("/proc/%d/exe -> /some/bogus/file (deleted)" % pid in output_finding_mock.call_args.args[1]) 62 | 63 | @patch("scripts.search_deleted_exe._get_deleted_exe_files") 64 | @patch("scripts.search_deleted_exe.output_error") 65 | @patch("scripts.search_deleted_exe.output_finding") 66 | @patch("scripts.search_deleted_exe.MONITORING_MODE", True) 67 | @patch("scripts.search_deleted_exe.STATE_DIR", TempDirectory) 68 | def test_search_deleted_exe_monitoring_persistence(self, output_finding_mock, output_error_mock, get_deleted_exe_files_mock): 69 | # Use PID of this process for tested routine to gather further information about the process 70 | pid = os.getpid() 71 | 72 | get_deleted_exe_files_mock.return_value = ["/proc/%d/exe -> /some/bogus/file (deleted)" % pid] 73 | 74 | search_deleted_exe_files() 75 | 76 | output_error_mock.assert_not_called() 77 | output_finding_mock.assert_called_once() 78 | self.assertTrue("1 deleted executable file(s) found:" in output_finding_mock.call_args.args[1]) 79 | 
self.assertTrue("/proc/%d/exe -> /some/bogus/file (deleted)" % pid in output_finding_mock.call_args.args[1]) 80 | 81 | search_deleted_exe_files() 82 | 83 | output_error_mock.assert_not_called() 84 | output_finding_mock.assert_called_once() 85 | 86 | @patch("scripts.search_deleted_exe._get_deleted_exe_files") 87 | @patch("scripts.search_deleted_exe.output_error") 88 | @patch("scripts.search_deleted_exe.output_finding") 89 | @patch("scripts.search_deleted_exe.MONITORING_MODE", True) 90 | @patch("scripts.search_deleted_exe.STATE_DIR", TempDirectory) 91 | def test_search_deleted_exe_monitoring_persistence_cleanup(self, output_finding_mock, output_error_mock, get_deleted_exe_files_mock): 92 | # Use PID of this process for tested routine to gather further information about the process 93 | pid = os.getpid() 94 | 95 | get_deleted_exe_files_mock.return_value = ["/proc/%d/exe -> /some/bogus/file (deleted)" % pid] 96 | 97 | search_deleted_exe_files() 98 | 99 | output_error_mock.assert_not_called() 100 | output_finding_mock.assert_called_once() 101 | self.assertTrue("1 deleted executable file(s) found:" in output_finding_mock.call_args.args[1]) 102 | self.assertTrue("/proc/%d/exe -> /some/bogus/file (deleted)" % pid in output_finding_mock.call_args.args[1]) 103 | 104 | output_error_mock.reset_mock() 105 | output_finding_mock.reset_mock() 106 | get_deleted_exe_files_mock.return_value = [] 107 | 108 | search_deleted_exe_files() 109 | 110 | output_error_mock.assert_not_called() 111 | output_finding_mock.assert_not_called() 112 | 113 | get_deleted_exe_files_mock.return_value = ["/proc/%d/exe -> /some/bogus/file (deleted)" % pid] 114 | 115 | search_deleted_exe_files() 116 | 117 | output_error_mock.assert_not_called() 118 | output_finding_mock.assert_called_once() 119 | self.assertTrue("1 deleted executable file(s) found:" in output_finding_mock.call_args.args[1]) 120 | self.assertTrue("/proc/%d/exe -> /some/bogus/file (deleted)" % pid in output_finding_mock.call_args.args[1]) 121 
| -------------------------------------------------------------------------------- /scripts/search_lastlog_in_utmp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Searches if all lastlog entries are also present in utmp and wtmp files. Otherwise, indicators for tampered 13 | utmp and wtmp files are found. 14 | 15 | lastlog - contains an entry of the last login of a user 16 | 17 | utmp - maintains a full accounting of the current status of the system, system boot time (used by uptime), 18 | recording user logins at which terminals, logouts, system events etc. 19 | 20 | wtmp - acts as a historical utmp 21 | 22 | Requirements: 23 | pip package `python-dateutil` 24 | 25 | Reference: 26 | - https://en.wikipedia.org/wiki/Utmp 27 | - https://sandflysecurity.com/blog/using-linux-utmpdump-for-forensics-and-detecting-log-file-tampering 28 | """ 29 | 30 | import os 31 | import sys 32 | from typing import List 33 | 34 | from lib.state import load_state, store_state 35 | from lib.util import output_error, output_finding 36 | from lib.util_utmp import UtmpEntry, parse_utmp_file 37 | from lib.util_lastlog import LastlogEntry, parse_lastlog_file 38 | 39 | # Read configuration. 
40 | try: 41 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR 42 | from config.search_lastlog_in_utmp import ACTIVATED, UTMP_FILE_LOCATIONS, LASTLOG_FILE_LOCATION, \ 43 | PASSWD_FILE_LOCATION 44 | MONITORING_MODE = False 45 | STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__)) 46 | except: 47 | ALERTR_FIFO = None 48 | FROM_ADDR = None 49 | TO_ADDR = None 50 | ACTIVATED = True 51 | MONITORING_MODE = False 52 | STATE_DIR = os.path.join("/tmp", os.path.basename(__file__)) 53 | UTMP_FILE_LOCATIONS = ["/var/run/utmp", "/var/log/wtmp"] 54 | LASTLOG_FILE_LOCATION = "/var/log/lastlog" 55 | PASSWD_FILE_LOCATION = "/etc/passwd" 56 | 57 | 58 | def _check_lastlog_in_umtp(lastlog_data: List[LastlogEntry], utmp_data: List[UtmpEntry]) -> List[LastlogEntry]: 59 | """ 60 | Checks if lastlog data is part of the utmp data. 61 | :param lastlog_data: List of LastlogEntry objects to check 62 | :param utmp_data: List of UtmpEntry objects to check against 63 | :return: List of LastlogEntry objects that are missing in the utmp data 64 | """ 65 | result = [] 66 | for lastlog_entry in lastlog_data: 67 | if not any(map(lambda x: x.ut_user == lastlog_entry.name 68 | # The timestamp in lastlog does only have a second precision, while the 69 | # one in utmp has microsecond precision 70 | and x.ut_time.year == lastlog_entry.latest_time.year 71 | and x.ut_time.month == lastlog_entry.latest_time.month 72 | and x.ut_time.day == lastlog_entry.latest_time.day 73 | and x.ut_time.hour == lastlog_entry.latest_time.hour 74 | and x.ut_time.minute == lastlog_entry.latest_time.minute 75 | and x.ut_time.second == lastlog_entry.latest_time.second, 76 | utmp_data)): 77 | result.append(lastlog_entry) 78 | 79 | return result 80 | 81 | 82 | def search_lastlog_in_utmp(): 83 | """ 84 | Starts the search if lastlog entries are present in utmp files. 85 | """ 86 | # Decide where to output results. 
87 | print_output = False 88 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 89 | print_output = True 90 | 91 | if not ACTIVATED: 92 | if print_output: 93 | print("Module deactivated.") 94 | return 95 | 96 | # Load last results if monitoring mode is active 97 | last_missing_entries = [] # type: List[LastlogEntry] 98 | if MONITORING_MODE: 99 | try: 100 | stored_data = load_state(STATE_DIR) 101 | if "missing_entries" in stored_data.keys(): 102 | last_missing_entries = list(map(lambda x: LastlogEntry.from_dict(x), stored_data["missing_entries"])) 103 | 104 | except Exception as e: 105 | output_error(__file__, str(e)) 106 | return 107 | 108 | lastlog_data = [] 109 | try: 110 | lastlog_data = parse_lastlog_file(LASTLOG_FILE_LOCATION, PASSWD_FILE_LOCATION) 111 | except Exception as e: 112 | output_error(__file__, str(e)) 113 | return 114 | 115 | utmp_data = [] 116 | for utmp_file in UTMP_FILE_LOCATIONS: 117 | if not os.path.isfile(utmp_file): 118 | continue 119 | 120 | try: 121 | utmp_data.extend(parse_utmp_file(utmp_file)) 122 | except Exception as e: 123 | output_error(__file__, str(e)) 124 | continue 125 | 126 | missing_entries = _check_lastlog_in_umtp(lastlog_data, utmp_data) 127 | 128 | # Check if a new detection has occurred 129 | # (in non-monitoring mode this will always yield new detections) 130 | has_new_detections = False 131 | 132 | for missing_entry in missing_entries: 133 | if missing_entry not in last_missing_entries: 134 | has_new_detections = True 135 | break 136 | 137 | # Only output findings if we have a new detection 138 | if has_new_detections and missing_entries: 139 | message = "%d missing entry (or entries) in %s found:\n\n" % (len(missing_entries), LASTLOG_FILE_LOCATION) 140 | for missing_entry in missing_entries: 141 | message += "\nMissing entry: %s" % missing_entry 142 | 143 | output_finding(__file__, message) 144 | 145 | # Store results if monitoring mode is active 146 | if MONITORING_MODE: 147 | try: 148 | 
store_state(STATE_DIR, {"missing_entries": list(map(lambda x: x.to_dict(), missing_entries))}) 149 | 150 | except Exception as e: 151 | output_error(__file__, str(e)) 152 | 153 | 154 | if __name__ == '__main__': 155 | is_init_run = False 156 | if len(sys.argv) == 2: 157 | if sys.argv[1] == "--init": 158 | is_init_run = True 159 | if "--monitoring" in sys.argv: 160 | MONITORING_MODE = True 161 | 162 | # Script does not need to establish a state. 163 | if not is_init_run: 164 | search_lastlog_in_utmp() 165 | -------------------------------------------------------------------------------- /scripts/search_immutable_files.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Searches for immutable files in the filesystem. 13 | 14 | Requirements: 15 | None 16 | """ 17 | 18 | import os 19 | import sys 20 | from typing import List, cast 21 | 22 | from lib.step_state import StepLocation, load_step_state, store_step_state 23 | from lib.util import output_error, output_finding 24 | from lib.util_file import FileLocation, apply_directory_whitelist, apply_file_whitelist 25 | 26 | # Read configuration. 
def search_immutable_files():
    """
    Searches the configured locations for files that have the immutable
    attribute set and reports every non-whitelisted hit as a finding.

    Depending on SEARCH_IN_STEPS, either the search advances one search
    location per invocation (the position is kept via the step state files in
    STATE_DIR) or all configured locations are searched recursively in one go.
    """
    import shlex

    # Decide where to output results.
    print_output = False
    if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None:
        print_output = True

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    step_state_data = {}
    try:
        step_state_data = load_step_state(STATE_DIR)

    except Exception as e:
        output_error(__file__, str(e))
        return

    # Reset step if we do not search in steps but everything.
    if not SEARCH_IN_STEPS:
        step_state_data["next_step"] = 0

    if not SEARCH_LOCATIONS:
        SEARCH_LOCATIONS.append("/")

    # Gather all search locations.
    search_locations = []  # type: List[StepLocation]
    # If SEARCH_IN_STEPS is active, build a list of directories to search in
    if SEARCH_IN_STEPS:
        for search_location in SEARCH_LOCATIONS:

            # Add parent directory as non-recursive search location in order to search in it without going deeper.
            search_locations.append(StepLocation(search_location, False))

            # Add all containing subdirectories as recursive search locations.
            elements = os.listdir(search_location)
            elements.sort()
            for element in elements:
                path = os.path.join(search_location, element)
                if os.path.isdir(path):
                    search_locations.append(StepLocation(path, True))

    # If we do not search in separated steps, just add each directory as a recursive search location.
    else:
        for search_location in SEARCH_LOCATIONS:
            search_locations.append(StepLocation(search_location, True))

    # Reset index if it is outside the search locations.
    if step_state_data["next_step"] >= len(search_locations):
        step_state_data["next_step"] = 0

    while True:
        search_location_obj = search_locations[step_state_data["next_step"]]

        # Quote the location so that paths containing spaces or shell
        # metacharacters neither break the command nor get injected into it.
        quoted_location = shlex.quote(search_location_obj.location)

        # Get all immutable files.
        if search_location_obj.search_recursive:
            fd = os.popen("lsattr -R -a %s 2> /dev/null | sed -rn '/^[aAcCdDeijPsStTu\\-]{4}i/p'"
                          % quoted_location)

        else:
            fd = os.popen("lsattr -a %s 2> /dev/null | sed -rn '/^[aAcCdDeijPsStTu\\-]{4}i/p'"
                          % quoted_location)
        output_raw = fd.read().strip()
        fd.close()

        if output_raw != "":

            immutable_files = []  # type: List[ImmutableFile]
            output_list = output_raw.split("\n")
            for output_entry in output_list:
                # Split only on the first space: lsattr prints the attribute
                # string first, hence everything after the first space is the
                # file path (which may itself contain spaces).
                output_entry_list = output_entry.split(" ", 1)

                # Notify and skip line if sanity check fails.
                if len(output_entry_list) != 2:
                    output_error(__file__, "Unable to process line '%s'" % output_entry, False)
                    continue

                attributes = output_entry_list[0]
                file_location = output_entry_list[1]
                immutable_files.append(ImmutableFile(file_location, attributes))

            dir_whitelist = [FileLocation(x) for x in IMMUTABLE_DIRECTORY_WHITELIST]
            file_whitelist = [FileLocation(x) for x in IMMUTABLE_FILE_WHITELIST]

            immutable_files = cast(List[ImmutableFile], apply_directory_whitelist(dir_whitelist, immutable_files))
            immutable_files = cast(List[ImmutableFile], apply_file_whitelist(file_whitelist, immutable_files))

            if immutable_files:
                message = "Immutable file(s) found:\n\n"
                message += "\n".join(["File: %s; Attributes: %s" % (x.location, x.attribute) for x in immutable_files])

                output_finding(__file__, message)

        step_state_data["next_step"] += 1

        # Stop search if we are finished.
        if SEARCH_IN_STEPS or step_state_data["next_step"] >= len(search_locations):
            break

    try:
        store_step_state(STATE_DIR, step_state_data)

    except Exception as e:
        output_error(__file__, str(e))


if __name__ == '__main__':
    is_init_run = False
    if len(sys.argv) == 2:
        if sys.argv[1] == "--init":
            is_init_run = True

    # Script does not need to establish a state.
    if not is_init_run:
        search_immutable_files()
5 | The scripts can be set up to either print out their results, send them to you via mail, 6 | or use [AlertR](https://github.com/sqall01/alertR) as a notification channel. 7 | 8 | ## Repository Structure 9 | 10 | The scripts are located in the directory `scripts/`. 11 | Each script contains a short summary in the header of the file with a description of what it is supposed to do, 12 | how to interpret the detection result (if it is not unequivocal), and known issues. 13 | If it has dependencies that have to be installed they are also listed, as well as references to where the idea for 14 | this script stems from (if available). 15 | 16 | Each script has a configuration file in the `scripts/config/` directory to configure it. 17 | If the configuration file was not found during the execution of the script, 18 | the script will fall back to default settings and print out the results. 19 | Hence, it is not necessary to provide a configuration file. 20 | 21 | The `scripts/lib/` directory contains code that is shared between different scripts. 22 | 23 | Scripts using a `monitor_` prefix hold a state and are only useful for monitoring purposes. 24 | A single usage of them for an investigation will only result in showing the current state of the 25 | Linux system and not changes that might be relevant for the system's security. If you want to 26 | establish the current state of your system as benign for these scripts, you can provide the `--init` argument. 27 | 28 | Scripts using a `search_` prefix search for indicators of a compromise. These can be used for investigation 29 | purposes. If you want to use them for monitoring purposes, you can provide the `--monitoring` argument 30 | to hold a state and only output new findings. 31 | 32 | ## Usage 33 | 34 | Take a look at the header of the script you want to execute. It contains a short description of what this script 35 | is supposed to do and what requirements are needed (if any are needed at all).
If requirements are needed, 36 | install them before running the script. 37 | 38 | The shared configuration file `scripts/config/config.py` contains settings that are used by all scripts. 39 | Furthermore, each script can be configured by using the corresponding configuration file in the `scripts/config/` 40 | directory. If no configuration file was found, a default setting is used and the results are printed out. 41 | 42 | Finally, you can run all configured scripts by executing `start_search.py` (which is located in the main directory) 43 | or by executing each script manually. A Python3 interpreter is needed to run the scripts. 44 | 45 | ### Monitoring 46 | 47 | If you want to use the scripts to monitor your Linux system constantly, you have to perform the following steps: 48 | 49 | 1. Set up a notification channel that is supported by the scripts (currently printing out, mail, 50 | or [AlertR](https://github.com/sqall01/alertR)). 51 | 52 | 2. Configure the scripts that you want to run using the configuration files in the `scripts/config/` directory. 53 | 54 | 3. Execute `start_search.py` with the `--init` argument to initialize the scripts with the `monitor_` prefix and let 55 | them establish a state of your system. However, this assumes that your system is currently uncompromised. 56 | If you are unsure of this, you should verify its current state. 57 | 58 | 4. Set up a cron job as `root` user that executes `start_search.py --monitoring` 59 | (e.g., `0 * * * * root /opt/LSMS/start_search.py --monitoring` to start the search hourly). 60 | The `--monitoring` argument let the scripts only report new findings and thus prevent them from constantly reporting 61 | the same issue. 
62 | 63 | ## List of Scripts 64 | 65 | | Name | Script | 66 | |----------------------------------------------------------------------|------------------------------------------------------------------------------| 67 | | Monitoring cron files | [monitor_cron.py](scripts/monitor_cron.py) | 68 | | Monitoring /etc/hosts file | [monitor_hosts_file.py](scripts/monitor_hosts_file.py) | 69 | | Monitoring /etc/ld.so.preload file | [monitor_ld_preload.py](scripts/monitor_ld_preload.py) | 70 | | Monitoring /etc/passwd file | [monitor_passwd.py](scripts/monitor_passwd.py) | 71 | | Monitoring modules | [monitor_modules.py](scripts/monitor_modules.py) | 72 | | Monitoring SSH authorized_keys files | [monitor_ssh_authorized_keys.py](scripts/monitor_ssh_authorized_keys.py) | 73 | | Monitoring systemd unit files | [monitor_systemd_units.py](scripts/monitor_systemd_units.py) | 74 | | Search running deleted programs | [search_deleted_exe.py](scripts/search_deleted_exe.py) | 75 | | Search executables in /dev/shm | [search_dev_shm.py](scripts/search_dev_shm.py) | 76 | | Search hidden ELF files | [search_hidden_exe.py](scripts/search_hidden_exe.py) | 77 | | Search immutable files | [search_immutable_files.py](scripts/search_immutable_files.py) | 78 | | Search lastlog entries missing in utmp logs | [search_lastlog_in_utmp.py](scripts/search_lastlog_in_utmp.py) | 79 | | Search fileless programs (memfd_create) | [search_memfd_create.py](scripts/search_memfd_create.py) | 80 | | Search kernel thread impersonations | [search_non_kthreads.py](scripts/search_non_kthreads.py) | 81 | | Search processes that were started by a now disconnected SSH session | [search_ssh_leftover_processes.py](scripts/search_ssh_leftover_processes.py) | 82 | | Search kernel module taint flags for suspicious combinations | [search_tainted_modules.py](scripts/search_tainted_modules.py) | 83 | | Search indicators the utmp logs were tampered with | [search_utmp_tampering.py](scripts/search_utmp_tampering.py) | 84 | | 
Test script to check if alerting works | [test_alert.py](scripts/test_alert.py) | 85 | | Verify integrity of installed .deb packages | [verify_deb_packages.py](scripts/verify_deb_packages.py) | 86 | -------------------------------------------------------------------------------- /scripts/monitor_ssh_authorized_keys.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Monitor ~/.ssh/authorized_keys for changes to detect malicious backdoor attempts. 13 | 14 | NOTE: The first execution of this script should be done with the argument "--init". 15 | Otherwise, the script will only show you the current state of the environment since no state was established yet. 16 | However, this assumes that the system is uncompromised during the initial execution. 17 | Hence, if you are unsure this is the case you should verify the current state 18 | before monitoring for changes will become an effective security measure. 19 | 20 | Requirements: 21 | None 22 | """ 23 | 24 | import os 25 | import stat 26 | import sys 27 | from typing import List, Tuple, Dict, Any 28 | 29 | import lib.global_vars 30 | from lib.state import load_state, store_state 31 | from lib.util import output_error, output_finding 32 | from lib.util_user import get_system_users 33 | 34 | # Read configuration. 
35 | try: 36 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR 37 | from config.monitor_ssh_authorized_keys import ACTIVATED 38 | STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__)) 39 | except: 40 | ALERTR_FIFO = None 41 | FROM_ADDR = None 42 | TO_ADDR = None 43 | ACTIVATED = True 44 | STATE_DIR = os.path.join("/tmp", os.path.basename(__file__)) 45 | 46 | 47 | class MonitorSSHException(Exception): 48 | pass 49 | 50 | 51 | def _get_home_dirs() -> List[Tuple[str, str]]: 52 | return [(x.name, x.home) for x in get_system_users()] 53 | 54 | 55 | def _get_system_ssh_data() -> List[Dict[str, Any]]: 56 | ssh_data = [] 57 | user_home_list = _get_home_dirs() 58 | 59 | for user, home in user_home_list: 60 | # Monitor "authorized_keys2" too since SSH also checks this file for keys (even though it is deprecated). 61 | for authorized_file_name in ["authorized_keys", "authorized_keys2"]: 62 | authorized_keys_file = os.path.join(home, ".ssh", authorized_file_name) 63 | if os.path.isfile(authorized_keys_file): 64 | ssh_user_data = {"user": user, 65 | "authorized_keys_file": authorized_keys_file, 66 | "authorized_keys_entries": _parse_authorized_keys_file(authorized_keys_file)} 67 | ssh_data.append(ssh_user_data) 68 | return ssh_data 69 | 70 | 71 | def _parse_authorized_keys_file(authorized_keys_file: str) -> List[str]: 72 | entries = set() 73 | try: 74 | with open(authorized_keys_file, 'rt') as fp: 75 | for line in fp: 76 | entries.add(line.strip()) 77 | 78 | except Exception as e: 79 | raise MonitorSSHException("Unable to parse file '%s'; Exception: '%s'" % (authorized_keys_file, str(e))) 80 | 81 | return list(entries) 82 | 83 | 84 | def monitor_ssh_authorized_keys(): 85 | 86 | # Decide where to output results. 
87 | print_output = False 88 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 89 | print_output = True 90 | 91 | if not ACTIVATED: 92 | if print_output: 93 | print("Module deactivated.") 94 | return 95 | 96 | stored_ssh_data = [] 97 | curr_ssh_data = [] 98 | try: 99 | state_data = load_state(STATE_DIR) 100 | if "ssh_data" in state_data.keys(): 101 | stored_ssh_data = state_data["ssh_data"] 102 | curr_ssh_data = _get_system_ssh_data() 103 | 104 | except Exception as e: 105 | output_error(__file__, str(e)) 106 | return 107 | 108 | # Check if any authorized_keys file is world writable. 109 | for curr_entry in curr_ssh_data: 110 | authorized_keys_file = curr_entry["authorized_keys_file"] 111 | file_stat = os.stat(authorized_keys_file) 112 | if file_stat.st_mode & stat.S_IWOTH: 113 | message = "SSH authorized_keys file for user '%s' is world writable." % curr_entry["user"] 114 | 115 | output_finding(__file__, message) 116 | 117 | # Compare stored data with current one. 118 | for stored_entry in stored_ssh_data: 119 | 120 | # Extract current entry belonging to the same user. 121 | curr_user_entry = None 122 | for curr_entry in curr_ssh_data: 123 | if stored_entry["user"] == curr_entry["user"]: 124 | curr_user_entry = curr_entry 125 | break 126 | if curr_user_entry is None: 127 | message = "SSH authorized_keys file for user '%s' was deleted." % stored_entry["user"] 128 | 129 | output_finding(__file__, message) 130 | continue 131 | 132 | # Check authorized_keys path has changed. 133 | if stored_entry["authorized_keys_file"] != curr_user_entry["authorized_keys_file"]: 134 | message = "SSH authorized_keys location for user '%s' changed from '%s' to '%s'." \ 135 | % (stored_entry["user"], 136 | stored_entry["authorized_keys_file"], 137 | curr_user_entry["authorized_keys_file"]) 138 | 139 | output_finding(__file__, message) 140 | 141 | # Check authorized_key was removed. 
142 | for authorized_key in stored_entry["authorized_keys_entries"]: 143 | if authorized_key not in curr_user_entry["authorized_keys_entries"]: 144 | message = "SSH authorized_keys entry was removed.\n\n" 145 | message += "Entry: %s" % authorized_key 146 | 147 | output_finding(__file__, message) 148 | 149 | # Check authorized_key was added. 150 | for authorized_key in curr_user_entry["authorized_keys_entries"]: 151 | if authorized_key not in stored_entry["authorized_keys_entries"]: 152 | message = "SSH authorized_keys entry was added.\n\n" 153 | message += "Entry: %s" % authorized_key 154 | 155 | output_finding(__file__, message) 156 | 157 | for curr_entry in curr_ssh_data: 158 | found = False 159 | for stored_entry in stored_ssh_data: 160 | if curr_entry["user"] == stored_entry["user"]: 161 | found = True 162 | break 163 | if not found: 164 | message = "New authorized_keys file was added for user '%s'.\n\n" % curr_entry["user"] 165 | message += "Entries:\n" 166 | for authorized_key in curr_entry["authorized_keys_entries"]: 167 | message += authorized_key 168 | message += "\n" 169 | 170 | output_finding(__file__, message) 171 | 172 | try: 173 | state_data["ssh_data"] = curr_ssh_data 174 | store_state(STATE_DIR, state_data) 175 | 176 | except Exception as e: 177 | output_error(__file__, str(e)) 178 | 179 | 180 | if __name__ == '__main__': 181 | if len(sys.argv) == 2: 182 | # Suppress output in our initial execution to establish a state. 183 | if sys.argv[1] == "--init": 184 | lib.global_vars.SUPPRESS_OUTPUT = True 185 | monitor_ssh_authorized_keys() 186 | -------------------------------------------------------------------------------- /scripts/monitor_systemd_units.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 

"""
Short summary:
Monitor systemd unit files to find ones that are used for malware persistence.

NOTE: The first execution of this script should be done with the argument "--init".
Otherwise, the script will only show you the current state of the environment since no state was established yet.
However, this assumes that the system is uncompromised during the initial execution.
Hence, if you are unsure this is the case you should verify the current state
before monitoring for changes will become an effective security measure.

Requirements:
None

Reference:
https://www.trendmicro.com/en_us/research/23/c/iron-tiger-sysupdate-adds-linux-targeting.html
"""

import os
import sys
from typing import Dict

import lib.global_vars
from lib.state import load_state, store_state
from lib.util import get_diff_per_line, output_error, output_finding

# Read configuration.
try:
    from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR
    from config.monitor_systemd_units import ACTIVATED, SYSTEMD_UNIT_DIRS
    STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__))
except Exception:
    # No (or broken) configuration available => fall back to defaults.
    # FIX: narrowed from a bare "except:" so SystemExit/KeyboardInterrupt are
    # not silently swallowed while importing the configuration.
    ALERTR_FIFO = None
    FROM_ADDR = None
    TO_ADDR = None
    ACTIVATED = True
    STATE_DIR = os.path.join("/tmp", os.path.basename(__file__))
    SYSTEMD_UNIT_DIRS = ["/etc/systemd/system",
                         "/etc/systemd/user",
                         "/etc/systemd/network",
                         "/usr/lib/systemd/system",
                         "/usr/lib/systemd/user",
                         "/usr/lib/systemd/network",
                         "/usr/local/lib/systemd/system",
                         "/usr/local/lib/systemd/user",
                         "/usr/local/lib/systemd/network",
                         "/lib/systemd/system",
                         "/lib/systemd/user",
                         "/lib/systemd/network",
                         "/run/systemd/system"]


def _get_system_unit_files() -> Dict[str, str]:
    """Collect all systemd unit files that are able to execute commands.

    Returns a dict mapping the unit file location to its complete content.
    """
    systemd_unit_files = dict()
    for systemd_unit_dir in SYSTEMD_UNIT_DIRS:
        for root, _, files in os.walk(systemd_unit_dir):
            for file in files:
                file_location = os.path.join(root, file)

                # Some files are broken symlinks, hence, check if they exist
                if os.path.exists(file_location):
                    with open(file_location, "rt") as fp:
                        data = fp.read()

                    # Filter for systemd unit files that can execute commands
                    if "[Unit]" in data and "[Service]" in data:
                        # Since keys do not have to start at the beginning of the line, we go through each line,
                        # remove leading whitespaces and check if it starts with a key we are interested in
                        for line in data.split("\n"):
                            normalized_line = line.strip()
                            if any(normalized_line.startswith(x) for x in ["ExecStart",
                                                                           "ExecStartPre",
                                                                           "ExecStartPost",
                                                                           "ExecReload",
                                                                           "ExecStop",
                                                                           "ExecStopPost"]):

                                # Store complete data of unit file. Even on a non-server system such as a
                                # xubuntu 22.04 we only have around 700 unit files of interest. Calculating with
                                # 1kB of data per file (which is way larger than a normal unit file has) we only
                                # need a little over 700 kB memory for this. Even on a Raspberry Pi we have no
                                # problem doing this. Further, it will prevent race-conditions when we already
                                # have the data stored and do not read it afterwards from the file if we generate
                                # alerts.
                                systemd_unit_files[file_location] = data
                                break

    return systemd_unit_files


def monitor_systemd_units():
    """Compare the current systemd unit files against the stored state and report changes."""
    # Decide where to output results.
    print_output = False
    if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None:
        print_output = True

    if not ACTIVATED:
        if print_output:
            print("Module deactivated.")
        return

    stored_systemd_units_data = {}
    try:
        stored_systemd_units_data = load_state(STATE_DIR)

    except Exception as e:
        output_error(__file__, str(e))
        return

    # Add units key in case we do not have any stored data yet.
    if "units" not in stored_systemd_units_data.keys():
        stored_systemd_units_data["units"] = {}

    curr_systemd_units_data = {}
    try:
        curr_systemd_units_data = _get_system_unit_files()

    except Exception as e:
        output_error(__file__, str(e))
        return

    # Compare stored unit files data with current one.
    stored_units_data = stored_systemd_units_data["units"]
    for stored_unit_file, stored_unit_data in stored_units_data.items():

        # Check if unit file was deleted.
        if stored_unit_file not in curr_systemd_units_data.keys():
            message = "Systemd unit file '%s' was deleted." % stored_unit_file
            output_finding(__file__, message)
            continue

        # Check if unit file was modified.
        if stored_unit_data != curr_systemd_units_data[stored_unit_file]:

            diff = get_diff_per_line("Old",
                                     stored_unit_data,
                                     "New",
                                     curr_systemd_units_data[stored_unit_file])

            message = "Systemd unit file '%s' was modified:\n\nDiff:\n%s\n\nNew file:\n%s" % (stored_unit_file,
                                                                                              diff,
                                                                                              curr_systemd_units_data[stored_unit_file])  # noqa:E501

            output_finding(__file__, message)

    # Check new unit file added.
    for curr_unit_file in curr_systemd_units_data.keys():
        if curr_unit_file not in stored_units_data.keys():
            message = "Systemd unit file '%s' was added:\n\n%s" % (curr_unit_file,
                                                                   curr_systemd_units_data[curr_unit_file])
            output_finding(__file__, message)

    try:
        store_state(STATE_DIR, {"units": curr_systemd_units_data})

    except Exception as e:
        output_error(__file__, str(e))


if __name__ == '__main__':
    if len(sys.argv) == 2:
        # Suppress output in our initial execution to establish a state.
        if sys.argv[1] == "--init":
            lib.global_vars.SUPPRESS_OUTPUT = True
    monitor_systemd_units()
# --------------------------------------------------------------------------
# /start_search.py
# --------------------------------------------------------------------------
#!/usr/bin/env python3

# written by sqall
# twitter: https://twitter.com/sqall01
# blog: https://h4des.org
# github: https://github.com/sqall01
#
# Licensed under the MIT License.

import os
import subprocess
import socket
import sys
import time
from scripts.config.config import START_PROCESS_TIMEOUT, TO_ADDR, FROM_ADDR, ALERTR_FIFO
from scripts.lib.alerts import raise_alert_alertr, raise_alert_mail


if __name__ == '__main__':

    print_output = False
    if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None:
        print_output = True

    script_dir = os.path.dirname(os.path.abspath(__file__)) + "/scripts/"
    for script in os.listdir(script_dir):
        # Execute all python scripts.
        if script[-3:] == ".py" and script != "__init__.py":

            if print_output:
                print("Executing %s" % script)

            to_execute = [script_dir + script]

            # Pass arguments to scripts.
            # Forward any CLI arguments (e.g. "--init") to every monitoring script.
            if len(sys.argv) > 1:
                to_execute.extend(sys.argv[1:])

            process = None
            try:
                process = subprocess.Popen(to_execute,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)

                # Wait for the script, but never longer than the configured timeout.
                process.wait(START_PROCESS_TIMEOUT)

            # Catch timeout.
            except subprocess.TimeoutExpired:
                if print_output:
                    print("Script '%s' timed out." % script)

                else:
                    # Raise an alert through the alertR FIFO if configured.
                    if ALERTR_FIFO is not None:

                        hostname = socket.gethostname()
                        optional_data = dict()
                        optional_data["script"] = script
                        optional_data["hostname"] = hostname
                        message = "Script '%s' on host '%s' timed out." % (script, hostname)
                        optional_data["message"] = message

                        raise_alert_alertr(ALERTR_FIFO,
                                           optional_data)

                    # Raise an alert via email if configured.
                    if FROM_ADDR is not None and TO_ADDR is not None:

                        hostname = socket.gethostname()
                        subject = "[Security] Script '%s' on '%s' timed out" % (script, hostname)
                        message = "Script '%s' on host '%s' timed out." % (script, hostname)

                        raise_alert_mail(FROM_ADDR,
                                         TO_ADDR,
                                         subject,
                                         message)

            # Catch any execution error.
            except Exception as e:
                if print_output:
                    print("Executing script '%s' raised error: %s" % (script, str(e)))

                else:
                    if ALERTR_FIFO is not None:

                        hostname = socket.gethostname()
                        optional_data = dict()
                        optional_data["script"] = script
                        optional_data["hostname"] = hostname
                        message = "Executing script '%s' on host '%s' raised error: %s" % (script, hostname, str(e))
                        optional_data["message"] = message

                        raise_alert_alertr(ALERTR_FIFO,
                                           optional_data)

                    if FROM_ADDR is not None and TO_ADDR is not None:

                        hostname = socket.gethostname()
                        subject = "[Security] Executing script '%s' on '%s' raised error" % (script, hostname)
                        message = "Executing script '%s' on host '%s' raised error: %s" % (script, hostname, str(e))

                        raise_alert_mail(FROM_ADDR,
                                         TO_ADDR,
                                         subject,
                                         message)

                # The script could not be started/run at all => skip to the next one.
                continue

            exit_code = process.poll()

            # Process did not terminate yet.
            if exit_code is None:
                # After a timeout the child is still running => ask it to stop (SIGTERM).
                process.terminate()
                time.sleep(5)
                exit_code = process.poll()

                # Kill process if not exited.
                # NOTE(review): -15 is poll()'s encoding of "terminated by SIGTERM";
                # anything else means the process ignored the terminate request.
                if exit_code != -15:
                    if print_output:
                        print("Script '%s' did not terminate. Killing it." % script)

                    else:
                        if ALERTR_FIFO is not None:

                            hostname = socket.gethostname()
                            optional_data = dict()
                            optional_data["script"] = script
                            optional_data["hostname"] = hostname
                            message = "Script '%s' on host '%s' did not terminate. Killing it." % (script, hostname)
                            optional_data["message"] = message

                            raise_alert_alertr(ALERTR_FIFO,
                                               optional_data)

                        if FROM_ADDR is not None and TO_ADDR is not None:

                            hostname = socket.gethostname()
                            subject = "[Security] Script '%s' on '%s' did not terminate" % (script, hostname)
                            message = "Script '%s' on host '%s' did not terminate. Killing it." % (script, hostname)

                            raise_alert_mail(FROM_ADDR,
                                             TO_ADDR,
                                             subject,
                                             message)

                    # noinspection PyBroadException
                    # NOTE(review): bare except here is deliberate best-effort cleanup;
                    # the process may already be gone when kill() is called.
                    try:
                        process.kill()
                    except:
                        pass

            # Process executed successfully.
            elif exit_code == 0:
                if print_output:
                    stdout, stderr = process.communicate()
                    print(stdout.decode("ascii"))
                    print("")

                continue

            # Process encountered error.
            else:
                if print_output:
                    print("Script '%s' exited with exit code: %d" % (script, exit_code))

                else:
                    if ALERTR_FIFO is not None:
                        hostname = socket.gethostname()
                        optional_data = dict()
                        optional_data["script"] = script
                        optional_data["hostname"] = hostname
                        message = "Script '%s' on host '%s' exited with exit code '%d'." % (script, hostname, exit_code)
                        optional_data["message"] = message

                        raise_alert_alertr(ALERTR_FIFO,
                                           optional_data)

                    if FROM_ADDR is not None and TO_ADDR is not None:
                        hostname = socket.gethostname()
                        subject = "[Security] Script '%s' on '%s' unsuccessful" % (script, hostname)
                        message = "Script '%s' on host '%s' exited with exit code '%d'." % (script, hostname, exit_code)

                        raise_alert_mail(FROM_ADDR,
                                         TO_ADDR,
                                         subject,
                                         message)

                # noinspection PyBroadException
                # Best-effort cleanup of a failed child process.
                try:
                    process.kill()
                except:
                    pass
# --------------------------------------------------------------------------
# /test/test_lib_util_module.py
# --------------------------------------------------------------------------
import os
import sys
# Fix to workaround importing issues from test cases
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts"))

import tempfile
import unittest

from scripts.lib.util_module import get_system_modules, SystemModuleException, SystemModuleState, SystemModule, \
    SystemModuleTaintFlag


class TestUtilModule(unittest.TestCase):
    """Unit tests for the /proc/modules parser in scripts/lib/util_module.py."""

    def test_get_system_modules_empty_file(self):
        # An empty /proc/modules style file must yield an empty module list.
        tmp_file = tempfile.NamedTemporaryFile(mode='w+t')

        modules = get_system_modules(tmp_file.name)

        self.assertEqual([], modules)

    def test_get_system_modules_no_file(self):
        # A missing file must raise SystemModuleException.
        self.assertRaises(SystemModuleException, get_system_modules, "/something_that_does/not/exist")

    def test_get_system_modules_one_line(self):
        tmp_file = tempfile.NamedTemporaryFile(mode='w+t')
        tmp_file.write("mei_pxp 16384 0 - Live 0x0000000000000000")
        tmp_file.flush()

        modules = get_system_modules(tmp_file.name)

        self.assertEqual(1, len(modules))
        self.assertEqual("mei_pxp", modules[0].name)
        self.assertEqual(16384, modules[0].size)
        self.assertEqual(0, modules[0].reference_count)
        self.assertEqual(SystemModuleState.LIVE, modules[0].state)
        self.assertEqual(set([]), modules[0].dependencies)
        self.assertEqual(set([]), modules[0].taint_flags)

    def test_get_system_modules_multiple_lines(self):
        # Samples cover all three states, dependencies, taint flags and a
        # Raspberry Pi style "[permanent]" dependency entry.
        modules_str = ["mei_pxp 16384 0 - Live 0x0000000000000000",
                       "snd_soc_hda_codec 24576 1 snd_soc_avs, Unloading
 0x0000000000000000",
                       "nvidia 56823808 2 nvidia_uvm,nvidia_modeset, Loading 0x0000000000000000 (PO)",
                       "rpcsec_gss_krb5 36864 0 - Live 0xffffffffc1611000",
                       "ipv6 450560 32 [permanent], Live 0x7f000000"  # Raspberry Pi
                       ]

        tmp_file = tempfile.NamedTemporaryFile(mode='w+t')
        tmp_file.write("\n".join(modules_str))
        tmp_file.flush()

        modules = get_system_modules(tmp_file.name)

        self.assertEqual(5, len(modules))
        self.assertEqual("mei_pxp", modules[0].name)
        self.assertEqual(16384, modules[0].size)
        self.assertEqual(0, modules[0].reference_count)
        self.assertEqual(SystemModuleState.LIVE, modules[0].state)
        self.assertEqual(set([]), modules[0].dependencies)
        self.assertEqual(set([]), modules[0].taint_flags)

        self.assertEqual("snd_soc_hda_codec", modules[1].name)
        self.assertEqual(24576, modules[1].size)
        self.assertEqual(1, modules[1].reference_count)
        self.assertEqual(SystemModuleState.UNLOADING, modules[1].state)
        self.assertEqual({"snd_soc_avs"}, modules[1].dependencies)
        self.assertEqual(set([]), modules[1].taint_flags)

        # "(PO)" must expand to the proprietary and out-of-tree taint flags.
        self.assertEqual("nvidia", modules[2].name)
        self.assertEqual(56823808, modules[2].size)
        self.assertEqual(2, modules[2].reference_count)
        self.assertEqual(SystemModuleState.LOADING, modules[2].state)
        self.assertEqual({"nvidia_uvm", "nvidia_modeset"}, modules[2].dependencies)
        self.assertEqual({SystemModuleTaintFlag.PROPRIETARY_MODULE, SystemModuleTaintFlag.OOT_MODULE}, modules[2].taint_flags)

        self.assertEqual("rpcsec_gss_krb5", modules[3].name)
        self.assertEqual(36864, modules[3].size)
        self.assertEqual(0, modules[3].reference_count)
        self.assertEqual(SystemModuleState.LIVE, modules[3].state)
        self.assertEqual(set([]), modules[3].dependencies)
        self.assertEqual(set([]), modules[3].taint_flags)

        # "[permanent]" is parsed as a (pseudo) dependency, not as a state.
        self.assertEqual("ipv6", modules[4].name)
        self.assertEqual(450560, modules[4].size)
        self.assertEqual(32, modules[4].reference_count)
        self.assertEqual(SystemModuleState.LIVE, modules[4].state)
        self.assertEqual(set(["[permanent]"]), modules[4].dependencies)
        self.assertEqual(set([]), modules[4].taint_flags)

    def test_get_system_modules_illegal_line(self):
        # A malformed line must abort parsing with SystemModuleException.
        tmp_file = tempfile.NamedTemporaryFile(mode='w+t')
        tmp_file.write("mei_pxp invalid 16384 0 - Live 0x0000000000000000")
        tmp_file.flush()

        self.assertRaises(SystemModuleException, get_system_modules, tmp_file.name)

    def test_SystemModule_from_proc_modules_line(self):
        module = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000")

        self.assertEqual("mei_pxp", module.name)
        self.assertEqual(16384, module.size)
        self.assertEqual(0, module.reference_count)
        self.assertEqual(SystemModuleState.LIVE, module.state)
        self.assertEqual(set([]), module.dependencies)

    def test_SystemModule_from_proc_modules_line_illegal(self):
        # Each variant breaks a different field: token count, size, refcount,
        # dependency marker, state and memory offset.
        self.assertRaises(ValueError, SystemModule.from_proc_modules_line, "mei_pxp invalid 0 - Live 0x0000000000000000")
        self.assertRaises(ValueError, SystemModule.from_proc_modules_line, "mei_pxp 123 invalid - Live 0x0000000000000000")
        self.assertRaises(ValueError, SystemModule.from_proc_modules_line, "mei_pxp 16384 0 Live 0x0000000000000000")
        self.assertRaises(ValueError, SystemModule.from_proc_modules_line, "mei_pxp 16384 0 - Li ve 0x0000000000000000")
        self.assertRaises(ValueError, SystemModule.from_proc_modules_line, "mei_pxp 16384 0 - Live 1234")

    def test_SystemModule_eq(self):
        module1 = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000")
        module2 = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000")
        module3 = SystemModule.from_proc_modules_line("nei_pxp 16384 0 - Live 0x0000000000000000")

        something_else = "something"
        self.assertEqual(module1, module2)
        self.assertNotEqual(module1, module3)
        # Comparison with a foreign type must not raise, only be unequal.
        self.assertNotEqual(module1, something_else)

    def test_SystemModule_hash(self):
        # module1 and module2 are identical and must collapse to one set entry;
        # module3 differs only in its taint flags and must stay distinct.
        module1 = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000")
        module2 = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000")
        module3 = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000 (PE)")

        hash_set = set()
        hash_set.add(module1)
        hash_set.add(module2)
        hash_set.add(module3)

        self.assertEqual(2, len(hash_set))

        found_module1 = False
        found_module3 = False
        for temp_module in hash_set:
            if temp_module == module1:
                found_module1 = True
            elif temp_module == module3:
                found_module3 = True

        self.assertTrue(found_module1)
        self.assertTrue(found_module3)

    def test_SystemModule_to_dict(self):
        module1 = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000 (OE)")
        module2 = SystemModule.from_proc_modules_line("mei_pxp 16384 3 something,else Live 0x0000000000000000")

        module1_dict = module1.to_dict()
        module2_dict = module2.to_dict()

        self.assertEqual("mei_pxp", module1_dict["name"])
        self.assertEqual(16384, module1_dict["size"])
        self.assertEqual(0, module1_dict["reference_count"])
        self.assertEqual("LIVE", module1_dict["state"])
        self.assertEqual([], module1_dict["dependencies"])
        self.assertEqual(2, len(module1_dict["taint_flags"]))
        self.assertEqual({"OOT_MODULE", "UNSIGNED_MODULE"}, set(module1_dict["taint_flags"]))

        self.assertEqual("mei_pxp", module2_dict["name"])
        self.assertEqual(16384, module2_dict["size"])
        self.assertEqual(3, module2_dict["reference_count"])
        self.assertEqual("LIVE", module2_dict["state"])
        self.assertEqual(2, len(module2_dict["dependencies"]))
        self.assertEqual({"something", "else"}, set(module2_dict["dependencies"]))
        self.assertEqual([], module2_dict["taint_flags"])

    def test_SystemModule_from_dict(self):
        # to_dict()/from_dict() must round-trip losslessly.
        module1 = SystemModule.from_proc_modules_line("mei_pxp 16384 0 - Live 0x0000000000000000 (OE)")
        module2 = SystemModule.from_proc_modules_line("mei_pxp 16384 3 something,else Live 0x0000000000000000")

        self.assertEqual(module1, SystemModule.from_dict(module1.to_dict()))
        self.assertEqual(module2, SystemModule.from_dict(module2.to_dict()))
# --------------------------------------------------------------------------
# /scripts/search_utmp_tampering.py
# --------------------------------------------------------------------------
#!/usr/bin/env python3

# written by sqall
# twitter: https://twitter.com/sqall01
# blog: https://h4des.org
# github: https://github.com/sqall01
#
# Licensed under the MIT License.

"""
Short summary:
Searches for indicators that utmp, wtmp and btmp were tampered with.
These files keep track of all logins and logouts to the system.

utmp - maintains a full accounting of the current status of the system, system boot time (used by uptime),
recording user logins at which terminals, logouts, system events etc.
17 | 18 | wtmp - acts as a historical utmp 19 | 20 | btmp - records failed login attempts 21 | 22 | The following detections are possible: 23 | 24 | TypeError - the type of the utmp entry is invalid since only 1-9 are allowed as value according to utmp(5) 25 | 26 | TimeZero - the timestamp is set to zero and hence the entry could be trashed by a malicious clean-up tool 27 | 28 | TimeTooOld - the timestamp in the entry is older than the one configured and could be set by a malicious clean-up tool 29 | 30 | TimeInconsistency - the timestamp in the entry is not in chronological order as it usually is the case 31 | (except for a few seconds/minutes depending on the system load/state) 32 | 33 | NOTE: On RaspberryPis there are entries in wtmp and utmp which trigger TimeZero detection. It seems that these 34 | entries are generated during boot time when the system time is not yet initialized. 35 | 36 | Requirements: 37 | pip package `python-dateutil` 38 | 39 | Reference: 40 | - https://en.wikipedia.org/wiki/Utmp 41 | - https://sandflysecurity.com/blog/using-linux-utmpdump-for-forensics-and-detecting-log-file-tampering 42 | """ 43 | 44 | import datetime 45 | import enum 46 | import os 47 | import sys 48 | from typing import Dict, List, Optional, cast 49 | 50 | from lib.state import load_state, store_state 51 | from lib.util import output_error, output_finding 52 | from lib.util_utmp import UtmpEntry, parse_utmp_file 53 | 54 | # Read configuration. 
55 | try: 56 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR 57 | from config.search_utmp_tampering import ACTIVATED, UTMP_FILE_LOCATIONS, UTMP_OLDEST_ENTRY 58 | MONITORING_MODE = False 59 | STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__)) 60 | except: 61 | ALERTR_FIFO = None 62 | FROM_ADDR = None 63 | TO_ADDR = None 64 | ACTIVATED = True 65 | MONITORING_MODE = False 66 | STATE_DIR = os.path.join("/tmp", os.path.basename(__file__)) 67 | UTMP_FILE_LOCATIONS = ["/var/run/utmp", "/var/log/wtmp", "/var/log/btmp"] 68 | UTMP_OLDEST_ENTRY = datetime.datetime.now() - datetime.timedelta(days=3650) 69 | 70 | 71 | class UtmpDetection(enum.Enum): 72 | Clean = 0 73 | TypeError = 1 74 | TimeZero = 2 75 | TimeTooOld = 3 76 | TimeInconsistency = 4 77 | 78 | 79 | def _check_utmp_data(utmp_data: List[UtmpEntry], utmp_file: str) -> Dict[UtmpEntry, List[UtmpDetection]]: 80 | """ 81 | Checks utmp data for suspicious entries. 82 | """ 83 | detections = {} 84 | prev_entry = None 85 | for entry in utmp_data: 86 | entry_detections = [] 87 | result = _check_utmp_type(entry) 88 | if result != UtmpDetection.Clean: 89 | entry_detections.append(result) 90 | 91 | result = _check_utmp_timestamp(prev_entry, entry, utmp_file) 92 | if result != UtmpDetection.Clean: 93 | entry_detections.append(result) 94 | 95 | if entry_detections: 96 | detections[entry] = entry_detections 97 | prev_entry = entry 98 | 99 | return detections 100 | 101 | 102 | def _check_utmp_type(entry: UtmpEntry) -> UtmpDetection: 103 | """ 104 | Checks the type value of the utmp entry for sanity. 105 | """ 106 | # Only valid values are 1-9 according to utmp(5) 107 | if 0 < entry.ut_type <= 9: 108 | return UtmpDetection.Clean 109 | return UtmpDetection.TypeError 110 | 111 | 112 | def _check_utmp_timestamp(prev: Optional[UtmpEntry], curr: UtmpEntry, utmp_file: str) -> UtmpDetection: 113 | """ 114 | Checks the timestamp value of the utmp entry for sanity. 
115 | """ 116 | if curr.ut_time.year == 1970 and curr.ut_time.month == 1 and curr.ut_time.day == 1: 117 | return UtmpDetection.TimeZero 118 | 119 | if curr.ut_time.replace(tzinfo=None) < UTMP_OLDEST_ENTRY: 120 | return UtmpDetection.TimeTooOld 121 | 122 | # Ignore /var/run/utmp in the time inconsistency check because entries are not in chronological order in this file 123 | if prev is not None and not os.path.basename(utmp_file).endswith("utmp") and curr.ut_time < prev.ut_time: 124 | # If the current entry is younger than the previous one, check the difference between both 125 | # (since usually the difference is only a few microseconds in normal circumstances). 126 | # To be on the safe side we allow a few seconds difference. 127 | # Furthermore, type 2 (BOOT_TIME) in Microsoft WSL has sometimes greater differences. 128 | difference_in_seconds = (prev.ut_time - curr.ut_time).total_seconds() 129 | if ((prev.ut_type != 2 and difference_in_seconds > 5) 130 | or (prev.ut_type == 2 and difference_in_seconds > 120)): 131 | return UtmpDetection.TimeInconsistency 132 | 133 | return UtmpDetection.Clean 134 | 135 | 136 | def search_utmp_tampering(): 137 | """ 138 | Starts the search for utmp tampering indicators in all configured files. 139 | """ 140 | # Decide where to output results. 
141 | print_output = False 142 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 143 | print_output = True 144 | 145 | if not ACTIVATED: 146 | if print_output: 147 | print("Module deactivated.") 148 | return 149 | 150 | # Load last results if monitoring mode is active 151 | last_results = {} # type: Dict[str, Dict[UtmpEntry, List[UtmpDetection]]] 152 | if MONITORING_MODE: 153 | try: 154 | stored_data = load_state(STATE_DIR) 155 | if "detections" in stored_data.keys(): 156 | for utmp_file, stored_detections in stored_data["detections"].items(): 157 | last_results[utmp_file] = {} 158 | for k, v in stored_detections.items(): 159 | last_results[utmp_file][UtmpEntry(k)] = cast(List[UtmpDetection], list(map(lambda x: UtmpDetection[x], v))) 160 | 161 | except Exception as e: 162 | output_error(__file__, str(e)) 163 | return 164 | 165 | new_results = {} # type: Dict[str, Dict[UtmpEntry, List[UtmpDetection]]] 166 | for utmp_file in UTMP_FILE_LOCATIONS: 167 | if not os.path.isfile(utmp_file): 168 | continue 169 | 170 | utmp_data = [] 171 | try: 172 | utmp_data = parse_utmp_file(utmp_file) 173 | except Exception as e: 174 | output_error(__file__, str(e)) 175 | continue 176 | 177 | detections = _check_utmp_data(utmp_data, utmp_file) 178 | 179 | # Check if a new detection has occurred 180 | # (in non-monitoring mode this will always yield new detections) 181 | has_new_detections = False 182 | if utmp_file not in last_results: 183 | has_new_detections = True 184 | else: 185 | for k, v in detections.items(): 186 | if k in last_results[utmp_file] and last_results[utmp_file][k] == v: 187 | continue 188 | has_new_detections = True 189 | 190 | # Only output findings if we have a new detection 191 | if has_new_detections and detections: 192 | message = "%d suspicious entry (or entries) in %s found:\n\n" % (len(detections), utmp_file) 193 | for k, v in detections.items(): 194 | message += "\nLine: %s" % k.line 195 | message += "\nDetections: %s\n" % ", 
".join(map(lambda x: x.name, v)) 196 | 197 | output_finding(__file__, message) 198 | 199 | new_results[utmp_file] = detections 200 | 201 | # Store results if monitoring mode is active 202 | if MONITORING_MODE: 203 | try: 204 | temp_results = {} # type: Dict[str, Dict[str, List[UtmpDetection]]] 205 | for utmp_file, temp_detections in new_results.items(): 206 | temp_results[utmp_file] = {} 207 | for k, v in temp_detections.items(): 208 | temp_results[utmp_file][k.line] = list(map(lambda x: x.name, v)) 209 | 210 | store_state(STATE_DIR, {"detections": temp_results}) 211 | 212 | except Exception as e: 213 | output_error(__file__, str(e)) 214 | 215 | 216 | if __name__ == '__main__': 217 | is_init_run = False 218 | if len(sys.argv) == 2: 219 | if sys.argv[1] == "--init": 220 | is_init_run = True 221 | if "--monitoring" in sys.argv: 222 | MONITORING_MODE = True 223 | 224 | # Script does not need to establish a state. 225 | if not is_init_run: 226 | search_utmp_tampering() 227 | -------------------------------------------------------------------------------- /test/test_search_lastlog_in_utmp.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | # Fix to workaround importing issues from test cases 4 | sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts")) 5 | 6 | import datetime 7 | import unittest 8 | import shutil 9 | from unittest.mock import patch 10 | 11 | from scripts.search_lastlog_in_utmp import _check_lastlog_in_umtp, search_lastlog_in_utmp 12 | from scripts.lib.util_utmp import UtmpEntry 13 | from scripts.lib.util_lastlog import LastlogEntry 14 | 15 | 16 | class TestSearchLastlogInUtmp(unittest.TestCase): 17 | 18 | TempDirectory = "/tmp/TestSearchLastlogInUtmp" 19 | 20 | def setUp(self): 21 | os.makedirs(TestSearchLastlogInUtmp.TempDirectory) 22 | 23 | def tearDown(self): 24 | shutil.rmtree(TestSearchLastlogInUtmp.TempDirectory) 25 | 26 | def test_check_lastlog_in_umtp(self): 27 | 
utmp_entry = UtmpEntry("[7] [06556] [ts/1] [sqall ] [pts/1 ] [172.19.80.1 ] [172.19.80.1 ] [2025-01-09T08:10:23,898892+00:00]") 28 | lastlog_entry = LastlogEntry(1000, "sqall", "pts/1", "172.19.80.1", 1736410223) 29 | 30 | result = _check_lastlog_in_umtp([lastlog_entry], [utmp_entry]) 31 | self.assertEqual(0, len(result)) 32 | 33 | def test_check_lastlog_in_umtp_missing_entry(self): 34 | utmp_entry = UtmpEntry("[7] [06556] [ts/1] [someone ] [pts/1 ] [172.19.80.1 ] [172.19.80.1 ] [2025-01-09T08:10:23,898892+00:00]") 35 | lastlog_entry = LastlogEntry(1000, "sqall", "pts/1", "172.19.80.1", 1736410223) 36 | 37 | result = _check_lastlog_in_umtp([lastlog_entry], [utmp_entry]) 38 | self.assertEqual(1, len(result)) 39 | self.assertEqual(1000, result[0].uid) 40 | self.assertEqual("sqall", result[0].name) 41 | self.assertEqual("pts/1", result[0].device) 42 | self.assertEqual("172.19.80.1", result[0].host) 43 | self.assertEqual(1736410223, result[0].latest_time.timestamp()) 44 | 45 | @patch("scripts.search_lastlog_in_utmp.output_error") 46 | @patch("scripts.search_lastlog_in_utmp.output_finding") 47 | @patch("scripts.search_lastlog_in_utmp.UTMP_FILE_LOCATIONS", [os.path.join(os.path.dirname(__file__), 48 | "resources", 49 | "wtmp_benign")]) 50 | @patch("scripts.search_lastlog_in_utmp.LASTLOG_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 51 | "resources", 52 | "lastlog")) 53 | @patch("scripts.search_lastlog_in_utmp.PASSWD_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 54 | "resources", 55 | "passwd")) 56 | def test_search_lastlog_in_utmp(self, output_finding_mock, output_error_mock): 57 | search_lastlog_in_utmp() 58 | 59 | output_error_mock.assert_not_called() 60 | output_finding_mock.assert_called_once() 61 | 62 | self.assertTrue("1 missing entry (or entries) in " in output_finding_mock.call_args.args[1]) 63 | self.assertTrue("Missing entry: 1000 sqall pts/1 172.19.80.1 2025-01-09 08:10:23+00:00" in output_finding_mock.call_args.args[1]) 64 | 65 | 
@patch("scripts.search_lastlog_in_utmp.output_error") 66 | @patch("scripts.search_lastlog_in_utmp.output_finding") 67 | @patch("scripts.search_lastlog_in_utmp.UTMP_FILE_LOCATIONS", [os.path.join(os.path.dirname(__file__), 68 | "resources", 69 | "wtmp_benign2")]) 70 | @patch("scripts.search_lastlog_in_utmp.LASTLOG_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 71 | "resources", 72 | "lastlog")) 73 | @patch("scripts.search_lastlog_in_utmp.PASSWD_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 74 | "resources", 75 | "passwd")) 76 | def test_search_lastlog_in_utmp_no_result(self, output_finding_mock, output_error_mock): 77 | search_lastlog_in_utmp() 78 | 79 | output_error_mock.assert_not_called() 80 | output_finding_mock.assert_not_called() 81 | 82 | @patch("scripts.search_lastlog_in_utmp.output_error") 83 | @patch("scripts.search_lastlog_in_utmp.output_finding") 84 | @patch("scripts.search_lastlog_in_utmp.UTMP_FILE_LOCATIONS", [os.path.join(os.path.dirname(__file__), 85 | "resources", 86 | "wtmp_benign")]) 87 | @patch("scripts.search_lastlog_in_utmp.LASTLOG_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 88 | "resources", 89 | "lastlog")) 90 | @patch("scripts.search_lastlog_in_utmp.PASSWD_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 91 | "resources", 92 | "passwd")) 93 | @patch("scripts.search_lastlog_in_utmp.MONITORING_MODE", True) 94 | @patch("scripts.search_lastlog_in_utmp.STATE_DIR", TempDirectory) 95 | def test_search_lastlog_in_utmp_monitoring_persistence(self, output_finding_mock, output_error_mock): 96 | search_lastlog_in_utmp() 97 | 98 | output_error_mock.assert_not_called() 99 | output_finding_mock.assert_called_once() 100 | 101 | self.assertTrue("1 missing entry (or entries) in " in output_finding_mock.call_args.args[1]) 102 | self.assertTrue("Missing entry: 1000 sqall pts/1 172.19.80.1 2025-01-09 08:10:23+00:00" in output_finding_mock.call_args.args[1]) 103 | 104 | output_finding_mock.reset_mock() 105 | 106 | 
search_lastlog_in_utmp() 107 | 108 | output_error_mock.assert_not_called() 109 | output_finding_mock.assert_not_called() 110 | 111 | @patch("scripts.search_lastlog_in_utmp._check_lastlog_in_umtp") 112 | @patch("scripts.search_lastlog_in_utmp.output_error") 113 | @patch("scripts.search_lastlog_in_utmp.output_finding") 114 | @patch("scripts.search_lastlog_in_utmp.UTMP_FILE_LOCATIONS", [os.path.join(os.path.dirname(__file__), 115 | "resources", 116 | "wtmp_benign")]) 117 | @patch("scripts.search_lastlog_in_utmp.LASTLOG_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 118 | "resources", 119 | "lastlog")) 120 | @patch("scripts.search_lastlog_in_utmp.PASSWD_FILE_LOCATION", os.path.join(os.path.dirname(__file__), 121 | "resources", 122 | "passwd")) 123 | @patch("scripts.search_lastlog_in_utmp.MONITORING_MODE", True) 124 | @patch("scripts.search_lastlog_in_utmp.STATE_DIR", TempDirectory) 125 | def test_search_lastlog_in_utmp_monitoring_persistence_cleanup(self, output_finding_mock, output_error_mock, check_lastlog_in_umtp_mock): 126 | check_lastlog_in_umtp_mock.return_value = [LastlogEntry(1000, "sqall", "pts/1", "172.19.80.1", 1736410223)] 127 | 128 | search_lastlog_in_utmp() 129 | 130 | output_error_mock.assert_not_called() 131 | output_finding_mock.assert_called_once() 132 | 133 | self.assertTrue("1 missing entry (or entries) in " in output_finding_mock.call_args.args[1]) 134 | self.assertTrue("Missing entry: 1000 sqall pts/1 172.19.80.1 2025-01-09 08:10:23+00:00" in output_finding_mock.call_args.args[1]) 135 | 136 | output_finding_mock.reset_mock() 137 | check_lastlog_in_umtp_mock.return_value = [] 138 | 139 | search_lastlog_in_utmp() 140 | 141 | output_error_mock.assert_not_called() 142 | output_finding_mock.assert_not_called() 143 | 144 | check_lastlog_in_umtp_mock.return_value = [LastlogEntry(1000, "sqall", "pts/1", "172.19.80.1", 1736410223)] 145 | 146 | search_lastlog_in_utmp() 147 | 148 | output_error_mock.assert_not_called() 149 | 
output_finding_mock.assert_called_once() 150 | self.assertTrue("1 missing entry (or entries) in " in output_finding_mock.call_args.args[1]) 151 | self.assertTrue("Missing entry: 1000 sqall pts/1 172.19.80.1 2025-01-09 08:10:23+00:00" in output_finding_mock.call_args.args[1]) 152 | -------------------------------------------------------------------------------- /scripts/monitor_cron.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # written by sqall 4 | # twitter: https://twitter.com/sqall01 5 | # blog: https://h4des.org 6 | # github: https://github.com/sqall01 7 | # 8 | # Licensed under the MIT License. 9 | 10 | """ 11 | Short summary: 12 | Monitor /etc/crontab, /etc/cron.d/*, user specific crontab files and script files run by cron (e.g., script files in /etc/cron.hourly) for changes to detect attempts for attacker persistence. 13 | Additionally, check if crontab entries and user specific crontab files belong to existing system users. 14 | 15 | NOTE: The first execution of this script should be done with the argument "--init". 16 | Otherwise, the script will only show you the current state of the environment since no state was established yet. 17 | However, this assumes that the system is uncompromised during the initial execution. 18 | Hence, if you are unsure this is the case you should verify the current state 19 | before monitoring for changes will become an effective security measure. 20 | 21 | Requirements: 22 | None 23 | """ 24 | 25 | import hashlib 26 | import os 27 | import re 28 | import sys 29 | from typing import Dict, List, Set 30 | 31 | import lib.global_vars 32 | from lib.state import load_state, store_state 33 | from lib.util import output_error, output_finding 34 | from lib.util_user import get_system_users 35 | 36 | # Read configuration. 
37 | try: 38 | from config.config import ALERTR_FIFO, FROM_ADDR, TO_ADDR, STATE_DIR 39 | from config.monitor_cron import ACTIVATED, USER_CRONTAB_DIR, CRON_SCRIPT_DIRS 40 | STATE_DIR = os.path.join(os.path.dirname(__file__), STATE_DIR, os.path.basename(__file__)) 41 | except: 42 | ALERTR_FIFO = None 43 | FROM_ADDR = None 44 | TO_ADDR = None 45 | ACTIVATED = True 46 | STATE_DIR = os.path.join("/tmp", os.path.basename(__file__)) 47 | USER_CRONTAB_DIR = "/var/spool/cron/crontabs/" 48 | CRON_SCRIPT_DIRS = ["/etc/cron.daily", "/etc/cron.hourly", "/etc/cron.monthly", "/etc/cron.weekly", "/etc/cron.d"] 49 | 50 | 51 | def _calculate_hash(file_location: str) -> str: 52 | with open(file_location, "rb") as fp: 53 | file_hash = hashlib.md5() 54 | chunk = fp.read(1048576) 55 | while chunk: 56 | file_hash.update(chunk) 57 | chunk = fp.read(1048576) 58 | 59 | return file_hash.hexdigest().upper() 60 | 61 | 62 | def _get_cron_script_files() -> Dict[str, str]: 63 | cron_script_files = dict() 64 | for cron_script_dir in CRON_SCRIPT_DIRS: 65 | for cron_script_file in os.listdir(cron_script_dir): 66 | cron_script_location = os.path.join(cron_script_dir, cron_script_file) 67 | cron_script_files[cron_script_location] = _calculate_hash(cron_script_location) 68 | 69 | return cron_script_files 70 | 71 | 72 | def _get_crontab_files() -> Dict[str, List[str]]: 73 | crontab_entries = dict() 74 | 75 | # Add default location of crontab entries. 76 | crontab_files = ["/etc/crontab"] 77 | 78 | # Add crontab files that are installed by other software. 79 | for crond_file in os.listdir("/etc/cron.d"): 80 | crond_location = os.path.join("/etc/cron.d", crond_file) 81 | if os.path.isfile(crond_location): 82 | crontab_files.append(crond_location) 83 | 84 | # Add user individual crontab files. 
85 | for crontab_file in os.listdir(USER_CRONTAB_DIR): 86 | crontab_location = os.path.join(USER_CRONTAB_DIR, crontab_file) 87 | if os.path.isfile(crontab_location): 88 | crontab_files.append(crontab_location) 89 | 90 | for crontab_file in crontab_files: 91 | crontab_entries[crontab_file] = _parse_crontab(crontab_file) 92 | 93 | return crontab_entries 94 | 95 | 96 | def _get_crontab_users(curr_crontab_data: Dict[str, List[str]]) -> Set[str]: 97 | crontab_users = set() 98 | 99 | # User individual crontab files are named after the username. 100 | for crontab_file in os.listdir(USER_CRONTAB_DIR): 101 | crontab_users.add(crontab_file) 102 | 103 | # Extract all crontab entries that contain a user that should run the command. 104 | crontab_entries_with_user = list(curr_crontab_data["/etc/crontab"]) 105 | for crontab_file in curr_crontab_data.keys(): 106 | if crontab_file.startswith("/etc/cron.d/"): 107 | crontab_entries_with_user.extend(curr_crontab_data[crontab_file]) 108 | 109 | # Extract user from crontab entries. 110 | for crontab_entry in crontab_entries_with_user: 111 | match = re.fullmatch(r'([*?\-,/\d]+)\s([*?\-,/\d]+)\s([*?\-,/\dLW]+)\s([*?\-,/\d]+)\s([*?\-,/\dL#]+)\s([\-\w]+)\s+(.+)', 112 | crontab_entry) 113 | if match is not None: 114 | crontab_users.add(match.groups()[5]) 115 | 116 | return crontab_users 117 | 118 | 119 | def _parse_crontab(file_location: str) -> List[str]: 120 | entries = list() 121 | with open(file_location, 'rt') as fp: 122 | for line in fp: 123 | line_strip = line.strip() 124 | if line_strip == "" or line_strip[0] == "#": 125 | continue 126 | 127 | entries.append(line_strip) 128 | 129 | return entries 130 | 131 | 132 | def monitor_cron(): 133 | 134 | # Decide where to output results. 
135 | print_output = False 136 | if ALERTR_FIFO is None and FROM_ADDR is None and TO_ADDR is None: 137 | print_output = True 138 | 139 | if not ACTIVATED: 140 | if print_output: 141 | print("Module deactivated.") 142 | return 143 | 144 | stored_cron_data = {} 145 | try: 146 | stored_cron_data = load_state(STATE_DIR) 147 | 148 | except Exception as e: 149 | output_error(__file__, str(e)) 150 | return 151 | 152 | # Add crontab key in case we do not have any stored data yet. 153 | if "crontab" not in stored_cron_data.keys(): 154 | stored_cron_data["crontab"] = {} 155 | 156 | # Add cronscripts key in case we do not have any stored data yet. 157 | if "cronscripts" not in stored_cron_data.keys(): 158 | stored_cron_data["cronscripts"] = {} 159 | 160 | curr_crontab_data = {} 161 | try: 162 | curr_crontab_data = _get_crontab_files() 163 | 164 | except Exception as e: 165 | output_error(__file__, str(e)) 166 | return 167 | 168 | # Compare stored crontab data with current one. 169 | stored_crontab_data = stored_cron_data["crontab"] 170 | for stored_crontab_file, stored_crontab_entries in stored_crontab_data.items(): 171 | 172 | # Check if crontab file was deleted. 173 | if stored_crontab_file not in curr_crontab_data.keys(): 174 | message = "Crontab file '%s' was deleted." % stored_crontab_file 175 | output_finding(__file__, message) 176 | continue 177 | 178 | # Check entries were deleted. 179 | for stored_crontab_entry in stored_crontab_entries: 180 | if stored_crontab_entry not in curr_crontab_data[stored_crontab_file]: 181 | message = "Entry in crontab file '%s' was deleted.\n\n" % stored_crontab_file 182 | message += "Deleted entry: %s" % stored_crontab_entry 183 | output_finding(__file__, message) 184 | 185 | # Check entries were added. 
186 | for curr_crontab_entry in curr_crontab_data[stored_crontab_file]: 187 | if curr_crontab_entry not in stored_crontab_entries: 188 | message = "Entry in crontab file '%s' was added.\n\n" % stored_crontab_file 189 | message += "Added entry: %s" % curr_crontab_entry 190 | output_finding(__file__, message) 191 | 192 | # Check new crontab file added. 193 | for curr_crontab_file, curr_crontab_entries in curr_crontab_data.items(): 194 | if curr_crontab_file not in stored_crontab_data.keys(): 195 | message = "Crontab file '%s' was added.\n\n" % curr_crontab_file 196 | for curr_crontab_entry in curr_crontab_entries: 197 | message += "Entry: %s\n" % curr_crontab_entry 198 | output_finding(__file__, message) 199 | 200 | # Check users running crontab entries actually exist as system users. 201 | system_users = get_system_users() 202 | for crontab_user in _get_crontab_users(curr_crontab_data): 203 | if not any([crontab_user == x.name for x in system_users]): 204 | message = "Crontab entry or entries are run as user '%s' but no such system user exists." % crontab_user 205 | output_finding(__file__, message) 206 | 207 | curr_script_data = {} 208 | try: 209 | curr_script_data = _get_cron_script_files() 210 | 211 | except Exception as e: 212 | output_error(__file__, str(e)) 213 | return 214 | 215 | # Compare stored cron script data with current one. 216 | stored_script_data = stored_cron_data["cronscripts"] 217 | for stored_script_file, stored_script_hash in stored_script_data.items(): 218 | 219 | # Check if cron script file was deleted. 220 | if stored_script_file not in curr_script_data.keys(): 221 | message = "Cron script file '%s' was deleted." % stored_script_file 222 | output_finding(__file__, message) 223 | continue 224 | 225 | # Check if cron script file was modified. 226 | if stored_script_hash != curr_script_data[stored_script_file]: 227 | message = "Cron script file '%s' was modified." 
% stored_script_file 228 | output_finding(__file__, message) 229 | 230 | # Check new cron script file added. 231 | for curr_script_file in curr_script_data.keys(): 232 | if curr_script_file not in stored_script_data.keys(): 233 | message = "Cron script file '%s' was added." % curr_script_file 234 | output_finding(__file__, message) 235 | 236 | try: 237 | store_state(STATE_DIR, {"crontab": curr_crontab_data, 238 | "cronscripts": curr_script_data}) 239 | 240 | except Exception as e: 241 | output_error(__file__, str(e)) 242 | 243 | 244 | if __name__ == '__main__': 245 | if len(sys.argv) == 2: 246 | # Suppress output in our initial execution to establish a state. 247 | if sys.argv[1] == "--init": 248 | lib.global_vars.SUPPRESS_OUTPUT = True 249 | monitor_cron() 250 | -------------------------------------------------------------------------------- /test/test_search_tainted_modules.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from scripts.lib.util_module import SystemModule 5 | 6 | # Fix to workaround importing issues from test cases 7 | sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts")) 8 | 9 | import shutil 10 | import unittest 11 | from unittest.mock import patch 12 | 13 | from scripts.search_tainted_modules import search_tainted_modules 14 | 15 | 16 | class TestSearchTaintedModules(unittest.TestCase): 17 | 18 | TempDirectory = "/tmp/TestSearchTaintedModules" 19 | 20 | def setUp(self): 21 | os.makedirs(TestSearchTaintedModules.TempDirectory) 22 | 23 | def tearDown(self): 24 | shutil.rmtree(TestSearchTaintedModules.TempDirectory) 25 | 26 | @patch("scripts.search_tainted_modules._get_suspicious_modules") 27 | @patch("scripts.search_tainted_modules.output_error") 28 | @patch("scripts.search_tainted_modules.output_finding") 29 | def test_search_tainted_modules_no_result(self, output_finding_mock, output_error_mock, get_suspicious_modules_mock): 30 | 
get_suspicious_modules_mock.return_value = [] 31 | 32 | search_tainted_modules() 33 | 34 | output_error_mock.assert_not_called() 35 | output_finding_mock.assert_not_called() 36 | 37 | @patch("scripts.search_tainted_modules._get_suspicious_modules") 38 | @patch("scripts.search_tainted_modules.output_error") 39 | @patch("scripts.search_tainted_modules.output_finding") 40 | def test_search_tainted_modules_multi_result(self, output_finding_mock, output_error_mock, get_suspicious_modules_mock): 41 | module1 = SystemModule.from_dict({'name': 'nvidia_uvm', 42 | 'size': 1200128, 43 | 'reference_count': 0, 44 | 'state': 'LIVE', 45 | 'dependencies': [], 46 | 'taint_flags': ['PROPRIETARY_MODULE', 'OOT_MODULE', 'UNSIGNED_MODULE']}) 47 | module2 = SystemModule.from_dict({'name': 'nvidia_drm', 48 | 'size': 1234, 49 | 'reference_count': 2, 50 | 'state': 'LIVE', 51 | 'dependencies': [], 52 | 'taint_flags': ['PROPRIETARY_MODULE', 'OOT_MODULE']}) 53 | get_suspicious_modules_mock.return_value = [module1, module2] 54 | 55 | search_tainted_modules() 56 | 57 | output_error_mock.assert_not_called() 58 | output_finding_mock.assert_called_once() 59 | self.assertTrue("2 suspicious loaded module(s) found:" in output_finding_mock.call_args.args[1]) 60 | self.assertTrue("%s - State: %s; Dependencies: %s; Taint Flags: %s" % (module1.name, 61 | module1.state.name, 62 | ",".join(module1.dependencies), 63 | ",".join(map(lambda x: x.name, module1.taint_flags))) 64 | in output_finding_mock.call_args.args[1]) 65 | self.assertTrue("%s - State: %s; Dependencies: %s; Taint Flags: %s" % (module2.name, 66 | module2.state.name, 67 | ",".join(module2.dependencies), 68 | ",".join(map(lambda x: x.name, module2.taint_flags))) 69 | in output_finding_mock.call_args.args[1]) 70 | 71 | @patch("scripts.search_tainted_modules._get_suspicious_modules") 72 | @patch("scripts.search_tainted_modules.output_error") 73 | @patch("scripts.search_tainted_modules.output_finding") 74 | def 
test_search_tainted_modules_one_result(self, output_finding_mock, output_error_mock, get_suspicious_modules_mock): 75 | module = SystemModule.from_dict({'name': 'nvidia_uvm', 76 | 'size': 1200128, 77 | 'reference_count': 0, 78 | 'state': 'LIVE', 79 | 'dependencies': [], 80 | 'taint_flags': ['PROPRIETARY_MODULE', 'OOT_MODULE', 'UNSIGNED_MODULE']}) 81 | get_suspicious_modules_mock.return_value = [module] 82 | 83 | search_tainted_modules() 84 | 85 | output_error_mock.assert_not_called() 86 | output_finding_mock.assert_called_once() 87 | self.assertTrue("1 suspicious loaded module(s) found:" in output_finding_mock.call_args.args[1]) 88 | self.assertTrue("%s - State: %s; Dependencies: %s; Taint Flags: %s" % (module.name, 89 | module.state.name, 90 | ",".join(module.dependencies), 91 | ",".join(map(lambda x: x.name, module.taint_flags))) 92 | in output_finding_mock.call_args.args[1]) 93 | 94 | @patch("scripts.search_tainted_modules._get_suspicious_modules") 95 | @patch("scripts.search_tainted_modules.output_error") 96 | @patch("scripts.search_tainted_modules.output_finding") 97 | @patch("scripts.search_tainted_modules.MONITORING_MODE", True) 98 | @patch("scripts.search_tainted_modules.STATE_DIR", TempDirectory) 99 | def test_search_tainted_modules_monitoring_persistence(self, output_finding_mock, output_error_mock, get_suspicious_modules_mock): 100 | module = SystemModule.from_dict({'name': 'nvidia_uvm', 101 | 'size': 1200128, 102 | 'reference_count': 0, 103 | 'state': 'LIVE', 104 | 'dependencies': [], 105 | 'taint_flags': ['PROPRIETARY_MODULE', 'OOT_MODULE', 'UNSIGNED_MODULE']}) 106 | get_suspicious_modules_mock.return_value = [module] 107 | 108 | search_tainted_modules() 109 | 110 | output_error_mock.assert_not_called() 111 | output_finding_mock.assert_called_once() 112 | self.assertTrue("1 suspicious loaded module(s) found:" in output_finding_mock.call_args.args[1]) 113 | self.assertTrue("%s - State: %s; Dependencies: %s; Taint Flags: %s" % (module.name, 114 | 
module.state.name, 115 | ",".join(module.dependencies), 116 | ",".join(map(lambda x: x.name, module.taint_flags))) 117 | in output_finding_mock.call_args.args[1]) 118 | 119 | search_tainted_modules() 120 | 121 | output_error_mock.assert_not_called() 122 | output_finding_mock.assert_called_once() 123 | 124 | @patch("scripts.search_tainted_modules._get_suspicious_modules") 125 | @patch("scripts.search_tainted_modules.output_error") 126 | @patch("scripts.search_tainted_modules.output_finding") 127 | @patch("scripts.search_tainted_modules.MONITORING_MODE", True) 128 | @patch("scripts.search_tainted_modules.STATE_DIR", TempDirectory) 129 | def test_search_tainted_modules_monitoring_persistence_cleanup(self, output_finding_mock, output_error_mock, get_suspicious_modules_mock): 130 | module = SystemModule.from_dict({'name': 'nvidia_uvm', 131 | 'size': 1200128, 132 | 'reference_count': 0, 133 | 'state': 'LIVE', 134 | 'dependencies': [], 135 | 'taint_flags': ['PROPRIETARY_MODULE', 'OOT_MODULE', 'UNSIGNED_MODULE']}) 136 | get_suspicious_modules_mock.return_value = [module] 137 | 138 | search_tainted_modules() 139 | 140 | output_error_mock.assert_not_called() 141 | output_finding_mock.assert_called_once() 142 | output_error_mock.assert_not_called() 143 | output_finding_mock.assert_called_once() 144 | self.assertTrue("1 suspicious loaded module(s) found:" in output_finding_mock.call_args.args[1]) 145 | self.assertTrue("%s - State: %s; Dependencies: %s; Taint Flags: %s" % (module.name, 146 | module.state.name, 147 | ",".join(module.dependencies), 148 | ",".join(map(lambda x: x.name, module.taint_flags))) 149 | in output_finding_mock.call_args.args[1]) 150 | 151 | output_error_mock.reset_mock() 152 | output_finding_mock.reset_mock() 153 | get_suspicious_modules_mock.return_value = [] 154 | 155 | search_tainted_modules() 156 | 157 | output_error_mock.assert_not_called() 158 | output_finding_mock.assert_not_called() 159 | 160 | get_suspicious_modules_mock.return_value = [module] 
161 | 162 | search_tainted_modules() 163 | 164 | output_error_mock.assert_not_called() 165 | output_finding_mock.assert_called_once() 166 | self.assertTrue("1 suspicious loaded module(s) found:" in output_finding_mock.call_args.args[1]) 167 | self.assertTrue("%s - State: %s; Dependencies: %s; Taint Flags: %s" % (module.name, 168 | module.state.name, 169 | ",".join(module.dependencies), 170 | ",".join(map(lambda x: x.name, module.taint_flags))) 171 | in output_finding_mock.call_args.args[1]) 172 | -------------------------------------------------------------------------------- /test/test_search_utmp_tampering.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | # Fix to workaround importing issues from test cases 4 | sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts")) 5 | 6 | import datetime 7 | import unittest 8 | import shutil 9 | from unittest.mock import patch 10 | 11 | from scripts.lib.util_utmp import UtmpEntry 12 | from scripts.search_utmp_tampering import UtmpDetection, _check_utmp_type, _check_utmp_timestamp, _check_utmp_data, \ 13 | search_utmp_tampering 14 | 15 | class TestSearchUtmpTampering(unittest.TestCase): 16 | 17 | TempDirectory = "/tmp/TestSearchUtmpTampering" 18 | 19 | def setUp(self): 20 | os.makedirs(TestSearchUtmpTampering.TempDirectory) 21 | 22 | def tearDown(self): 23 | shutil.rmtree(TestSearchUtmpTampering.TempDirectory) 24 | 25 | def test_check_utmp_type(self): 26 | entity_clean = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]") 27 | self.assertEqual(UtmpDetection.Clean, _check_utmp_type(entity_clean)) 28 | 29 | entity_zero = UtmpEntry("[0] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]") 30 | self.assertEqual(UtmpDetection.TypeError, _check_utmp_type(entity_zero)) 31 | 32 | entity_greater_nine = UtmpEntry("[10] 
[00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]") 33 | self.assertEqual(UtmpDetection.TypeError, _check_utmp_type(entity_greater_nine)) 34 | 35 | @patch("scripts.search_utmp_tampering.UTMP_OLDEST_ENTRY", datetime.datetime.now() - datetime.timedelta(days=30)) 36 | def test_check_utmp_timestamp(self): 37 | entity_clean = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]") 38 | self.assertEqual(UtmpDetection.Clean, _check_utmp_timestamp(None, entity_clean, "/var/log/wtmp")) 39 | 40 | entity_time_zero = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [1970-01-01T00:00:00,000000+00:00]") 41 | self.assertEqual(UtmpDetection.TimeZero, _check_utmp_timestamp(None, entity_time_zero, "/var/log/wtmp")) 42 | 43 | entity_too_old = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-08-15T07:57:42,674975+00:00]") 44 | self.assertEqual(UtmpDetection.TimeTooOld, _check_utmp_timestamp(None, entity_too_old, "/var/log/wtmp")) 45 | 46 | entity_5after_before_clean = UtmpEntry("[1] [00000] [~~ ] [runlevel ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:47,674975+00:00]") 47 | self.assertEqual(UtmpDetection.Clean, _check_utmp_timestamp(entity_5after_before_clean, entity_clean, "/var/log/wtmp")) 48 | 49 | entity_6seconds_after_clean = UtmpEntry("[1] [00000] [~~ ] [runlevel ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:48,674975+00:00]") 50 | self.assertEqual(UtmpDetection.TimeInconsistency, _check_utmp_timestamp(entity_6seconds_after_clean, entity_clean, "/var/log/wtmp")) 51 | 52 | entity_120seconds_after_clean_type_2 = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:59:42,674975+00:00]") 53 | self.assertEqual(UtmpDetection.Clean, 
_check_utmp_timestamp(entity_120seconds_after_clean_type_2, entity_clean, "/var/log/wtmp")) 54 | 55 | entity_121seconds_after_clean_type_2 = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:59:43,674975+00:00]") 56 | self.assertEqual(UtmpDetection.TimeInconsistency, _check_utmp_timestamp(entity_121seconds_after_clean_type_2, entity_clean, "/var/log/wtmp")) 57 | self.assertEqual(UtmpDetection.Clean, _check_utmp_timestamp(entity_121seconds_after_clean_type_2, entity_clean, "/var/run/utmp")) 58 | 59 | def test_check_utmp_data(self): 60 | utmp_data = [ 61 | UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]"), 62 | UtmpEntry("[0] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:43,674975+00:00]"), 63 | UtmpEntry("[10] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:44,674975+00:00]"), 64 | UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [1970-01-01T00:00:00,000000+00:00]") 65 | ] 66 | results = _check_utmp_data(utmp_data, "/var/log/wtmp") 67 | self.assertEqual(3, len(results)) 68 | 69 | @patch("scripts.search_utmp_tampering._check_utmp_data") 70 | @patch("scripts.search_utmp_tampering.output_error") 71 | @patch("scripts.search_utmp_tampering.output_finding") 72 | @patch("scripts.search_utmp_tampering.UTMP_FILE_LOCATIONS", [os.path.join(os.path.dirname(__file__), 73 | "resources", 74 | "wtmp_benign")]) 75 | def test_search_utmp_tampering_no_result(self, output_finding_mock, output_error_mock, check_utmp_data_mock): 76 | check_utmp_data_mock.return_value = {} 77 | 78 | search_utmp_tampering() 79 | 80 | output_error_mock.assert_not_called() 81 | output_finding_mock.assert_not_called() 82 | 83 | @patch("scripts.search_utmp_tampering._check_utmp_data") 84 | 
@patch("scripts.search_utmp_tampering.output_error") 85 | @patch("scripts.search_utmp_tampering.output_finding") 86 | @patch("scripts.search_utmp_tampering.UTMP_FILE_LOCATIONS", ["/something_that_does/not/exist", 87 | os.path.join(os.path.dirname(__file__), 88 | "resources", 89 | "wtmp_benign")]) 90 | def test_search_utmp_tampering_file_not_found(self, output_finding_mock, output_error_mock, check_utmp_data_mock): 91 | check_utmp_data_mock.return_value = {} 92 | 93 | search_utmp_tampering() 94 | 95 | output_error_mock.assert_not_called() 96 | output_finding_mock.assert_not_called() 97 | 98 | @patch("scripts.search_utmp_tampering._check_utmp_data") 99 | @patch("scripts.search_utmp_tampering.output_error") 100 | @patch("scripts.search_utmp_tampering.output_finding") 101 | @patch("scripts.search_utmp_tampering.UTMP_FILE_LOCATIONS", [os.path.join(os.path.dirname(__file__), 102 | "resources", 103 | "wtmp_benign")]) 104 | @patch("scripts.search_utmp_tampering.MONITORING_MODE", True) 105 | @patch("scripts.search_utmp_tampering.STATE_DIR", TempDirectory) 106 | def test_search_utmp_tampering_monitoring_persistence(self, output_finding_mock, output_error_mock, check_utmp_data_mock): 107 | entry = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]") 108 | check_utmp_data_mock.return_value = {entry: [UtmpDetection.TypeError]} 109 | 110 | search_utmp_tampering() 111 | 112 | output_error_mock.assert_not_called() 113 | output_finding_mock.assert_called_once() 114 | self.assertTrue("1 suspicious entry (or entries) in " in output_finding_mock.call_args.args[1]) 115 | self.assertTrue("Line: %s" % entry.line in output_finding_mock.call_args.args[1]) 116 | self.assertTrue("Detections: TypeError" in output_finding_mock.call_args.args[1]) 117 | 118 | output_finding_mock.reset_mock() 119 | 120 | search_utmp_tampering() 121 | 122 | output_error_mock.assert_not_called() 123 | output_finding_mock.assert_not_called() 
124 | 125 | @patch("scripts.search_utmp_tampering._check_utmp_data") 126 | @patch("scripts.search_utmp_tampering.output_error") 127 | @patch("scripts.search_utmp_tampering.output_finding") 128 | @patch("scripts.search_utmp_tampering.UTMP_FILE_LOCATIONS", [os.path.join(os.path.dirname(__file__), 129 | "resources", 130 | "wtmp_benign")]) 131 | @patch("scripts.search_utmp_tampering.MONITORING_MODE", True) 132 | @patch("scripts.search_utmp_tampering.STATE_DIR", TempDirectory) 133 | def test_search_utmp_tampering_monitoring_persistence_cleanup(self, output_finding_mock, output_error_mock, check_utmp_data_mock): 134 | entry = UtmpEntry("[2] [00000] [~~ ] [reboot ] [~ ] [5.15.167.4-microsoft-standard-WSL2] [0.0.0.0 ] [2025-09-16T07:57:42,674975+00:00]") 135 | check_utmp_data_mock.return_value = {entry: [UtmpDetection.TypeError]} 136 | 137 | search_utmp_tampering() 138 | 139 | output_error_mock.assert_not_called() 140 | output_finding_mock.assert_called_once() 141 | self.assertTrue("1 suspicious entry (or entries) in " in output_finding_mock.call_args.args[1]) 142 | self.assertTrue("Line: %s" % entry.line in output_finding_mock.call_args.args[1]) 143 | self.assertTrue("Detections: TypeError" in output_finding_mock.call_args.args[1]) 144 | 145 | output_error_mock.reset_mock() 146 | output_finding_mock.reset_mock() 147 | check_utmp_data_mock.return_value = {} 148 | 149 | search_utmp_tampering() 150 | 151 | output_error_mock.assert_not_called() 152 | output_finding_mock.assert_not_called() 153 | 154 | check_utmp_data_mock.return_value = {entry: [UtmpDetection.TypeError]} 155 | 156 | search_utmp_tampering() 157 | 158 | output_error_mock.assert_not_called() 159 | output_finding_mock.assert_called_once() 160 | self.assertTrue("1 suspicious entry (or entries) in " in output_finding_mock.call_args.args[1]) 161 | self.assertTrue("Line: %s" % entry.line in output_finding_mock.call_args.args[1]) 162 | self.assertTrue("Detections: TypeError" in output_finding_mock.call_args.args[1]) 
# scripts/lib/util_module.py
import re
from enum import Enum
from typing import Set, List, Dict, Any, cast


class SystemModuleException(Exception):
    """Raised when the kernel module list cannot be read or parsed."""
    pass


class SystemModuleState(Enum):
    """State of a kernel module as shown in the state column of /proc/modules."""
    INVALID = 0
    LIVE = 1
    LOADING = 2
    UNLOADING = 3

    @staticmethod
    def from_str(value: str):
        """
        Parses the state column of /proc/modules (case-insensitive).

        @param value: state string (e.g. "Live")
        @return: matching state, or SystemModuleState.INVALID for unknown values
        """
        return {"live": SystemModuleState.LIVE,
                "loading": SystemModuleState.LOADING,
                "unloading": SystemModuleState.UNLOADING}.get(value.lower(), SystemModuleState.INVALID)


class SystemModuleTaintFlag(Enum):
    """
    - https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/kernel/panic.c#n494
    - https://www.kernel.org/doc/Documentation/sysctl/kernel.txt (section "tainted")

    1 (P): proprietary module was loaded
    2 (F): module was force loaded
    4 (S): SMP kernel oops on an officially SMP incapable processor
    8 (R): module was force unloaded
    16 (M): processor reported a Machine Check Exception (MCE)
    32 (B): bad page referenced or some unexpected page flags
    64 (U): taint requested by userspace application
    128 (D): kernel died recently, i.e. there was an OOPS or BUG
    256 (A): an ACPI table was overridden by user
    512 (W): kernel issued warning
    1024 (C): staging driver was loaded
    2048 (I): workaround for bug in platform firmware applied
    4096 (O): externally-built ("out-of-tree") module was loaded
    8192 (E): unsigned module was loaded
    16384 (L): soft lockup occurred
    32768 (K): kernel has been live patched
    65536 (X): Auxiliary taint, defined and used by for distros
    131072 (T): The kernel was built with the struct randomization plugin
    """
    INVALID = 0
    PROPRIETARY_MODULE = 1
    FORCED_MODULE = 2
    CPU_OUT_OF_SPEC = 4
    FORCED_RMMOD = 8
    MACHINE_CHECK = 16
    BAD_PAGE = 32
    USER = 64
    DIE = 128
    OVERRIDDEN_ACPI_TABLE = 256
    WARN = 512
    CRAP = 1024
    FIRMWARE_WORKAROUND = 2048
    OOT_MODULE = 4096
    UNSIGNED_MODULE = 8192
    SOFTLOCKUP = 16384
    LIVEPATCH = 32768
    AUX = 65536
    RANDSTRUCT = 131072

    @staticmethod
    def from_str(value: str):
        """
        Parses a single taint flag letter (case-insensitive) as printed by the kernel.

        @param value: one-letter flag (e.g. "O")
        @return: matching flag, or SystemModuleTaintFlag.INVALID for unknown letters
        """
        # Built inside the method on purpose: a dict at Enum class level would
        # be treated as a (non-hashable) member candidate.
        flag_by_letter = {
            "P": SystemModuleTaintFlag.PROPRIETARY_MODULE,
            "F": SystemModuleTaintFlag.FORCED_MODULE,
            "S": SystemModuleTaintFlag.CPU_OUT_OF_SPEC,
            "R": SystemModuleTaintFlag.FORCED_RMMOD,
            "M": SystemModuleTaintFlag.MACHINE_CHECK,
            "B": SystemModuleTaintFlag.BAD_PAGE,
            "U": SystemModuleTaintFlag.USER,
            "D": SystemModuleTaintFlag.DIE,
            "A": SystemModuleTaintFlag.OVERRIDDEN_ACPI_TABLE,
            "W": SystemModuleTaintFlag.WARN,
            "C": SystemModuleTaintFlag.CRAP,
            "I": SystemModuleTaintFlag.FIRMWARE_WORKAROUND,
            "O": SystemModuleTaintFlag.OOT_MODULE,
            "E": SystemModuleTaintFlag.UNSIGNED_MODULE,
            "L": SystemModuleTaintFlag.SOFTLOCKUP,
            "K": SystemModuleTaintFlag.LIVEPATCH,
            "X": SystemModuleTaintFlag.AUX,
            "T": SystemModuleTaintFlag.RANDSTRUCT,
        }
        return flag_by_letter.get(value.upper(), SystemModuleTaintFlag.INVALID)


class SystemModule:

    def __init__(self,
                 name: str,
                 size: int,
                 reference_count: int,
                 state: SystemModuleState,
                 dependencies: Set[str],
                 taint_flags: Set[SystemModuleTaintFlag]):
        """
        Represents a module from /proc/modules

        @param name: module name
        @param size: size in bytes of the module
        @param reference_count: count of references to this module (documentation says "number of instances",
        but source code says reference count https://elixir.bootlin.com/linux/v6.12.6/source/kernel/module/procfs.c#L107)
        @param state: state of the module
        @param dependencies: name of modules that this module depends on
        @param taint_flags: taint flags for the module
        """
        self._name = name
        self._size = size
        self._reference_count = reference_count
        self._state = state
        self._dependencies = dependencies
        self._taint_flags = taint_flags

    def __eq__(self, other):
        # Duck-typed comparison: any object exposing the same attributes with
        # equal values compares equal (e.g. a from_dict() round-trip copy).
        return (hasattr(other, "name")
                and self.name == other.name
                and hasattr(other, "size")
                and self.size == other.size
                and hasattr(other, "reference_count")
                and self.reference_count == other.reference_count
                and hasattr(other, "state")
                and self.state.value == other.state.value
                and hasattr(other, "dependencies")
                and self.dependencies == other.dependencies
                and hasattr(other, "taint_flags")
                and set(map(lambda x: x.value, self.taint_flags)) == set(map(lambda x: x.value, other.taint_flags)))

    def __hash__(self):
        # Dependencies are sorted before joining so that equal objects always
        # hash equally, independent of set iteration order.
        return hash((self.name,
                     self.size,
                     self.reference_count,
                     self.state.value,
                     ",".join(sorted(self.dependencies)),
                     sum(map(lambda x: x.value, self.taint_flags))))

    def __str__(self):
        return "%s:%d:%d:%s:%s:%s" % (self._name,
                                      self._size,
                                      self._reference_count,
                                      self._state.name,
                                      self._dependencies,
                                      self._taint_flags)

    def to_dict(self) -> Dict[str, Any]:
        """
        Serializes the module into a JSON-compatible dictionary (inverse of from_dict()).
        """
        return {"name": self.name,
                "size": self.size,
                "reference_count": self.reference_count,
                "state": self.state.name,
                "dependencies": list(self.dependencies),
                "taint_flags": list(map(lambda x: x.name, self.taint_flags))}

    @staticmethod
    def from_dict(module_dict: Dict[str, Any]):
        """
        Creates a module object from its to_dict() representation.

        @param module_dict: dictionary as produced by to_dict()
        @return: module object
        """
        return SystemModule(module_dict["name"],
                            module_dict["size"],
                            module_dict["reference_count"],
                            cast(SystemModuleState, SystemModuleState[module_dict["state"]]),
                            set(module_dict["dependencies"]),
                            cast(Set[SystemModuleTaintFlag],
                                 set(map(lambda x: SystemModuleTaintFlag[x], module_dict["taint_flags"]))))

    @staticmethod
    def from_proc_modules_line(proc_line: str):
        """
        Parses a line from /proc/modules and creates a module object

        @param proc_line: line from /proc/modules to parse
        @return: module object parsed from argument line
        @raise ValueError: if the line does not match the expected format
        """

        """
        Examples from Ubuntu 22.04:
        mei_pxp 16384 0 - Live 0x0000000000000000
        irqbypass 12288 1 kvm, Live 0x0000000000000000
        nvidia 56823808 2 nvidia_uvm,nvidia_modeset, Live 0x0000000000000000 (PO)
        vboxnetadp 28672 0 - Live 0x0000000000000000 (OE)
        vboxdrv 696320 2 vboxnetadp,vboxnetflt, Live 0x0000000000000000 (OE)
        rpcsec_gss_krb5 36864 0 - Live 0xffffffffc1611000
        ipv6 450560 32 [permanent], Live 0x7f000000
        """
        proc_line = proc_line.strip()

        # NOTE: the address class was previously [0-9a-fA-f]; the "A-f" range
        # also covers '[', '\', ']', '^', '_', '`' and all uppercase letters,
        # so non-hex garbage could slip through. Fixed to a strict hex class.
        match = re.match(r'^(?P<name>\w+) (?P<size>\d+) (?P<refcount>\d+) '
                         r'(?P<deps>(?:\w|,|-|\[permanent\])*) (?P<state>\w+) '
                         r'0x[0-9a-fA-F]+(?: \((?P<taints>[A-Z]+)\))?',
                         proc_line)
        if not match:
            raise ValueError("Illegal line: %s" % proc_line)

        name = match.group("name")
        size = int(match.group("size"), 10)
        reference_count = int(match.group("refcount"), 10)
        state = SystemModuleState.from_str(match.group("state"))

        # "-" marks "no dependencies"; otherwise a trailing comma leaves an
        # empty element behind which is filtered out.
        dependencies = set()
        dependencies_str = match.group("deps")
        if dependencies_str != "-":
            dependencies = {dep for dep in dependencies_str.split(",") if dep != ""}

        # Taint letters only appear for tainted modules, e.g. "(OE)".
        taint_flags = set()
        taint_flags_str = match.group("taints")
        if taint_flags_str:
            taint_flags = {SystemModuleTaintFlag.from_str(flag) for flag in taint_flags_str.strip()}

        return SystemModule(name,
                            size,
                            reference_count,
                            state,
                            dependencies,
                            taint_flags)

    @property
    def name(self) -> str:
        return self._name

    @property
    def size(self) -> int:
        return self._size

    @property
    def reference_count(self) -> int:
        return self._reference_count

    @property
    def state(self) -> SystemModuleState:
        return self._state

    @property
    def dependencies(self) -> Set[str]:
        # Return a copy so callers cannot mutate internal state.
        return set(self._dependencies)

    @property
    def taint_flags(self) -> Set[SystemModuleTaintFlag]:
        # Return a copy so callers cannot mutate internal state.
        return set(self._taint_flags)


def get_system_modules(modules_file: str = "/proc/modules") -> List[SystemModule]:
    """
    Gets the modules loaded into the kernel from /proc/modules

    @param modules_file: file to read the module list from (default: /proc/modules)
    @return: list of parsed module objects
    @raise SystemModuleException: if the file cannot be read or a line cannot be parsed
    """
    module_list = []
    try:
        with open(modules_file, 'rt') as fp:
            for line in fp:
                if line.strip() == "":
                    continue
                module_list.append(SystemModule.from_proc_modules_line(line.strip()))

    except Exception as e:
        # Chain the original cause for easier debugging.
        raise SystemModuleException(str(e)) from e

    return module_list