5 | Maintainer: @highlightjs/core-team
6 | Website: https://highlightjs.org/
7 | License: see project LICENSE
8 | Touched: 2021
9 | */pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}.hljs{background:#f3f3f3;color:#444}.hljs-comment{color:#697070}.hljs-punctuation,.hljs-tag{color:#444a}.hljs-tag .hljs-attr,.hljs-tag .hljs-name{color:#444}.hljs-attribute,.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-name,.hljs-selector-tag{font-weight:700}.hljs-deletion,.hljs-number,.hljs-quote,.hljs-selector-class,.hljs-selector-id,.hljs-string,.hljs-template-tag,.hljs-type{color:#800}.hljs-section,.hljs-title{color:#800;font-weight:700}.hljs-link,.hljs-operator,.hljs-regexp,.hljs-selector-attr,.hljs-selector-pseudo,.hljs-symbol,.hljs-template-variable,.hljs-variable{color:#ab5656}.hljs-literal{color:#695}.hljs-addition,.hljs-built_in,.hljs-bullet,.hljs-code{color:#397300}.hljs-meta{color:#1f7199}.hljs-meta .hljs-string{color:#38a}.hljs-emphasis{font-style:italic}.hljs-strong{font-weight:700}
--------------------------------------------------------------------------------
/scripts/_elements/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/abrignoni/ALEAPP/f611ae42161895e6051f663f371b6e10040b8567/scripts/_elements/logo.png
--------------------------------------------------------------------------------
/scripts/_elements/timeline/images/arrow-left.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scripts/_elements/timeline/images/arrow-right.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/scripts/artifacts/BadooConnections.py:
--------------------------------------------------------------------------------
1 | # Get Information related to possible connections (messages, views etc) of the user with other users from the Badoo app (com.badoo.mobile)
2 | # Author: Fabian Nunes {fabiannunes12@gmail.com}
3 | # Date: 2023-05-03
4 | # Version: 1.0
5 | # Requirements: Python 3.7 or higher
6 |
7 | from scripts.artifact_report import ArtifactHtmlReport
8 | from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
9 |
10 |
def get_badoo_conn(files_found, report_folder, seeker, wrap_text):
    """Parse the Badoo `connection` table (possible connections of the user:
    messages, views, etc.) and produce an HTML report, TSV export and
    timeline entries.

    files_found: glob matches for CombinedConnectionsDatabase (may include
    SQLite sidecar files, which are filtered out below).
    """
    logfunc("Processing data for Badoo Conections")
    # Drop the SQLite -wal/-shm sidecar files so only the main db remains.
    files_found = [x for x in files_found if not x.endswith('wal') and not x.endswith('shm')]
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)

    cursor = db.cursor()
    cursor.execute('''
    Select id, name, gender, origin, datetime("sort_timestamp"/1000,'unixepoch'), avatar_url, display_message
    from connection
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        logfunc(f"Found {usageentries} entries in connection")
        report = ArtifactHtmlReport('Chat')
        report.start_artifact_report(report_folder, 'Badoo Connections')
        report.add_script()
        data_headers = ('ID', 'Name', 'Gender', 'Origin', 'Sort Timestamp', 'Avatar URL', 'Display Message')
        data_list = []

        for row in all_rows:
            # BUG FIX: avatar_url was previously assigned a stray string
            # literal instead of column 5 (avatar_url) of the query result.
            data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6]))

        table_id = "BadooConnection"
        report.write_artifact_data_table(data_headers, data_list, file_found, table_id=table_id, html_escape=False)
        report.end_artifact_report()

        tsvname = 'Badoo - Connections'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Badoo - Connections'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Badoo Connection data available')

    db.close()
57 |
58 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "BadooConnections": (
        "Badoo",
        # FIX: trailing comma makes this a real 1-tuple; bare parentheses
        # around a string are just a parenthesized string.
        ('*com.badoo.mobile/databases/CombinedConnectionsDatabase*',),
        get_badoo_conn)
}
65 |
--------------------------------------------------------------------------------
/scripts/artifacts/BashHistory.py:
--------------------------------------------------------------------------------
1 | import codecs
2 | import csv
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
6 |
def get_BashHistory(files_found, report_folder, seeker, wrap_text):
    """Read a .bash_history file and report each command with its order.

    Lines are reported in file order; `counter` gives the 1-based position.
    """
    data_list = []
    file_found = str(files_found[0])
    counter = 1
    # utf-8-sig transparently drops a BOM if one is present.
    with codecs.open(file_found, 'r', 'utf-8-sig') as histfile:
        for line in histfile:
            # BUG FIX: strip the trailing newline so the command does not
            # embed a line break into the HTML table / TSV row.
            data_list.append((counter, line.rstrip('\r\n')))
            counter += 1

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Bash History')
        report.start_artifact_report(report_folder, 'Bash History')
        report.add_script()
        data_headers = ('Order', 'Command')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Bash History'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Bash History file available')
29 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "Bash History": (
        "Bash History",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/.bash_history',),
        get_BashHistory)
}
--------------------------------------------------------------------------------
/scripts/artifacts/ChessComAccount.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 | import xml.etree.ElementTree as ET
4 | import datetime
5 |
6 | from scripts.artifact_report import ArtifactHtmlReport
7 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
8 |
def get_ChessComAccount(files_found, report_folder, seeker, wrap_text):
    """Extract Chess.com login credentials and session details from the app's
    shared-preference XML files and write an HTML report and TSV export.
    """
    title = "Chess.com Account"

    # Data results
    data_headers = ('Key', 'Value')
    data_list = []

    # BUG FIX: files_found order is not guaranteed to match the path-pattern
    # order, so select each file by name instead of by index, and tolerate
    # either file being absent.
    credentials_file = None
    session_file = None
    for found in files_found:
        found = str(found)
        if found.endswith('login_credentials.xml'):
            credentials_file = found
        elif found.endswith('session_preferences.xml'):
            session_file = found

    # Login credentials
    if credentials_file is not None:
        cred_root = ET.parse(credentials_file).getroot()
        for item in cred_root.findall("string"):
            key = item.attrib.get("name")
            if key in ["pref_username_or_email", "pref_password"]:
                data_list.append([key, item.text])

    # Session
    if session_file is not None:
        sesh_root = ET.parse(session_file).getroot()
        for item in sesh_root.findall("string"):
            key = item.attrib.get("name")
            value = item.text
            if key in ["pref_username", "pref_email", "pref_member_since"]:
                if key == "pref_member_since":
                    # Stored as unix epoch seconds; render as ISO-8601.
                    value = datetime.datetime.utcfromtimestamp(int(value)).isoformat(sep=" ", timespec="seconds")
                data_list.append([key, value])

    # Reporting
    description = "Chess.com Account"
    report = ArtifactHtmlReport(title)
    report.start_artifact_report(report_folder, title, description)
    report.add_script()
    report.write_artifact_data_table(data_headers, data_list, credentials_file or session_file, html_escape=False)
    report.end_artifact_report()

    tsv(report_folder, data_headers, data_list, title)
50 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "ChessComAcct": (
        "Chess.com",
        ('*/com.chess/shared_prefs/com.chess.app.login_credentials.xml', '*/data/data/com.chess/shared_prefs/com.chess.app.session_preferences.xml'),
        get_ChessComAccount)
}
57 |
58 |
59 |
--------------------------------------------------------------------------------
/scripts/artifacts/ChessComFriends.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
def get_ChessComFriends(files_found, report_folder, seeker, wrap_text):
    """Report the Chess.com friends list (names and last login time)."""

    title = "Chess.com Friends"

    # Chess database
    db_filepath = str(files_found[0])
    # CONSISTENCY FIX: open the evidence database read-only via the shared
    # helper (as the other artifacts here do) instead of a writable
    # sqlite3.connect which can modify the source file.
    conn = open_sqlite_db_readonly(db_filepath)
    c = conn.cursor()
    sql = """SELECT friends.id AS "ID", friends.username AS "Username", friends.first_name AS "First Name", friends.last_name AS "Last Name", datetime(friends.last_login_date, 'unixepoch') AS "Last Login" FROM friends"""
    c.execute(sql)
    results = c.fetchall()
    conn.close()

    # Data results
    data_headers = ('ID', 'Username', 'First Name', 'Last Name', 'Last Login')
    data_list = results

    # ROBUSTNESS: skip report generation when the table is empty, matching
    # the other artifact modules.
    if not data_list:
        logfunc('No Chess.com Friends data available')
        return

    # Reporting
    description = "Chess.com Friends"
    report = ArtifactHtmlReport(title)
    report.start_artifact_report(report_folder, title, description)
    report.add_script()
    report.write_artifact_data_table(data_headers, data_list, db_filepath, html_escape=False)
    report.end_artifact_report()

    tsv(report_folder, data_headers, data_list, title)
33 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "ChessComFriends": (
        "Chess.com",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.chess/databases/chess-database*',),
        get_ChessComFriends)
}
40 |
41 |
--------------------------------------------------------------------------------
/scripts/artifacts/ChessComGames.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 | import xml.etree.ElementTree as ET
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
7 |
def get_ChessComGames(files_found, report_folder, seeker, wrap_text):
    """Report the user's Chess.com daily games (opponents, times, results).

    The logged-in username is read from the session-preferences XML and used
    to restrict games to those the user actually played in.
    """

    title = "Chess.com Games"

    # Username
    username = "None"
    session_file = list(filter(lambda x: "xml" in x, files_found))[0]
    sesh_tree = ET.parse(session_file)
    sesh_root = sesh_tree.getroot()
    sesh_strings = sesh_root.findall("string")
    for item in sesh_strings:
        key = item.attrib.get("name")
        if key == "pref_username":
            username = item.text

    # Chess database
    db_filepath = list(filter(lambda x: "chess-database" in x, files_found))[0]
    # CONSISTENCY FIX: read-only open via the shared helper instead of a
    # writable sqlite3.connect on an evidence file.
    conn = open_sqlite_db_readonly(db_filepath)
    c = conn.cursor()
    # SECURITY FIX: username came from an untrusted XML file and was
    # interpolated into the SQL with an f-string; use a parameterized query.
    # Also use single quotes for SQL string literals ('Friend'), since
    # double quotes are identifier quotes in standard SQL.
    sql = """SELECT datetime(daily_games.game_start_time, 'unixepoch') AS "First Move", datetime(daily_games.timestamp, 'unixepoch') AS "Last Move", daily_games.game_id AS "Game ID", daily_games.white_username AS "White", daily_games.black_username AS "Black", CASE daily_games.is_opponent_friend WHEN 1 THEN 'Friend' WHEN 0 THEN 'User' ELSE 'ERROR' END AS "Friend Status", daily_games.result_message AS "Result" FROM daily_games WHERE daily_games.white_username = ? OR daily_games.black_username = ? ORDER BY daily_games.timestamp"""
    c.execute(sql, (username, username))
    results = c.fetchall()
    conn.close()

    # Data results
    data_headers = ('First Move', 'Last Move', 'Game ID', 'White', 'Black', 'Friend Status', 'Result')
    data_list = results

    # Reporting
    description = "Chess.com Games"
    report = ArtifactHtmlReport(title)
    report.start_artifact_report(report_folder, title, description)
    report.add_script()
    report.write_artifact_data_table(data_headers, data_list, db_filepath, html_escape=False)
    report.end_artifact_report()

    tsv(report_folder, data_headers, data_list, title)
45 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "ChessComGames": (
        "Chess.com",
        ('*/com.chess/databases/chess-database*', '*/data/data/com.chess/shared_prefs/com.chess.app.session_preferences.xml'),
        get_ChessComGames)
}
52 |
53 |
54 |
--------------------------------------------------------------------------------
/scripts/artifacts/ChessComMessages.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
def get_ChessComMessages(files_found, report_folder, seeker, wrap_text):
    """Report Chess.com chat messages (time, conversation, sender, content)."""

    title = "Chess.com Messages"

    # Chess database
    db_filepath = str(files_found[0])
    # CONSISTENCY FIX: read-only open via the shared helper instead of a
    # writable sqlite3.connect on an evidence file.
    conn = open_sqlite_db_readonly(db_filepath)
    c = conn.cursor()
    sql = """SELECT datetime(messages.created_at, 'unixepoch') AS Sent, messages.conversation_id AS Conversation, messages.sender_username AS Sender, messages.content AS Message FROM messages"""
    c.execute(sql)
    results = c.fetchall()
    conn.close()

    # Data results
    data_headers = ('Sent', 'Conversation', 'Sender', 'Message')
    data_list = results

    # ROBUSTNESS: skip report generation when the table is empty, matching
    # the other artifact modules.
    if not data_list:
        logfunc('No Chess.com Messages data available')
        return

    # Reporting
    description = "Chess.com Messages"
    report = ArtifactHtmlReport(title)
    report.start_artifact_report(report_folder, title, description)
    report.add_script()
    report.write_artifact_data_table(data_headers, data_list, db_filepath, html_escape=False)
    report.end_artifact_report()

    tsv(report_folder, data_headers, data_list, title)
33 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "ChessComMessages": (
        "Chess.com",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.chess/databases/chess-database*',),
        get_ChessComMessages)
}
40 |
41 |
--------------------------------------------------------------------------------
/scripts/artifacts/ChessWithFriends.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
def get_ChessWithFriends(files_found, report_folder, seeker, wrap_text):
    """Report Chess With Friends chat messages joined with user name/email."""

    source_db = str(files_found[0])
    db = open_sqlite_db_readonly(source_db)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    chat_messages.chat_message_id,
    users.name,
    users.email_address,
    chat_messages.message,
    chat_messages.created_at
    FROM
    chat_messages
    INNER JOIN
    users
    ON
    chat_messages.user_id=users.user_id
    ORDER BY
    chat_messages.created_at DESC
    ''')

    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Chats')
        report.start_artifact_report(report_folder, 'Chess With Friends')
        report.add_script()
        data_headers = ('Message_ID','User_Name','User_Email','Chat_Message','Chat_Message_Creation' )
        # One output row per message: id, name, email, text, creation time.
        data_list = [(entry[0], entry[1], entry[2], entry[3], entry[4]) for entry in all_rows]

        report.write_artifact_data_table(data_headers, data_list, source_db)
        report.end_artifact_report()

        tsvname = 'Chess With Friends Chats'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Chess With Friends data available')

    db.close()
49 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "ChessWithFriends": (
        "Chats",
        ('*/com.zynga.chess.googleplay/databases/wf_database.sqlite', '*/data/data/com.zynga.chess.googleplay/db/wf_database.sqlite'),
        get_ChessWithFriends)
}
56 |
--------------------------------------------------------------------------------
/scripts/artifacts/GarminNotifications.py:
--------------------------------------------------------------------------------
1 | # Get Information relative to the notifications stored in the database of the Garmin Connect Mobile application
2 | # Author: Fabian Nunes {fabiannunes12@gmail.com}
3 | # Date: 2023-02-24
4 | # Version: 1.0
5 | # Requirements: Python 3.7 or higher
6 |
7 | from scripts.artifact_report import ArtifactHtmlReport
8 | from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
9 |
10 |
def get_garmin_notifications(files_found, report_folder, seeker, wrap_text):
    """Report notifications stored by Garmin Connect Mobile, with HTML/TSV
    output and timeline entries."""
    logfunc("Processing data for Garmin Notifications")
    db_path = str(files_found[0])
    db = open_sqlite_db_readonly(db_path)

    cursor = db.cursor()
    cursor.execute('''
    Select
    status,
    datetime("statusTimestamp"/1000,'unixepoch'),
    title,
    message,
    type,
    category,
    packageName
    from notification_info
    ''')

    rows = cursor.fetchall()
    total = len(rows)
    if total > 0:
        logfunc(f"Found {total} notifications")
        report = ArtifactHtmlReport('Garmin - Notifications')
        report.start_artifact_report(report_folder, 'Garmin - Notifications')
        report.add_script()
        data_headers = ('Status', 'Status Timestamp', 'Title', 'Message', 'Type', 'Category', 'Package Name')
        # One output row per notification, columns in query order.
        data_list = [(entry[0], entry[1], entry[2], entry[3], entry[4], entry[5], entry[6]) for entry in rows]

        table_id = "garmin_notifications"
        # Column 1 holds the timestamp used for the date filter widget.
        report.filter_by_date(table_id, 1)
        report.write_artifact_data_table(data_headers, data_list, db_path, table_id=table_id)
        report.end_artifact_report()

        tsvname = 'Notifications'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Notifications'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Garmin Notifications data available')

    db.close()
56 |
57 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "GarminNotifications": (
        "Garmin-Notifications",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.garmin.android.apps.connectmobile/databases/notification-database*',),
        get_garmin_notifications)
}
64 |
--------------------------------------------------------------------------------
/scripts/artifacts/GarminPersistent.py:
--------------------------------------------------------------------------------
1 | # Get Information stored in the Garmin Persistent json file
2 | # Author: Fabian Nunes {fabiannunes12@gmail.com}
3 | # Date: 2023-02-24
4 | # Version: 1.0
5 | # Requirements: Python 3.7 or higher, json and datetime
6 | import datetime
7 | import json
8 |
9 | from scripts.artifact_report import ArtifactHtmlReport
10 | from scripts.ilapfuncs import logfunc, tsv
11 |
12 |
def get_presisted(files_found, report_folder, seeker, wrap_text):
    """Report token/credential fields from Garmin's PersistedInstallation
    JSON file (Fid, auth/refresh tokens and token timing)."""
    # Fields of interest in the JSON document.
    wanted = ("Fid", "AuthToken", "RefreshToken", "TokenCreationEpochInSecs", "ExpiresInSecs")
    # key -> display value for the report table.
    user_info = {}

    logfunc("Processing data for Garmin Presistent file")
    target = str(files_found[0])
    logfunc("Processing file: " + target)
    # Parse the JSON document and collect the wanted fields.
    with open(target, "r") as handle:
        payload = json.load(handle)
        for field in wanted:
            if field not in payload:
                continue
            if field == "TokenCreationEpochInSecs":
                # Epoch seconds -> human-readable UTC alongside the raw value.
                stamp = datetime.datetime.utcfromtimestamp(payload[field])
                user_info[field] = str(payload[field]) + " (" + str(stamp) + ")"
            elif field == "ExpiresInSecs":
                # Lifetime is relative to the creation epoch; show the absolute expiry.
                stamp = datetime.datetime.utcfromtimestamp(payload[field] + payload["TokenCreationEpochInSecs"])
                user_info[field] = str(payload[field]) + " (" + str(stamp) + ")"
            else:
                user_info[field] = payload[field]

    if len(user_info) > 0:
        logfunc("Found Garmin Presistent file")
        report = ArtifactHtmlReport('Persistent')
        report.start_artifact_report(report_folder, 'Persistent')
        report.add_script()
        data_headers = ('Name', 'Value')
        data_list = [(key, value) for key, value in user_info.items()]
        report.write_artifact_data_table(data_headers, data_list, target)
        report.end_artifact_report()
        tsvname = 'Garmin Log'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc("No Garmin Presistent data found")
50 |
51 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "GarminPresistent": (
        "Garmin-Files",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.garmin.android.apps.connectmobile/files/PersistedInstallation*',),
        get_presisted)
}
58 |
--------------------------------------------------------------------------------
/scripts/artifacts/GarminSync.py:
--------------------------------------------------------------------------------
1 | # Get Information related to the sync process stored in the sync_cache database file
2 | # Author: Fabian Nunes {fabiannunes12@gmail.com}
3 | # Date: 2023-02-24
4 | # Version: 1.0
5 | # Requirements: Python 3.7 or higher
6 |
7 | from scripts.artifact_report import ArtifactHtmlReport
8 | from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
9 |
10 |
def get_garmin_sync(files_found, report_folder, seeker, wrap_text):
    """Report Garmin device sync audit entries from the sync_cache database,
    with HTML/TSV output and timeline entries."""
    logfunc("Processing data for Garmin Sync")
    db_path = str(files_found[0])
    db = open_sqlite_db_readonly(db_path)

    # Get information from the table device_sync_audit
    cursor = db.cursor()
    cursor.execute('''
    Select
    device_info,
    audit_text,
    app_version,
    datetime("created_timestamp"/1000,'unixepoch')
    from device_sync_audit
    ''')

    rows = cursor.fetchall()
    total = len(rows)
    if total > 0:
        logfunc(f"Found {total} entries in device_sync_audit")
        report = ArtifactHtmlReport('Sync')
        report.start_artifact_report(report_folder, 'Sync')
        report.add_script()
        data_headers = ('Device Info', 'Audit Text', 'App Version', 'Created Timestamp')
        # One output row per audit entry, columns in query order.
        data_list = [(entry[0], entry[1], entry[2], entry[3]) for entry in rows]

        # Filter by date (column 3 holds the timestamp).
        table_id = "GarminSync"
        report.filter_by_date(table_id, 3)
        report.write_artifact_data_table(data_headers, data_list, db_path, table_id=table_id)
        report.end_artifact_report()

        tsvname = 'Garmin - Sync'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Garmin - Sync'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Garmin Sync data available')

    db.close()
55 |
56 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "GarminSync": (
        "Garmin-Sync",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.garmin.android.apps.connectmobile/databases/sync_cache*',),
        get_garmin_sync)
}
63 |
--------------------------------------------------------------------------------
/scripts/artifacts/HideX.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
def get_HideX(files_found, report_folder, seeker, wrap_text):
    """Report apps locked by HideX (com.flatfish.cal.privacy) from its
    p_lock_app table."""
    for file_found in files_found:
        file_found = str(file_found)

        # Skip -wal/-shm sidecars and anything that is not the main database.
        if not file_found.endswith('hidex.db'):
            continue

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        SELECT
        id,
        packageName,
        case isActive
            WHEN 0 then ''
            WHEN 1 then 'Yes'
        end
        FROM p_lock_app
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('HideX - Locked Apps')
            report.start_artifact_report(report_folder, 'HideX - Locked Apps')
            report.add_script()
            data_headers = ('ID','Package Name','Is Active')
            data_list = []
            for row in all_rows:
                data_list.append((row[0],row[1],row[2]))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'HideX'
            tsv(report_folder, data_headers, data_list, tsvname)
        else:
            logfunc('No HideX data available')

        # BUG FIX: db.close() previously ran for every file in the loop even
        # when no database had been opened for it (NameError / stale handle);
        # close only the connection opened in this iteration.
        db.close()
46 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "HideX": (
        # FIX: category was "GroupMe" — a copy-paste error from another
        # artifact; this module parses HideX (com.flatfish.cal.privacy).
        "HideX",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.flatfish.cal.privacy/databases/hidex.db*',),
        get_HideX)
}
53 |
--------------------------------------------------------------------------------
/scripts/artifacts/Turbo_AppUsage.py:
--------------------------------------------------------------------------------
# Artifact registration (v2 schema): metadata dict consumed by the plugin loader.
__artifacts_v2__ = {
    "Turbo_AppUsage": {
        "name": "Turbo_AppUsage",
        "description": "Parses application usage via Device Health Services",
        "author": "@KevinPagano3",
        "version": "0.0.1",
        "date": "2021-06-29",
        "requirements": "none",
        "category": "Device Health Services",
        "notes": "",
        # NOTE(review): parentheses around a single string yield a plain
        # string, not a 1-tuple — confirm the loader accepts a bare string.
        "paths": ('*/com.google.android.apps.turbo/shared_prefs/app_usage_stats.xml'),
        "function": "get_Turbo_AppUsage"
    }
}
15 |
16 | import datetime
17 | import struct
18 | import xml.etree.ElementTree as ET
19 |
20 | from scripts.artifact_report import ArtifactHtmlReport
21 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
22 |
def get_Turbo_AppUsage(files_found, report_folder, seeker, wrap_text):
    """Parse app-launch timestamps recorded by Device Health Services (Turbo).

    Each <string> element in app_usage_stats.xml holds
    'package.name#ts1,ts2,...' where every ts is a millisecond unix epoch of
    an app launch.
    """
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)
        tree = ET.parse(file_found)

        for elem in tree.iter(tag='string'):
            # ROBUSTNESS: skip empty elements and entries with no '#'
            # separator (the original would raise on both).
            if not elem.text or '#' not in elem.text:
                continue
            app_name, _, ts_blob = elem.text.partition('#')

            for raw_ts in ts_blob.split(','):
                timestamp = datetime.datetime.utcfromtimestamp(int(raw_ts) / 1000).strftime('%Y-%m-%d %H:%M:%S.%f')
                # BUG FIX: the original used strip('0'), which also strips
                # leading '0' characters and could leave a dangling '.';
                # trim only trailing fractional-second zeros.
                timestamp = timestamp.rstrip('0').rstrip('.')
                data_list.append((timestamp, app_name, file_found))

    if data_list:
        report = ArtifactHtmlReport('Turbo - Application Usage')
        report.start_artifact_report(report_folder, 'Turbo - Application Usage')
        report.add_script()
        data_headers = ('App Launch Timestamp','App Name','Source')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Turbo - Application Usage'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Turbo - Application Usage'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Turbo - Application Usage data available')
62 |
63 |
--------------------------------------------------------------------------------
/scripts/artifacts/Twitter.py:
--------------------------------------------------------------------------------
1 | # Twitter Searches
2 | # Author: Kevin Pagano (https://startme.stark4n6.com)
3 | # Date 2023-04-26
4 | # Version: 0.1
5 | # Requirements: None
6 |
7 | import sqlite3
8 | import textwrap
9 |
10 | from scripts.artifact_report import ArtifactHtmlReport
11 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, kmlgen
12 |
def get_Twitter(files_found, report_folder, seeker, wrap_text):
    """Report Twitter search history from the app's *-search.db database."""

    # BUG FIX: the original for/else 'continue' file-selection was malformed
    # and could fall through with a non-matching file; pick the search
    # database explicitly and bail out cleanly when it is absent.
    file_found = None
    for candidate in files_found:
        candidate = str(candidate)
        if candidate.endswith('-search.db'):
            file_found = candidate
            break

    if file_found is None:
        logfunc('No Twitter - Searches data available')
        return

    db = open_sqlite_db_readonly(file_found)

    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(time/1000,'unixepoch'),
    name,
    query,
    query_id,
    user_search_suggestion,
    topic_search_suggestion,
    latitude,
    longitude,
    radius,
    location,
    priority,
    score
    from search_queries
    ''')

    all_rows = cursor.fetchall()
    usageentries = len(all_rows)
    if usageentries > 0:
        report = ArtifactHtmlReport('Twitter - Searches')
        report.start_artifact_report(report_folder, 'Twitter - Searches')
        report.add_script()
        data_headers = ('Timestamp','Name','Query','Query ID','User Search Suggestion','Topic Search Suggestion','Latitude','Longitude','Radius','Location','Priority','Score')

        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Twitter - Searches'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Twitter - Searches'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Twitter - Searches data available')

    db.close()
67 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "twitter": (
        "Twitter",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.twitter.android/databases/*-search.db*',),
        get_Twitter)
}
74 |
--------------------------------------------------------------------------------
/scripts/artifacts/VerizonRDDAnalytics.py:
--------------------------------------------------------------------------------
1 | # Module Description: Parses Verizon RDD Analytics Battery History
2 | # Author: John Hyla
3 | # Date: 2023-07-07
4 | # Artifact version: 0.0.1
5 | # Requirements: none
6 |
7 | import os
8 | import sqlite3
9 | import datetime
10 |
11 | from scripts.artifact_report import ArtifactHtmlReport
12 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
13 |
def get_rdd_analytics(files_found, report_folder, seeker, wrap_text):
    """Parse Verizon RDD Analytics battery history from each matched database
    and report it (one report section per database file)."""

    for file_found in files_found:
        file_name = str(file_found)
        # BUG FIX: source_file was initialized to '' and never set, so the
        # TSV export lost its source path; record the file being parsed.
        source_file = file_name

        db = open_sqlite_db_readonly(file_name)
        cursor = db.cursor()
        try:
            cursor.execute('''
                    SELECT datetime(ActualTime/1000, "UNIXEPOCH") as actual_time,
                       FormattedTime,
                       BatteryLevel,
                       GPS,
                       Charging,
                       ScreenOn,
                       Brightness,
                       BatteryTemp
                    FROM TableBatteryHistory
                    ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except Exception as e:
            # The table may be missing in some app versions; log through the
            # framework (not bare print) and treat as no data.
            logfunc(f'Verizon RDD - Battery History query failed: {e}')
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Verizon RDD - Battery History')
            report.start_artifact_report(report_folder, 'Verizon RDD - Battery History')
            report.add_script()
            data_headers = ('ActualTime', 'FormattedTime', 'BatteryLevel', 'GPS', 'Charging', 'ScreenOn', 'Brightness', 'BatteryTemp')
            data_list = []
            for row in all_rows:
                data_list.append((row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7]))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'Verizon RDD - Battery History'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)
        else:
            logfunc('No Battery History found')

        db.close()

    return
64 |
65 |
# Artifact registration: category, tuple of search-path globs, parser function.
__artifacts__ = {
    "VerizonRDD-Battery": (
        "Verizon RDD Analytics",
        # FIX: trailing comma makes this a real 1-tuple of glob patterns.
        ('*/com.verizon.mips.services/databases/RDD_ANALYTICS_DATABASE',),
        get_rdd_analytics)
}
--------------------------------------------------------------------------------
/scripts/artifacts/WordsWithFriends.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_WordsWithFriends(files_found, report_folder, seeker, wrap_text):
    """Report Words With Friends chat messages joined with user name/email,
    with HTML/TSV output and timeline entries."""

    source_db = str(files_found[0])
    db = open_sqlite_db_readonly(source_db)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(messages.created_at/1000, 'unixepoch'),
    messages.conv_id,
    users.name,
    users.email_address,
    messages.text
    FROM
    messages
    INNER JOIN
    users
    ON
    messages.user_zynga_id=users.zynga_account_id
    ORDER BY
    messages.created_at DESC
    ''')

    all_rows = cursor.fetchall()
    if all_rows:
        report = ArtifactHtmlReport('Chats')
        report.start_artifact_report(report_folder, 'Words With Friends')
        report.add_script()
        data_headers = ('Chat_Message_Creation','Message_ID','User_Name','User_Email','Chat_Message' ) # Don't remove the comma, that is required to make this a tuple as there is only 1 element
        # One output row per message, columns in query order.
        data_list = [(entry[0], entry[1], entry[2], entry[3], entry[4]) for entry in all_rows]

        report.write_artifact_data_table(data_headers, data_list, source_db)
        report.end_artifact_report()

        tsvname = 'Words With Friends Chats'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Words with Friends Chats'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Words With Friends data available')

    db.close()
52 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "WordsWithFriends": (
        "Chats",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/com.zynga.words/db/wf_database.sqlite',),
        get_WordsWithFriends)
}
--------------------------------------------------------------------------------
/scripts/artifacts/Zapya.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import datetime
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_Zapya(files_found, report_folder, seeker, wrap_text):
    """Report Zapya file-transfer records, deriving direction and peer device ids."""
    file_found = str(files_found[0])
    source_file = file_found.replace(seeker.data_folder, '')
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT device, name, direction, createtime/1000, path, title FROM transfer
    ''')

    transfer_rows = cursor.fetchall()
    if len(transfer_rows) > 0:
        report = ArtifactHtmlReport('Zapya')
        report.start_artifact_report(report_folder, 'Zapya')
        report.add_script()
        data_headers = ('Device','Name','direction', 'fromid', 'toid', 'createtime','path', 'title')
        data_list = []

        for record in transfer_rows:
            sender_id = ''
            receiver_id = ''
            # direction == 1 marks an outgoing transfer; the peer device id
            # fills the matching from/to column and the other stays blank.
            if record[2] == 1:
                direction = 'Outgoing'
                receiver_id = record[0]
            else:
                direction = 'Incoming'
                sender_id = record[0]

            createtime = datetime.datetime.utcfromtimestamp(int(record[3])).strftime('%Y-%m-%d %H:%M:%S')
            data_list.append((record[0], record[1], direction, sender_id, receiver_id, createtime, record[4], record[5]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'Zapya', source_file)

        timeline(report_folder, 'Zapya', data_list, data_headers)
    else:
        logfunc('No Zapya data available')

    db.close()
50 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "Zapya": (
        "File Transfer",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/com.dewmobile.kuaiya.play/databases/transfer20.db*',),
        get_Zapya)
}
--------------------------------------------------------------------------------
/scripts/artifacts/accounts_de.py:
--------------------------------------------------------------------------------
__artifacts_v2__ = {
    "accounts_de": {
        "name": "Accounts_de",
        "description": "",
        "author": "@AlexisBrignoni",
        "creation_date": "2020-03-02",
        "last_update_date": "2025-03-14",
        "requirements": "none",
        "category": "Accounts",
        "notes": "",
        # Trailing comma makes "paths" a real one-element tuple; a bare
        # parenthesized string is just a string.
        "paths": ('*/system_de/*/accounts_de.db*',),
        "output_types": "standard",
        "artifact_icon": "user"
    }
}
16 |
17 |
18 | from scripts.ilapfuncs import artifact_processor, \
19 | get_file_path_list_checking_uid, get_results_with_extra_sourcepath_if_needed, \
20 | convert_unix_ts_to_utc, convert_ts_human_to_utc
21 |
22 |
@artifact_processor
def accounts_de(files_found, report_folder, seeker, wrap_text):
    """Report accounts joined with their debug-table actions from accounts_de.db."""
    source_path_list = get_file_path_list_checking_uid(files_found, "accounts_de.db", -2, "mirror")
    source_path = ""
    data_list = []

    query = '''
    SELECT
        last_password_entry_time_millis_epoch,
        accounts.type,
        accounts.name,
        debug_table.action_type,
        debug_table.time
    FROM accounts
    INNER JOIN debug_table on accounts._id=debug_table._id
    ORDER by time
    '''

    data_headers = (
        ('Last password entry', 'datetime'),
        'Account Type', 'Account Name', 'Action Type',
        ('Debug Time', 'datetime'))

    data_headers, data, source_path = get_results_with_extra_sourcepath_if_needed(source_path_list, query, data_headers)

    for entry in data:
        row = list(entry)
        # Column 0 is a unix ms-epoch; column 4 is a human-readable timestamp.
        row[0] = convert_unix_ts_to_utc(row[0])
        row[4] = convert_ts_human_to_utc(row[4])
        data_list.append(row)

    return data_headers, data_list, source_path
55 |
--------------------------------------------------------------------------------
/scripts/artifacts/adb_hosts.py:
--------------------------------------------------------------------------------
__artifacts_v2__ = {
    "adb_hosts": {
        "name": "ADB Hosts",
        "description": "Authentication keys used in the Android Debug Bridge (ADB) protocol \
to secure communication between a device and a computer.",
        "author": "@AlexisBrignoni",
        "creation_date": "2020-11-21",
        "last_update_date": "2025-03-15",
        "requirements": "none",
        "category": "Device Information",
        "notes": "",
        # Trailing comma makes "paths" a real one-element tuple; a bare
        # parenthesized string is just a string.
        "paths": ('*/misc/adb/adb_keys',),
        "output_types": ["html", "lava", "tsv"],
        "artifact_icon": "terminal"
    }
}
17 |
18 |
19 | from scripts.ilapfuncs import artifact_processor, \
20 | get_file_path, get_txt_file_content, device_info
21 |
22 |
@artifact_processor
def adb_hosts(files_found, report_folder, seeker, wrap_text):
    """
    Parse adb_keys and report the user@host identities recorded there.

    Each adb_keys line is "<public-key> <user@host>"; the second field is
    split on the first '@' into (username, hostname). Hosts containing
    'unknown' are skipped; every kept host is also sent to device info.
    """
    source_path = get_file_path(files_found, "adb_keys")
    data_list = []

    file = get_txt_file_content(source_path)
    for line in file:
        try:
            adb_host = line.split(" ")[1].rstrip('\n')
        except IndexError:
            # Fixed: was a bare `except: pass`, which also hid errors from
            # the reporting calls below. Only a missing second field (blank
            # or malformed line) is expected and skipped.
            continue
        if 'unknown' not in adb_host:
            device_info("ADB Hosts", "Hosts", adb_host, source_path)
            data_list.append(adb_host.split('@', 1))

    data_headers = ('Username', 'Hostname')

    return data_headers, data_list, source_path
--------------------------------------------------------------------------------
/scripts/artifacts/appLockerfishingnetdb.py:
--------------------------------------------------------------------------------
1 |
2 | from scripts.artifact_report import ArtifactHtmlReport
3 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
4 |
def get_appLockerfishingnetdb(files_found, report_folder, seeker, wrap_text):
    """
    Flag the encrypted 'Calculator' app-locker database.

    The database itself is not parsed here; for each located file the report
    gets a note about its contents, a link to decryption instructions, and
    the app's static decryption key.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        message = 'The located database is encrypted. It contains information regarding the source directory of the encrypted files, timestamp metadata, and original filenames.'
        decryptioninst = 'To decrypt follow the instructions at the following URL: https://theincidentalchewtoy.wordpress.com/2021/12/07/decrypting-the-calculator-apps/'
        keytodecrypt = 'Rny48Ni8aPjYCnUI'

        data_list.append((message, decryptioninst, keytodecrypt))

    if data_list:
        report = ArtifactHtmlReport('Calculator Locker Database')
        report.start_artifact_report(report_folder, 'Calculator Locker Database')
        report.add_script()
        # Fixed: headers previously read 'Encrypted Pattern'/'Decrypted
        # Pattern' (copied from the pattern artifact) but the columns actually
        # hold the note, the instructions URL and the static key.
        data_headers = ('Message', 'Decryption Instructions', 'Key To Decrypt')
        report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Media'])
        report.end_artifact_report()

        tsvname = f'Calculator Locker Database data'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Calculator Locker Database data available')
32 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "App Locker DB": (
        "Encrypting Media Apps",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/.privacy_safe/db/privacy_safe.db',),
        get_appLockerfishingnetdb)
}
--------------------------------------------------------------------------------
/scripts/artifacts/appLockerfishingnetpat.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from Crypto.Cipher import AES
3 | from Crypto.Util.Padding import unpad
4 | import xml.etree.ElementTree as ET
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
7 |
def get_appLockerfishingnetpat(files_found, report_folder, seeker, wrap_text):
    """Decrypt and report the lock pattern stored by the Calculator app locker.

    The pattern is AES-CBC encrypted in shared prefs under a fixed attribute
    name, using a hard-coded key/IV (the hex below decodes to
    'Rny48Ni8aPjYCnUI', the same static key the companion db artifact lists).
    """
    key_hex = '526e7934384e693861506a59436e5549'
    iv_hex = '526e7934384e693861506a59436e5549'
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)

        root = ET.parse(file_found).getroot()
        # The encrypted pattern sits in the <string> element with this name.
        encrypted = root.findall('./string[@name="85B064D26810275C89F1F2CC15E20B442E98874398F16F6717BBD5D34920E3F8"]')[0].text
        aes = AES.new(bytes.fromhex(key_hex), AES.MODE_CBC, bytes.fromhex(iv_hex))
        decrypted = unpad(aes.decrypt(bytes.fromhex(encrypted)), AES.block_size)

        data_list.append((encrypted, decrypted))

    if data_list:
        report = ArtifactHtmlReport('Calculator Locker Pattern')
        report.start_artifact_report(report_folder, 'Calculator Locker Pattern')
        report.add_script()
        data_headers = ('Encrypted Pattern', 'Decrypted Pattern')
        report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Media'])
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, f'Calculator Locker Pattern data')
    else:
        logfunc('No Calculator Locker Pattern data available')
41 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "App Locker Pat": (
        "Encrypting Media Apps",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/com.hld.anzenbokusufake/shared_prefs/share_privacy_safe.xml',),
        get_appLockerfishingnetpat)
}
--------------------------------------------------------------------------------
/scripts/artifacts/appopSetupWiz.py:
--------------------------------------------------------------------------------
1 | import os
2 | import datetime
3 | import xml.etree.ElementTree as ET
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, abxread, checkabx
6 |
def get_appopSetupWiz(files_found, report_folder, seeker, wrap_text):
    """Report Setup Wizard permission-use timestamps from appops.xml.

    Walks each <pkg> entry for com.google.android.setupwizard and collects
    the 't' attribute (a ms-epoch timestamp) found three levels below it.
    Handles both plain XML and Android Binary XML (ABX) serializations.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('appops.xml'):
            continue # Skip all other files

        data_list = []
        #check if file is abx (Android Binary XML); plain XML parses directly
        if (checkabx(file_found)):
            multi_root = False
            tree = abxread(file_found, multi_root)
        else:
            tree = ET.parse(file_found)
        root = tree.getroot()

        for elem in root.iter('pkg'):
            if elem.attrib['n'] == 'com.google.android.setupwizard':
                pkg = elem.attrib['n']
                for subelem in elem:
                    #print(subelem.attrib)
                    for subelem2 in subelem:
                        #print(subelem2.attrib)
                        for subelem3 in subelem2:
                            # 't' is a ms-epoch value; 0 or absent -> no timestamp
                            test = subelem3.attrib.get('t', 0)
                            if int(test) > 0:
                                timestamp = (datetime.datetime.utcfromtimestamp(int(subelem3.attrib['t'])/1000).strftime('%Y-%m-%d %H:%M:%S'))
                            else:
                                timestamp = ''
                            data_list.append((timestamp, pkg))
    # NOTE(review): data_list is created inside the loop above; if no
    # appops.xml is among files_found this raises NameError - confirm intended.
    if data_list:
        report = ArtifactHtmlReport('Appops.xml Setup Wizard')
        report.start_artifact_report(report_folder, 'Appops.xml Setup Wizard')
        report.add_script()
        data_headers = ('Timestamp','Package')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Appops Setup Wizard data'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Appops Setup Wizard data'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Appops Setup Wizard data available')
52 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "appopSetupWiz": (
        "Wipe & Setup",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/system/appops.xml',),
        get_appopSetupWiz)
}
--------------------------------------------------------------------------------
/scripts/artifacts/artGlobals.py:
--------------------------------------------------------------------------------
# Shared module-level version value (read via `import artGlobals` elsewhere).
# Initialized to 0 here; NOTE(review): presumably overwritten at runtime by
# the framework - confirm.
versionf = 0
--------------------------------------------------------------------------------
/scripts/artifacts/atrackerdetect.py:
--------------------------------------------------------------------------------
1 | import os
2 | import datetime
3 | import xml.etree.ElementTree as ET
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
6 |
def get_atrackerdetect(files_found, report_folder, seeker, wrap_text):
    """
    Parse Apple Tracker Detect shared preferences.

    Attributes named 'device_<MAC>_<description>' are reported as tracker
    entries (description, MAC, element text); other named attributes are
    reported as plain key/value pairs.
    """
    data_list=[]
    for file_found in files_found:
        file_found = str(file_found)

        tree = ET.parse(file_found)
        root = tree.getroot()

        for elem in root.iter():
            attribute = (elem.attrib)
            if attribute:
                data = attribute.get('name')
                if data is None:
                    # Fixed: elements with attributes but no 'name' previously
                    # crashed on data.startswith(); skip them.
                    continue
                if data.startswith('device'):
                    parts = data.split('_', 2)
                    if len(parts) == 3:
                        # device_<mac>_<description>
                        data_list.append((parts[2], parts[1], elem.text))
                    else:
                        # Fixed: malformed 'device*' keys previously raised
                        # IndexError; report them as plain pairs instead.
                        data_list.append((data, attribute.get('value'), ''))
                else:
                    data_list.append((data, attribute.get('value'),''))

    if data_list:
        report = ArtifactHtmlReport('Apple Tracker Detect Prefs')
        report.start_artifact_report(report_folder, 'Apple Tracker Detect Prefs')
        report.add_script()
        data_headers = ('Key', 'Value', 'Milliseconds from Last Boot Time')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Apple Tracker Detect Prefs'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Apple Tracker Detect Prefs data available')
39 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "atrackerdetect": (
        "AirTags",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/com.apple.trackerdetect/shared_prefs/com.apple.trackerdetect_preferences.xml',),
        get_atrackerdetect)
}
--------------------------------------------------------------------------------
/scripts/artifacts/bittorrentClientpref.py:
--------------------------------------------------------------------------------
1 | import os
2 | import datetime
3 | import xml.etree.ElementTree as ET
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, abxread, checkabx, logdevinfo
6 |
def timestampcalc(timevalue):
    """Convert a millisecond unix-epoch value to a 'YYYY-MM-DD HH:MM:SS' UTC string."""
    # fromtimestamp(..., tz=utc) replaces datetime.utcfromtimestamp, which is
    # deprecated since Python 3.12; the formatted output is identical.
    ts = datetime.datetime.fromtimestamp(int(timevalue) / 1000, datetime.timezone.utc)
    return ts.strftime('%Y-%m-%d %H:%M:%S')
10 |
def get_bittorrentClientpref(files_found, report_folder, seeker, wrap_text):
    """Report key/value pairs from the BitTorrent client preferences XML (ABX or plain)."""
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        # ABX-serialized prefs need the abx reader; plain XML parses directly.
        if checkabx(file_found):
            tree = abxread(file_found, False)
        else:
            tree = ET.parse(file_found)

        root = tree.getroot()

        for elem in root:
            key = elem.attrib['name']
            value = elem.attrib.get('value')
            text = elem.text
            # 'BornOn' carries a ms-epoch value; render it human-readable.
            if key == 'BornOn':
                value = timestampcalc(value)
            data_list.append((key, value, text))

    if data_list:
        report = ArtifactHtmlReport('Bittorent Client Preferences')
        report.start_artifact_report(report_folder, 'Bittorent Client Preferences')
        report.add_script()
        data_headers = ('Key', 'Value', 'Text')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, f'Bittorent Client Preferences')
    else:
        logfunc('No Bittorent Client Preferences data available')
47 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "BitTorrent Prefs": (
        "BitTorrent",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/com.bittorrent.client/shared_prefs/com.bittorrent.client_preferences.xml',),
        get_bittorrentClientpref)
}
--------------------------------------------------------------------------------
/scripts/artifacts/bittorrentDlhist.py:
--------------------------------------------------------------------------------
1 | import bencoding
2 | import hashlib
3 | import datetime
4 | import textwrap
5 |
6 | from scripts.artifact_report import ArtifactHtmlReport
7 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
8 |
def timestampcalc(timevalue):
    """Convert a millisecond unix-epoch value to a 'YYYY-MM-DD HH:MM:SS' UTC string."""
    # fromtimestamp(..., tz=utc) replaces datetime.utcfromtimestamp, which is
    # deprecated since Python 3.12; the formatted output is identical.
    ts = datetime.datetime.fromtimestamp(int(timevalue) / 1000, datetime.timezone.utc)
    return ts.strftime('%Y-%m-%d %H:%M:%S')
12 |
def get_bittorrentDlhist(files_found, report_folder, seeker, wrap_text):
    """
    Parse BitTorrent dlhistory bencoded config files.

    Each entry under the 'records' key yields (timestamp 'a', filename 'n',
    download path 's'), reported together with the wrapped source path.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, 'rb') as f:
            decodedDict = bencoding.bdecode(f.read())

        for key, value in decodedDict.items():
            if key.decode() == 'records':
                for x in value:
                    time = timestampcalc(x[b'a'])
                    filename = x[b'n'].decode()
                    filepath = x[b's'].decode()
                    # Fixed: removed stray debug print(filepath).
                    data_list.append((time,filename,filepath,textwrap.fill(file_found.strip(), width=25)))

    # Reporting - fixed: only emit the report when records were found, and
    # log a message otherwise, matching the sibling artifacts.
    if data_list:
        title = "BitTorrent Download Info"
        report = ArtifactHtmlReport(title)
        report.start_artifact_report(report_folder, title)
        report.add_script()
        data_headers = ('Record Timestamp', 'Filename', 'Download File Path', 'Source File')
        report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Data'])
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, title)
    else:
        logfunc('No BitTorrent Download Info data available')
41 |
# key: (category, path glob tuple, entry function)
__artifacts__ = {
    "bittorrentDlhist": (
        "BitTorrent",
        ('*/dlhistory*.config.bak','*/dlhistory*.config'),
        get_bittorrentDlhist)
}
--------------------------------------------------------------------------------
/scripts/artifacts/blueskysearches.py:
--------------------------------------------------------------------------------
__artifacts_v2__ = {
    "blueskysearches": {
        "name": "Bluesky",
        "description": "User generated searches",
        "author": "DFIRcon 2025 Miami",
        "version": "0.0.1",
        "date": "2024-11-15",
        "requirements": "none",
        "category": "Bluesky",
        "notes": "",
        # Trailing comma makes "paths" a real one-element tuple; a bare
        # parenthesized string is just a string.
        "paths": ('*/xyz.blueskyweb.app/databases/RKStorage*',),
        "function": "get_blueskysearches"
    }
}
15 |
16 | import sqlite3
17 | import json
18 |
19 | from scripts.artifact_report import ArtifactHtmlReport
20 | from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
21 |
def get_blueskysearches(files_found, report_folder, seeker, wrap_text):
    """Report the user's saved searches from Bluesky's RKStorage database.

    The 'searchHistory' row of catalystLocalStorage holds a JSON array of
    search strings; each string becomes one report row.
    """
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('RKStorage'):
            db = open_sqlite_db_readonly(file_found)
            #SQL QUERY TIME!
            cursor = db.cursor()
            cursor.execute('''
            SELECT
                key, value
            FROM catalystLocalStorage
            WHERE key like 'searchHistory'
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)

            if usageentries > 0:
                for row in all_rows:

                    # value is a JSON-encoded list of search strings
                    searches = row[1]
                    searches = json.loads(searches)
                    for item in searches:
                        data_list.append((item,))
            db.close()

        else:
            continue

    if data_list:
        description = 'Bluesky'
        report = ArtifactHtmlReport('Bluesky user generated searches')
        report.start_artifact_report(report_folder, 'Bluesky Searches', description)
        report.add_script()
        data_headers = ('Searches',)
        report.write_artifact_data_table(data_headers, data_list, file_found,html_escape=False)
        report.end_artifact_report()

        tsvname = 'Bluesky Searches'
        tsv(report_folder, data_headers, data_list, tsvname)


    else:
        logfunc('No Bluesky searches available')
70 |
--------------------------------------------------------------------------------
/scripts/artifacts/browserlocation.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import datetime
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_browserlocation(files_found, report_folder, seeker, wrap_text):
    """Report cached geolocation fixes from the stock Android browser.

    Reads the CachedPosition table of CachedGeoposition.db; timestamps are
    stored in milliseconds and reduced to seconds in the query.
    """
    source_file = ''

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('-db'):
            # NOTE(review): records the source path for '-db' files but does
            # not parse them - presumably sidecar files; confirm.
            source_file = file_found.replace(seeker.data_folder, '')
            continue

        source_file = file_found.replace(seeker.data_folder, '')

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        try:
            cursor.execute('''
            SELECT timestamp/1000, latitude, longitude, accuracy FROM CachedPosition;
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
        except:
            # Table may be missing in some app versions; treat as no data.
            usageentries = 0

        if usageentries > 0:
            report = ArtifactHtmlReport('Browser Locations')
            report.start_artifact_report(report_folder, 'Browser Locations')
            report.add_script()
            data_headers = ('timestamp','latitude', 'longitude', 'accuracy') # Don't remove the comma, that is required to make this a tuple as there is only 1 element
            data_list = []
            for row in all_rows:
                # row[0] was already divided to whole seconds in the query
                timestamp = datetime.datetime.utcfromtimestamp(int(row[0])).strftime('%Y-%m-%d %H:%M:%S')
                data_list.append((timestamp, row[1], row[2], row[3]))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'Browser Locations'
            tsv(report_folder, data_headers, data_list, tsvname, source_file)

        else:
            logfunc('No Browser Locations found')

        db.close()
52 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "Browser Location": (
        "GEO Location",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/com.android.browser/app_geolocation/CachedGeoposition.db',),
        get_browserlocation)
}
59 |
--------------------------------------------------------------------------------
/scripts/artifacts/burnerContacts.py:
--------------------------------------------------------------------------------
__artifacts_v2__ = {
    "burnerContacts": {
        "name": "Burner: Second Phone Number",
        "description": "Parses Burner Contacts",
        "author": "Heather Charpentier (With Tons of Help from Alexis Brignoni!)",
        "version": "0.0.1",
        "date": "2024-02-15",
        "requirements": "none",
        "category": "Burner",
        "notes": "",
        # Trailing comma makes "paths" a real one-element tuple; a bare
        # parenthesized string is just a string.
        "paths": ('*/data/data/com.adhoclabs.burner/databases/burnerDatabase.db*',),
        "function": "get_burnerContacts"
    }
}
15 |
16 | import sqlite3
17 |
18 | from scripts.artifact_report import ArtifactHtmlReport
19 | from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly, convert_ts_human_to_utc, convert_utc_human_to_timezone
20 |
def get_burnerContacts(files_found, report_folder, seeker, wrap_text):
    """Report contact ids and phone numbers stored by the Burner app."""
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)

        if file_found.endswith('burnerDatabase.db'):
            db = open_sqlite_db_readonly(file_found)
            #SQL QUERY TIME!
            cursor = db.cursor()
            cursor.execute('''
            SELECT
            id as 'User ID',
            phoneNumber as 'Phone Number'
            FROM ContactEntity
            ''')

            all_rows = cursor.fetchall()
            usageentries = len(all_rows)
            if usageentries > 0:
                for row in all_rows:

                    data_list.append((row[0],row[1]))
            db.close()

        else:
            continue

    if data_list:
        description = 'Burner: Second Phone Number'
        report = ArtifactHtmlReport('Burner Contacts')
        report.start_artifact_report(report_folder, 'Burner Contacts', description)
        report.add_script()
        data_headers = ('User ID','Phone Number')
        report.write_artifact_data_table(data_headers, data_list, file_found,html_escape=False)
        report.end_artifact_report()

        tsvname = 'Burner Contacts'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Burner Contacts'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Burner data available')
67 |
--------------------------------------------------------------------------------
/scripts/artifacts/cmh.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import json
3 | import os
4 | import shutil
5 | import sqlite3
6 |
7 | from scripts.artifact_report import ArtifactHtmlReport
8 | from scripts.ilapfuncs import logfunc, tsv, timeline, kmlgen, is_platform_windows, open_sqlite_db_readonly
9 |
def get_cmh(files_found, report_folder, seeker, wrap_text):
    """Report geodata from Samsung's Context Media Hub (cmh.db) images table."""
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(images.datetaken /1000, "unixepoch") as datetaken,
    datetime(images.date_added, "unixepoch") as dateadded,
    datetime(images.date_modified, "unixepoch") as datemodified,
    images.title,
    images.bucket_display_name,
    images.latitude,
    images.longitude,
    location_view.address_text,
    location_view.uri,
    images._data,
    images.isprivate
    FROM images
    left join location_view
    on location_view._id = images._id
    ''')
    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Samsung CMH')
        report.start_artifact_report(report_folder, 'Geodata')
        report.add_script()
        data_headers = ('Timestamp', 'Date Added', 'Date Modified', 'Title', 'Bucket Name', 'Latitude', 'Longitude','Address', 'URI', 'Data Location', 'Is Private')
        # Rows come back already in header order; materialize them as tuples.
        data_list = [tuple(entry) for entry in all_rows]
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'Samsung CMH Geodata')

        timeline(report_folder, 'Samsung CMH Geodata', data_list, data_headers)

        kmlgen(report_folder, 'Samsung CMH Geodata', data_list, data_headers)

    else:
        logfunc('No Samsung_CMH_GeoData available')
    db.close()
57 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "cmh": (
        "Samsung_CMH",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/cmh.db',),
        get_cmh)
}
--------------------------------------------------------------------------------
/scripts/artifacts/duckThumbs.py:
--------------------------------------------------------------------------------
1 | import os
2 | import datetime
3 | from pathlib import Path
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import timeline, tsv, is_platform_windows, open_sqlite_db_readonly, is_platform_windows, media_to_html, logfunc
7 |
8 |
def get_duckThumbs(files_found, report_folder, seeker, wrap_text):
    """Report DuckDuckGo tab-preview thumbnails.

    Each cached jpg is named <ms-epoch>.jpg; the stem supplies the capture
    timestamp and the image itself is embedded in the HTML report.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        filename = (Path(file_found).name)
        # File stem is a millisecond unix-epoch value
        utctime = int(Path(file_found).stem)
        filepath = str(Path(file_found).parents[1])

        timestamp = (datetime.datetime.utcfromtimestamp(utctime/1000).strftime('%Y-%m-%d %H:%M:%S'))

        thumb = media_to_html(filename, files_found, report_folder)

        platform = is_platform_windows()
        if platform:
            # NOTE(review): strips '?' from the generated media HTML on
            # Windows - confirm why this is needed.
            thumb = thumb.replace('?', '')

        data_list.append((timestamp, thumb, filename, file_found))


    if data_list:
        description = 'DuckDuckGo Tab Thumbnails'
        report = ArtifactHtmlReport('DuckDuckGo Tab Thumbnails')
        report.start_artifact_report(report_folder, 'DuckDuckGo Tab Thumbnails', description)
        report.add_script()
        data_headers = ('Timestamp','Thumbnail','Filename','Location' )
        # filepath carries over from the last loop iteration; a non-empty
        # data_list guarantees it is defined here.
        report.write_artifact_data_table(data_headers, data_list, filepath, html_escape=False)
        report.end_artifact_report()

        tsvname = 'DuckDuckGo Tab Thumbnails'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'DuckDuckGo Tab Thumbnails'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No DuckDuckGo Tab Thumbnails data available')
45 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    # Fixed: key previously contained a stray apostrophe ("DuckThumbs'").
    "DuckThumbs": (
        "DuckDuckGo",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/com.duckduckgo.mobile.android/cache/tabPreviews/*/*.jpg',),
        get_duckThumbs)
}
--------------------------------------------------------------------------------
/scripts/artifacts/etc_hosts.py:
--------------------------------------------------------------------------------
1 | import codecs
2 | import csv
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
6 |
def get_etc_hosts(files_found, report_folder, seeker, wrap_text):
    """
    Parse /etc/hosts and report non-default entries.

    The default '127.0.0.1 localhost' and '::1 ip6-localhost' lines are
    skipped so only anomalous mappings appear in the report.
    """
    data_list = []
    file_found = str(files_found[0])

    with codecs.open(file_found, 'r', 'utf-8-sig') as csvfile:
        for row in csvfile:
            fields = row.split()
            # Fixed: blank lines and comment lines previously raised
            # IndexError (or were reported as bogus '#' entries); skip them.
            if len(fields) < 2 or fields[0].startswith('#'):
                continue
            sline_one = fields[0]
            sline_two = fields[1]
            if (sline_one == '127.0.0.1' and sline_two == 'localhost') or \
                (sline_one == '::1' and sline_two == 'ip6-localhost'):
                pass # Skipping the defaults, so only anomaly entries are seen
            else:
                data_list.append((sline_one, sline_two))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Etc Hosts')
        report.start_artifact_report(report_folder, f'Etc Hosts')
        report.add_script()
        data_headers = ('IP Address', 'Hostname')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Etc Hosts'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc(f'No etc hosts file available, or nothing significant found.')
36 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "Etc_hosts": (
        "Etc Hosts",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/system/etc/hosts',),
        get_etc_hosts)
}
--------------------------------------------------------------------------------
/scripts/artifacts/factory_reset.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | from scripts.artifact_report import ArtifactHtmlReport
4 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, logdevinfo
5 |
def get_factory_reset(files_found, report_folder, seeker, wrap_text):
    """Report the device factory-reset time.

    bootstat's factory_reset file content is not parsed; its filesystem
    modification time is taken as the reset timestamp.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('factory_reset'):
            continue # Skip all other files

        data_list = []
        file_name = 'factory_reset'

        # mtime of the file marks when the factory reset was recorded
        modTimesinceEpoc = os.path.getmtime(file_found)

        reset_time = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(modTimesinceEpoc))

        logdevinfo(f"Factory Reset Timestamp: {reset_time}")
        data_list.append((reset_time, file_name))

    # NOTE(review): data_list is created inside the loop; if no factory_reset
    # file is among files_found this raises NameError - confirm intended.
    if data_list:
        report = ArtifactHtmlReport('Factory Reset')
        report.start_artifact_report(report_folder, 'Factory Reset')
        report.add_script()
        data_headers = ('Timestamp', 'File Name')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Factory Reset'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Factory Reset'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Factory Reset data available')
39 |
__artifacts__ = {
    # key: (category, path glob tuple, entry function)
    "Factory_reset": (
        "Wipe & Setup",
        # Trailing comma makes this a real one-element tuple; a bare
        # parenthesized string is just a string.
        ('*/misc/bootstat/factory_reset',),
        get_factory_reset)
}
--------------------------------------------------------------------------------
/scripts/artifacts/firefoxCookies.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 | import textwrap
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
7 |
def get_firefoxCookies(files_found, report_folder, seeker, wrap_text):
    """Parse Firefox's cookies.sqlite and report stored cookies.

    Only the main database file is read (-journal and other companion
    files are skipped). Rows go to an HTML report, a TSV export and
    timeline entries.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if os.path.basename(file_found) != 'cookies.sqlite':
            continue  # skip -journal and other files

        db = open_sqlite_db_readonly(file_found)
        cur = db.cursor()
        # lastAccessed/creationTime are divided by 1e6 -> stored in microseconds;
        # expiry is plain epoch seconds.
        cur.execute('''
        SELECT
        datetime(lastAccessed/1000000,'unixepoch') AS LastAccessedDate,
        datetime(creationTime/1000000,'unixepoch') AS CreationDate,
        host AS Host,
        name AS Name,
        value AS Value,
        datetime(expiry,'unixepoch') AS ExpirationDate,
        path AS Path
        from moz_cookies
        ORDER BY lastAccessedDate ASC
        ''')

        rows = cur.fetchall()
        if rows:
            report = ArtifactHtmlReport('Firefox - Cookies')
            report.start_artifact_report(report_folder, 'Firefox - Cookies')
            report.add_script()
            data_headers = ('Last Accessed Timestamp','Created Timestamp','Host','Name','Value','Expiration Timestamp','Path')
            data_list = [tuple(row) for row in rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Firefox - Cookies'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Firefox - Cookies'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Cookies data available')

        db.close()
53 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "FirefoxCookies": (
        "Firefox",
        ('*/org.mozilla.firefox/files/mozilla/*.default/cookies.sqlite*'),
        get_firefoxCookies)
}
--------------------------------------------------------------------------------
/scripts/artifacts/firefoxDownloads.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 | import textwrap
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
7 |
def get_firefoxDownloads(files_found, report_folder, seeker, wrap_text):
    """Parse Firefox's mozac_downloads_database and report downloads.

    Only the main database file is read (-journal and other companion
    files are skipped). Rows go to an HTML report, a TSV export and
    timeline entries.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if os.path.basename(file_found) != 'mozac_downloads_database':
            continue  # skip -journal and other files

        db = open_sqlite_db_readonly(file_found)
        cur = db.cursor()
        # created_at is epoch milliseconds (divided by 1000 below).
        cur.execute('''
        SELECT
        datetime(created_at/1000,'unixepoch') AS CreatedDate,
        file_name AS FileName,
        url AS URL,
        content_type AS MimeType,
        content_length AS FileSize,
        CASE status
            WHEN 3 THEN 'Paused'
            WHEN 4 THEN 'Canceled'
            WHEN 5 THEN 'Failed'
            WHEN 6 THEN 'Finished'
        END AS Status,
        destination_directory AS DestDir
        FROM downloads
        ''')

        rows = cur.fetchall()
        if rows:
            report = ArtifactHtmlReport('Firefox - Downloads')
            report.start_artifact_report(report_folder, 'Firefox - Downloads')
            report.add_script()
            data_headers = ('Created Timestamp','File Name','URL','MIME Type','File Size (Bytes)','Status','Destination Directory')
            data_list = [tuple(row) for row in rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Firefox - Downloads'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Firefox - Downloads'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Downloads data available')

        db.close()
57 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "FirefoxDownloads": (
        "Firefox",
        ('*/org.mozilla.firefox/databases/mozac_downloads_database*'),
        get_firefoxDownloads)
}
--------------------------------------------------------------------------------
/scripts/artifacts/firefoxFormHistory.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 | import textwrap
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
7 |
def get_firefoxFormHistory(files_found, report_folder, seeker, wrap_text):
    """Parse Firefox's formhistory.sqlite and report saved form entries.

    Only the main database file is read (-journal and other companion
    files are skipped). Rows go to an HTML report, a TSV export and
    timeline entries.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if os.path.basename(file_found) != 'formhistory.sqlite':
            continue  # skip -journal and other files

        db = open_sqlite_db_readonly(file_found)
        cur = db.cursor()
        # firstUsed/lastUsed are divided by 1e6 -> stored in microseconds.
        cur.execute('''
        SELECT
        datetime(firstUsed/1000000, 'unixepoch') AS FirstUsed,
        datetime(lastUsed/1000000, 'unixepoch') AS LastUsed,
        fieldname AS FieldName,
        value AS Value,
        timesUsed AS TimesUsed,
        id AS ID
        FROM moz_formhistory
        ORDER BY id ASC
        ''')

        rows = cur.fetchall()
        if rows:
            report = ArtifactHtmlReport('Firefox - Form History')
            report.start_artifact_report(report_folder, 'Firefox - Form History')
            report.add_script()
            data_headers = ('First Used Timestamp','Last Used Timestamp','Field Name','Value','Times Used','ID')
            data_list = [tuple(row) for row in rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Firefox - Form History'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Firefox - Form History'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Form History data available')

        db.close()
52 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "FirefoxFormHistory": (
        "Firefox",
        ('*/org.mozilla.firefox/files/mozilla/*.default/formhistory.sqlite*'),
        get_firefoxFormHistory)
}
--------------------------------------------------------------------------------
/scripts/artifacts/firefoxPermissions.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 | import textwrap
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
7 |
def get_firefoxPermissions(files_found, report_folder, seeker, wrap_text):
    """Parse Firefox's permissions.sqlite and report per-site permissions.

    Only the main database file is read (-journal and other companion
    files are skipped). Rows go to an HTML report, a TSV export and
    timeline entries.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if os.path.basename(file_found) != 'permissions.sqlite':
            continue  # skip -journal and other files

        db = open_sqlite_db_readonly(file_found)
        cur = db.cursor()
        # modificationTime/expireTime are epoch milliseconds;
        # expireTime of 0 is rendered as an empty string (no expiry).
        cur.execute('''
        SELECT
        datetime(modificationTime/1000,'unixepoch') AS ModDate,
        origin AS Origin,
        type AS PermType,
        CASE permission
            WHEN 1 THEN 'Allow'
            WHEN 2 THEN 'Block'
        END AS PermState,
        CASE expireTime
            WHEN 0 THEN ''
            else datetime(expireTime/1000,'unixepoch')
        END AS ExpireDate
        FROM moz_perms
        ORDER BY ModDate ASC
        ''')

        rows = cur.fetchall()
        if rows:
            report = ArtifactHtmlReport('Firefox - Permissions')
            report.start_artifact_report(report_folder, 'Firefox - Permissions')
            report.add_script()
            data_headers = ('Modification Timestamp','Origin Site','Permission Type','Status','Expiration Timestamp')
            data_list = [tuple(row) for row in rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Firefox - Permissions'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Firefox - Permissions'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Permissions data available')

        db.close()
57 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "FirefoxPermissions": (
        "Firefox",
        ('*/org.mozilla.firefox/files/mozilla/*.default/permissions.sqlite*'),
        get_firefoxPermissions)
}
--------------------------------------------------------------------------------
/scripts/artifacts/firefoxRecentlyClosedTabs.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 | import textwrap
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
7 |
def get_firefoxRecentlyClosedTabs(files_found, report_folder, seeker, wrap_text):
    """Parse Firefox's recently_closed_tabs database and report entries.

    Only the main database file is read (-journal and other companion
    files are skipped). Rows go to an HTML report, a TSV export and
    timeline entries.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if os.path.basename(file_found) != 'recently_closed_tabs':
            continue  # skip -journal and other files

        db = open_sqlite_db_readonly(file_found)
        cur = db.cursor()
        # created_at is epoch milliseconds.
        cur.execute('''
        SELECT
        datetime(created_at/1000,'unixepoch') AS CreatedDate,
        title as Title,
        url as URL
        FROM recently_closed_tabs
        ''')

        rows = cur.fetchall()
        if rows:
            report = ArtifactHtmlReport('Firefox - Recently Closed Tabs')
            report.start_artifact_report(report_folder, 'Firefox - Recently Closed Tabs')
            report.add_script()
            data_headers = ('Timestamp','Title','URL')
            data_list = [tuple(row) for row in rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Firefox - Recently Closed Tabs'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Firefox - Recently Closed Tabs'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Recently Closed Tabs data available')

        db.close()
48 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "FirefoxRecentlyClosedTabs": (
        "Firefox",
        ('*/org.mozilla.firefox/databases/recently_closed_tabs*'),
        get_firefoxRecentlyClosedTabs)
}
--------------------------------------------------------------------------------
/scripts/artifacts/firefoxTopSites.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sqlite3
3 | import textwrap
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
7 |
def get_firefoxTopSites(files_found, report_folder, seeker, wrap_text):
    """Parse Firefox's top_sites database and report pinned/top sites.

    Only the main database file is read (-journal and other companion
    files are skipped). Rows go to an HTML report, a TSV export and
    timeline entries.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if os.path.basename(file_found) != 'top_sites':
            continue  # skip -journal and other files

        db = open_sqlite_db_readonly(file_found)
        cur = db.cursor()
        # created_at is epoch milliseconds; is_default is a 0/1 flag.
        cur.execute('''
        SELECT
        datetime(created_at/1000,'unixepoch') AS CreatedDate,
        title AS Title,
        url AS URL,
        CASE is_default
            WHEN 0 THEN 'No'
            WHEN 1 THEN 'Yes'
        END as IsDefault
        FROM top_sites
        ''')

        rows = cur.fetchall()
        if rows:
            report = ArtifactHtmlReport('Firefox - Top Sites')
            report.start_artifact_report(report_folder, 'Firefox - Top Sites')
            report.add_script()
            data_headers = ('Created Timestamp','Title','URL','Is Default')
            data_list = [tuple(row) for row in rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Firefox - Top Sites'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Firefox - Top Sites'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Firefox - Top Sites data available')

        db.close()
52 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "FirefoxTopSites": (
        "Firefox",
        ('*/org.mozilla.firefox/databases/top_sites*'),
        get_firefoxTopSites)
}
--------------------------------------------------------------------------------
/scripts/artifacts/frosting.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import os
3 | import textwrap
4 |
5 | from packaging import version
6 | from scripts.artifact_report import ArtifactHtmlReport
7 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
8 |
def get_frosting(files_found, report_folder, seeker, wrap_text):
    """Parse the Play Store's frosting.db and report app update records.

    For each row of the 'frosting' table, reports the last-updated
    timestamp, package name (pk) and APK path to an HTML report, a TSV
    export and timeline entries.
    """
    for file_found in files_found:
        # Fix: removed unused local 'file_name = str(file_found)'.
        if not file_found.endswith('frosting.db'):
            continue  # Skip all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        # last_updated is epoch milliseconds; 0 is rendered as ''.
        cursor.execute('''
        select
        case last_updated
            when 0 then ''
            else datetime(last_updated/1000,'unixepoch')
        end as "Last Updated",
        pk,
        apk_path
        from frosting
        ''')

        all_rows = cursor.fetchall()
        if len(all_rows) > 0:
            report = ArtifactHtmlReport('App Updates (Frosting.db)')
            report.start_artifact_report(report_folder, 'App Updates (Frosting.db)')
            report.add_script()
            data_headers = ('Last Updated Timestamp','App Package Name','APK Path')
            data_list = []
            for row in all_rows:
                data_list.append((row[0],row[1],row[2]))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'App Updates (Frosting.db)'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'App Updates (Frosting.db)'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No App Updates (Frosting.db) data available')

        db.close()
53 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "frosting": (
        "Installed Apps",
        ('*/com.android.vending/databases/frosting.db*'),
        get_frosting)
}
--------------------------------------------------------------------------------
/scripts/artifacts/gmail.py:
--------------------------------------------------------------------------------
1 | # gmailActive: Get gmail account information
2 | # Author: Joshua James {joshua@dfirscience.org}
3 | # Date: 2021-11-08
4 | # Artifact version: 0.0.1
5 | # Android version tested: 11
6 | # Requirements: none
7 |
8 | import xml.etree.ElementTree as ET
9 |
10 | from scripts.artifact_report import ArtifactHtmlReport
11 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
12 |
class keyboard_event:
    """Plain value object for a single keyboard/text-entry event.

    Holds an identifier, the originating app, the captured text, the
    name/id of the text box it was typed into, and the event/start/end
    dates (start and end default to empty strings).
    """

    def __init__(self, id, app, text, textbox_name, textbox_id, event_date, start_date='', end_date=''):
        # Attribute names mirror the constructor arguments exactly.
        (self.id, self.app, self.text,
         self.textbox_name, self.textbox_id,
         self.event_date, self.start_date, self.end_date) = (
            id, app, text, textbox_name, textbox_id,
            event_date, start_date, end_date)
23 |
def get_gmailActive(files_found, report_folder, seeker, wrap_text):
    """Extract the active Gmail account from the Gmail.xml shared prefs.

    Scans the top-level XML elements for one whose 'name' attribute is
    'active-account' and, when found, writes a one-row HTML report and
    TSV export with the account address.
    """
    activeAccount = ''
    file_found = str(files_found[0])
    xmlTree = ET.parse(file_found)
    root = xmlTree.getroot()
    for child in root:
        # Fix: use .get() -- not every shared-prefs element carries a
        # 'name' attribute, and attrib['name'] raised KeyError on those.
        # Also guard against an empty element (child.text is None),
        # which broke the string concatenation below.
        if child.attrib.get('name') == "active-account" and child.text:
            logfunc("Active gmail account found: " + child.text)
            activeAccount = child.text

    if activeAccount != '':
        report = ArtifactHtmlReport('Gmail - Active')
        report.start_artifact_report(report_folder, 'Gmail - Active')
        report.add_script()
        data_headers = ('Active Gmail Address','') # final , needed for table formatting
        data_list = []
        data_list.append((activeAccount, ''))# We only expect one active account
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Gmail - Active'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No active Gmail account found')
50 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "GmailActive": (
        "Gmail",
        ('*/com.google.android.gm/shared_prefs/Gmail.xml'),
        get_gmailActive)
}
--------------------------------------------------------------------------------
/scripts/artifacts/googleInitiatedNav.py:
--------------------------------------------------------------------------------
1 | import blackboxprotobuf
2 | from datetime import *
3 | from scripts.artifact_report import ArtifactHtmlReport
4 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, convert_utc_human_to_timezone, kmlgen, timeline
5 |
def get_googleInitiatedNav(files_found, report_folder, seeker, wrap_text):
    """Decode Google Maps' navigation-history protobuf cache and report
    each initiated navigation (timestamp + destination string).

    The cache file carries an 8-byte prefix before the protobuf payload;
    blackboxprotobuf decodes it without a schema, so fields are addressed
    by field number ('1', '2', '4' below).
    """
    data_list = []
    for file_found in files_found:
        with open(file_found, 'rb') as f:
            data = f.read()

        # Skip the 8-byte file header; the remainder is the protobuf message.
        arreglo = (data)
        pb = arreglo[8:]
        values, types = blackboxprotobuf.decode_message(pb)

        if isinstance(values, dict):
            # Single-entry layout: field 1 holds one record.
            # Field 1.2 is a microsecond epoch timestamp; 1.4.1 the destination.
            timestamp = values['1']['2']
            timestamp = datetime.fromtimestamp(timestamp/1000000, tz=timezone.utc)
            timestamp = convert_utc_human_to_timezone(timestamp, 'UTC')
            intendeddest = values['1']['4']['1'].decode()

            data_list.append((timestamp, intendeddest))
        else:
            # Repeated layout: field 1 is a list of records with the
            # same inner numbering as above.
            for data in values['1']:
                timestamp = data['2']
                timestamp = datetime.fromtimestamp(timestamp/1000000, tz=timezone.utc)
                timestamp = convert_utc_human_to_timezone(timestamp, 'UTC')
                intendeddest = data['4']['1'].decode()

                data_list.append((timestamp, intendeddest))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Google Initiated Navigation')
        report.start_artifact_report(report_folder, f'Google Initiated Navigation')
        report.add_script()
        data_headers = ('Timestamp', 'Initiated Navigation Destination')
        # NOTE(review): file_found here is the last file processed; with
        # multiple matches the report cites only that path.
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Google Initiated Navigation'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Google Initiated Navigation'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc(f'No Google Initiated Navigation available')
48 |
# ALEAPP plugin registration: key -> (report category, search globs, parser).
__artifacts__ = {
    "googleInitiatedNav": (
        "GEO Location",
        ('*/com.google.android.apps.maps/files/new_recent_history_cache_navigated.cs','*/new_recent_history_cache_navigated.cs'),
        get_googleInitiatedNav)
}
--------------------------------------------------------------------------------
/scripts/artifacts/googlePlaySearches.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_googlePlaySearches(files_found, report_folder, seeker, wrap_text):
    """Parse Play Store search history (suggestions.db) and report it.

    Rows from the 'suggestions' table go to an HTML report, a TSV
    export and timeline entries.
    """
    # Fix: the original for/break fell through with the *last* matched
    # path (or an undefined name on an empty list) when no file ended
    # with 'suggestions.db', then tried to open it as a database.
    file_found = None
    for candidate in files_found:
        candidate = str(candidate)
        if candidate.endswith('suggestions.db'):
            file_found = candidate
            break  # first match wins; -journal etc. are ignored

    if file_found is None:
        logfunc('No Google Play Searches data available')
        return

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    # 'date' is epoch milliseconds.
    cursor.execute('''
    SELECT
    datetime(date / 1000, "unixepoch"),
    display1,
    query
    from suggestions
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Google Play Searches')
        report.start_artifact_report(report_folder, 'Google Play Searches')
        report.add_script()
        data_headers = ('Timestamp','Display','query' )
        data_list = []
        for row in all_rows:
            data_list.append((row[0],row[1],row[2]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'google play searches'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Google Play Searches'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Google Play Searches data available')

    db.close()
47 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "GooglePlaySearches": (
        "Google Play",
        ('*/com.android.vending/databases/suggestions.db*'),
        get_googlePlaySearches)
}
54 |
--------------------------------------------------------------------------------
/scripts/artifacts/googlemapaudio.py:
--------------------------------------------------------------------------------
1 | from re import fullmatch
2 | from datetime import datetime
3 | from pathlib import Path
4 | from os.path import getsize
5 |
6 |
7 | from scripts.artifact_report import ArtifactHtmlReport
8 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, media_to_html
9 |
def convertGeo(s):
    """Insert a decimal point six digits from the end of *s*.

    Strings longer than six characters get a '.' spliced in before the
    last six characters (fixed-point integer -> decimal string);
    shorter strings are returned unchanged.
    """
    if len(s) <= 6:
        return s
    return f"{s[:-6]}.{s[-6:]}"
16 |
def get_googlemapaudio(files_found, report_folder, seeker, wrap_text):
    """Report Google Maps TTS cache files as playable audio artefacts.

    Cache entries are named '<number>_<epoch-millis>'; the trailing
    number supplies the timestamp. Paths containing 'sbin' and empty
    files are ignored.
    """
    files_found = [f for f in files_found if "sbin" not in f]

    data_headers = ("Timestamp", "Filename", "Audio", "Size")
    audio_info = []
    source_dir = ""

    name_pattern = r"-?\d+_\d+"

    for file_found in files_found:
        name = Path(file_found).name
        size = getsize(file_found)

        if fullmatch(name_pattern, name) and size > 0:
            # The second underscore-separated field is epoch milliseconds.
            millis = int(name.split("_")[1])
            stamp = datetime.utcfromtimestamp(millis / 1000).isoformat(timespec="seconds", sep=" ")

            # Inline playable element for the HTML report.
            audio = media_to_html(name, files_found, report_folder)

            size_kb = f"{round(size / 1024, 2)} kb"

            audio_info.append((stamp, name, audio, size_kb))

    if audio_info:
        source_dir = str(Path(files_found[0]).parent)

        report = ArtifactHtmlReport('Google Maps Voice Guidance')
        report.start_artifact_report(report_folder, 'Google Maps Voice Guidance')
        report.add_script()

        report.write_artifact_data_table(data_headers, audio_info, source_dir, html_escape=False)
        report.end_artifact_report()

        tsvname = 'Google Map Audio'
        tsv(report_folder, data_headers, audio_info, tsvname, source_dir)
    else:
        logfunc('No Google Audio Locations found')
69 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "Googlemapaudio": (
        "Google Maps Voice Guidance",
        ('*/com.google.android.apps.maps/app_tts-cache/*_*'),
        get_googlemapaudio)
}
--------------------------------------------------------------------------------
/scripts/artifacts/googlemapaudioTemp.py:
--------------------------------------------------------------------------------
1 | from re import fullmatch
2 | from datetime import datetime
3 | from pathlib import Path
4 | from os.path import getsize
5 | import os
6 |
7 |
8 | from scripts.artifact_report import ArtifactHtmlReport
9 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, media_to_html
10 |
def get_googlemapaudioTemp(files_found, report_folder, seeker, wrap_text):
    """Report Google Maps temp TTS files as playable audio artefacts.

    Every non-empty regular file under app_tts-temp is reported with
    its filesystem modification time (UTC), an inline audio element
    and its size in kb.
    """
    data_list = []

    for file_found in files_found:
        # Skip directories before stat'ing; only regular files are audio.
        if os.path.isdir(file_found):
            continue

        name = Path(file_found).name
        modified_time = os.path.getmtime(file_found)
        file_size = getsize(file_found)

        if file_size > 0:
            # Timestamp (UTC) from the file's mtime.
            utc_modified_date = datetime.utcfromtimestamp(modified_time)

            # Inline playable element for the HTML report.
            audio = media_to_html(file_found, files_found, report_folder)

            file_size_kb = f"{round(file_size / 1024, 2)} kb"

            # Fix: file_size_kb was computed but the raw byte count was
            # appended; report the formatted size (matches the sibling
            # googlemapaudio module).
            data_list.append((utc_modified_date, audio, name, file_size_kb))

    if len(data_list) > 0:
        source_dir = str(Path(file_found).parent)

        report = ArtifactHtmlReport('Google Maps Temp Voice Guidance')
        report.start_artifact_report(report_folder, 'Google Maps Temp Voice Guidance')
        report.add_script()
        data_headers = ('Timestamp Modified', 'Audio', 'Name', 'File Size')
        report.write_artifact_data_table(data_headers, data_list, source_dir, html_escape=False)
        report.end_artifact_report()

        tsvname = f'Google Maps Temp Voice Guidance'
        tsv(report_folder, data_headers, data_list, tsvname, source_dir)

    else:
        logfunc('No Google Maps Temp Voice Guidance found')
55 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "GooglemapaudioT": (
        "Google Maps Temp Voice Guidance",
        ('*/com.google.android.apps.maps/app_tts-temp/**'),
        get_googlemapaudioTemp)
}
--------------------------------------------------------------------------------
/scripts/artifacts/imagemngCache.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from os.path import isfile, isdir, join, basename, dirname, getsize, abspath, getmtime
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, media_to_html, timeline
6 |
def get_imagemngCache(files_found, report_folder, seeker, wrap_text):
    """Report cached images from image-manager disk caches.

    Every non-directory hit is rendered inline (thumbnail) along with
    its filesystem last-modified time (UTC) and full source path.
    """
    data_list = []

    for file_found in files_found:
        if isdir(file_found):
            continue
        filename = basename(file_found)
        thumb = media_to_html(filename, files_found, report_folder)
        modified = datetime.datetime.utcfromtimestamp(getmtime(file_found))
        data_list.append((modified, thumb, filename, file_found))

    if data_list:
        report = ArtifactHtmlReport('Image Manager Cache')
        report.start_artifact_report(report_folder, 'Image Manager Cache')
        report.add_script()
        data_headers = ('Timestamp Last Modified', 'Media', 'Filename', 'Source File')
        report.write_artifact_data_table(data_headers, data_list, 'See paths in report', html_escape=False)
        report.end_artifact_report()

        tsvname = 'Image Manager Cache'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Image Manager Cache'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Image Manager Cache files available')
34 |
# ALEAPP plugin registration: key -> (report category, search globs, parser).
__artifacts__ = {
    "ImagemngCache": (
        "Image Manager Cache",
        ('*/cache/image_manager_disk_cache/*.*','*/*.cnt'),
        get_imagemngCache)
}
--------------------------------------------------------------------------------
/scripts/artifacts/installedappsGass.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 |
3 | from scripts.artifact_report import ArtifactHtmlReport
4 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
5 |
def get_installedappsGass(files_found, report_folder, seeker, wrap_text):
    """List installed apps recorded by GMS in gass.db (app_info table).

    One report/TSV is produced per database found, tagged with the
    Android user number parsed from the path ('0' for the owner
    profile).
    """
    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.db'):

            db = open_sqlite_db_readonly(file_found)
            cursor = db.cursor()
            cursor.execute('''
            SELECT
            distinct(package_name),
            version_code,
            digest_sha256
            FROM
            app_info
            ''')

            # Multi-user extractions keep per-user data under .../user/<n>/...;
            # the 4th-from-last path component is the user number.
            if 'user' in file_found:
                usernum = file_found.split(slash)
                usernum = str(usernum[-4])
            else:
                usernum = '0'

            all_rows = cursor.fetchall()
            if len(all_rows) > 0:
                report = ArtifactHtmlReport('Installed Apps')
                report.start_artifact_report(report_folder, f'Installed Apps (GMS) for user {usernum}')
                report.add_script()
                data_headers = ('Bundle ID','Version Code','SHA-256 Hash')
                data_list = []
                for row in all_rows:
                    data_list.append((row[0],row[1],row[2]))

                report.write_artifact_data_table(data_headers, data_list, file_found)
                report.end_artifact_report()

                tsvname = f'installed apps - GMS for user {usernum}'
                tsv(report_folder, data_headers, data_list, tsvname)
            else:
                # Fix: message was a plain string, so the literal text
                # '{usernum}' was logged instead of the user number.
                logfunc(f'No Installed Apps data available for user {usernum}')

            db.close()
51 |
# ALEAPP plugin registration: key -> (report category, search globs, parser).
__artifacts__ = {
    "installedappsGass": (
        "Installed Apps",
        ('*/com.google.android.gms/databases/gass.db*', '*/user/*/com.google.android.gms/databases/gass.db*'),
        get_installedappsGass)
}
58 |
--------------------------------------------------------------------------------
/scripts/artifacts/installedappsLibrary.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | from pathlib import Path
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
6 |
def get_installedappsLibrary(files_found, report_folder, seeker, wrap_text):
    """Report Play Store ownership records (library.db) per Android user.

    Each row of the 'ownership' table yields purchase time, account and
    document id; output goes to an HTML report, TSV export and timeline.
    """
    for file_found in files_found:
        file_name = str(file_found)
        fullpath = Path(file_found)
        # 4th-from-last path component is the user id; the owner
        # profile's databases live under 'data', reported as user 0.
        user = fullpath.parts[-4]
        if user == 'data':
            user = '0'
        if not file_found.endswith('library.db'):
            continue

        db = open_sqlite_db_readonly(file_found)
        cur = db.cursor()
        # purchase_time is epoch milliseconds; 0 is rendered as ''.
        cur.execute('''
        SELECT
        case
            when purchase_time = 0 THEN ''
            when purchase_time > 0 THEN datetime(purchase_time / 1000, "unixepoch")
        END as pt,
        account,
        doc_id
        FROM
        ownership
        ''')

        rows = cur.fetchall()
        if rows:
            report = ArtifactHtmlReport(f'Installed Apps (Library) for user {user}')
            report.start_artifact_report(report_folder, f'Installed Apps (Library) for user {user}')
            report.add_script()
            data_headers = ('Purchase Time', 'Account', 'Doc ID')
            data_list = [tuple(row) for row in rows]

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'installed apps library for user {user}'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Installed Apps Library for user {user}'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc(f'No Installed Apps (Library) data available for user {user}')

        db.close()
54 |
# ALEAPP plugin registration: key -> (report category, search glob, parser).
# NOTE(review): the glob is a parenthesised string, not a 1-tuple --
# presumably the loader accepts bare strings; confirm.
__artifacts__ = {
    "InstalledappsLibrary": (
        "Installed Apps",
        ('*/com.android.vending/databases/library.db*'),
        get_installedappsLibrary)
}
--------------------------------------------------------------------------------
/scripts/artifacts/last_boot_time.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | from scripts.artifact_report import ArtifactHtmlReport
4 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, logdevinfo
5 |
def get_last_boot_time(files_found, report_folder, seeker, wrap_text):
    """Report the device's last boot time.

    The timestamp is derived from the filesystem last-modified time of
    the bootstat 'last_boot_time_utc' file (rendered as UTC). It is
    logged to device info and written to the HTML report, TSV export
    and timeline.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('last_boot_time_utc'):
            # Only the bootstat marker file itself is of interest.
            continue

        file_name = 'last_boot_time_utc'
        mtime_epoch = os.path.getmtime(file_found)
        last_boot_time = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(mtime_epoch))

        logdevinfo(f"Last Boot Timestamp: {last_boot_time}")
        data_list = [(last_boot_time, file_name)]

        if data_list:
            report = ArtifactHtmlReport('Last Boot Time')
            report.start_artifact_report(report_folder, 'Last Boot Time')
            report.add_script()
            data_headers = ('Timestamp', 'File Name')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'Last Boot Time'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'Last Boot Time'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Last Boot Time data available')
39 |
# Artifact registration: name -> (report category, search glob(s), parser function).
# NOTE(review): parens without a trailing comma make the glob a plain str, not a
# tuple — the loader appears to accept both; confirm.
__artifacts__ = {
    "last_boot_time": (
        "Power Events",
        ('*/misc/bootstat/last_boot_time_utc'),
        get_last_boot_time)
}
--------------------------------------------------------------------------------
/scripts/artifacts/libretorrent.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 | from datetime import datetime
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, kmlgen
7 |
def get_libretorrent(files_found, report_folder, seeker, wrap_text):
    """Parse LibreTorrent's torrent database and report each torrent.

    Bug fixes vs. previous version:
    - If no matched file was actually named libretorrent.db (only -wal/-shm
      sidecars matched, or the list was empty), the old code fell out of
      the loop and opened whatever file came last (or raised on an empty
      list). It now logs and returns instead.
    - Removed the unused `file_name` local; `file_found` is now str()'d
      like the other artifacts do.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('libretorrent.db'):
            break  # Main database found; ignore -wal/-shm sidecar files
    else:
        logfunc('No Libre Torrents data available')
        return

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    id,
    name,
    downloadPath,
    dateAdded,
    error,
    manuallyPaused,
    magnet,
    downloadingMetadata,
    visibility
    FROM Torrent
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Libre Torrent - Torrents')
        report.start_artifact_report(report_folder, 'Libre Torrent - Torrents')
        report.add_script()
        data_headers = ('Timestamp','ID','Name','Download Path','Error','Manually Paused','Magnet','Downloading Metadata','Visibility')
        data_list = []
        for row in all_rows:
            # dateAdded is stored in milliseconds since the Unix epoch.
            timestamp = datetime.utcfromtimestamp(row[3]/1000)
            data_list.append((timestamp,row[0],row[1],row[2],row[4],row[5],row[6],row[7],row[8]))

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Libre Torrent - Torrents'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Libre Torrent - Torrents'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No Libre Torrents data available')

    db.close()
56 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "Libretorrent": (
        "Libre Torrent",
        ('*/data/com.houseoflife.bitlord/databases/libretorrent.db*','*/libretorrent.db*'),
        get_libretorrent)
}
63 |
--------------------------------------------------------------------------------
/scripts/artifacts/offlinePages.py:
--------------------------------------------------------------------------------
1 | from datetime import *
2 | import email
3 | import os
4 | import pytz
5 |
6 | from scripts.artifact_report import ArtifactHtmlReport
7 | from scripts.ilapfuncs import timeline, logfunc, tsv, is_platform_windows, media_to_html
8 |
def convert_utc_int_to_timezone(utc_time, time_offset):
    """Convert an aware UTC datetime to the timezone named by *time_offset*
    (an IANA timezone name, e.g. 'UTC')."""
    target_tz = pytz.timezone(time_offset)
    return utc_time.astimezone(target_tz)
18 |
def get_offlinePages(files_found, report_folder, seeker, wrap_text):
    """Report saved offline pages (.mhtml/.mht) with their MIME headers.

    Bug fix: datetime.utcfromtimestamp() does not accept a tz keyword, so
    the old call raised TypeError on every file. datetime.fromtimestamp()
    with tz=timezone.utc produces the intended aware UTC timestamp.
    """
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)

        # File mtime is used as the page's save timestamp.
        modified_time = os.path.getmtime(file_found)
        utc_modified_date = datetime.fromtimestamp(modified_time, tz=timezone.utc)

        timestamp = convert_utc_int_to_timezone(utc_modified_date, 'UTC')

        # MHTML files are MIME messages, so the stdlib email parser applies.
        with open(file_found, 'r', errors='replace') as fp:
            message = email.message_from_file(fp)
            sourced = (message['Snapshot-Content-Location'])
            subjectd = (message['Subject'])
            dated = (message['Date'])
            media = media_to_html(file_found, files_found, report_folder)

        data_list.append((timestamp, media, sourced, subjectd, dated, file_found))

    if len(data_list) > 0:
        note = 'Source location in extraction found in the report for each item.'
        report = ArtifactHtmlReport('Offline Pages')
        report.start_artifact_report(report_folder, f'Offline Pages')
        report.add_script()
        data_headers = ('Timestamp', 'File', 'Web Source', 'Subject', 'MIME Date', 'Source in Extraction')
        report.write_artifact_data_table(data_headers, data_list, note, html_no_escape=['File'])
        report.end_artifact_report()

        tsvname = f'Offline Pages'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Offline Pages'
        timeline(report_folder, tlactivity, data_list, data_headers)
55 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "pages": (
        "Offline Pages",
        ('*/*.mhtml', '*/*.mht'),
        get_offlinePages)
}
62 |
--------------------------------------------------------------------------------
/scripts/artifacts/oldpowerOffReset.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import os
3 | from pathlib import Path
4 |
5 | from scripts.artifact_report import ArtifactHtmlReport
6 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
7 |
def get_oldpowerOffReset(files_found, report_folder, seeker, wrap_text):
    """Parse legacy power_off_reset_reason logs into timestamp/reason rows.

    Bug fix: the no-data log message previously read 'Power Off Reset file
    available', which wrongly implied data had been found.
    """
    data_list = []
    for file_found in files_found:

        filename = os.path.basename(file_found)
        location = os.path.dirname(file_found)

        with open(file_found, 'r') as f:
            for line in f:
                # A date line looks like 'yy/mm/dd HH:MM:SS' plus the
                # trailing newline (18 characters in total).
                if '/' in line and len(line) == 18:
                    fecha = datetime.strptime(line.strip(), '%y/%m/%d %H:%M:%S')

                    # The next line carries the reason, e.g. 'REASON: <text>'.
                    # NOTE(review): assumes a reason line always follows a
                    # date line — confirm against sample logs.
                    reason = next(f)
                    reason = reason.split(':')[1].replace('\n', '')

                    data_list.append((fecha, reason, filename))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Power Off Reset')
        report.start_artifact_report(report_folder, f'Power Off Reset')
        report.add_script()
        data_headers = ('Timestamp', 'Reason', 'Filename')
        report.write_artifact_data_table(data_headers, data_list, location)
        report.end_artifact_report()

        tsvname = f'Power Off Reset'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc('No Power Off Reset data available')
39 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "oldpowerOffReset": (
        "Power Events",
        ('*/log/power_off_reset_reason.txt','*/log/power_off_reset_reason_backup.txt'),
        get_oldpowerOffReset)
}
--------------------------------------------------------------------------------
/scripts/artifacts/pSettings.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
def get_pSettings(files_found, report_folder, seeker, wrap_text):
    """Dump name/value pairs from Google's partner settings database.

    Bug fix: the database handle was only closed when the query returned
    rows, so an empty 'partner' table leaked the connection. close() now
    runs for every opened database.
    """
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('googlesettings.db'):
            continue  # Skip -wal/-shm sidecars and other matches

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
        name,
        value
        from partner
        ''')

        all_rows = cursor.fetchall()
        for row in all_rows:
            data_list.append((row[0], row[1], file_found))
        db.close()

    if data_list:
        report = ArtifactHtmlReport('Partner Settings')
        report.start_artifact_report(report_folder, 'Partner Settings')
        report.add_script()
        data_headers = ('Name','Value','Source File')

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'partner settings'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Partner Settings data available')
44 |
# Artifact registration: name -> (report category, search glob(s), parser function).
# NOTE(review): parens without a trailing comma make the glob a plain str, not a
# tuple — the loader appears to accept both; confirm.
__artifacts__ = {
    "pSettings": (
        "Device Info",
        ('*/com.google.android.gsf/databases/googlesettings.db*'),
        get_pSettings)
}
51 |
--------------------------------------------------------------------------------
/scripts/artifacts/packageGplinks.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from scripts.artifact_report import ArtifactHtmlReport
4 | from scripts.ilapfuncs import logfunc, tsv, timeline, open_sqlite_db_readonly
5 |
def get_packageGplinks(files_found, report_folder, seeker, wrap_text):
    """Build likely Google Play store links for every bundle ID listed in
    the system packages.list file."""
    data_list = []

    for file_found in files_found:
        if 'sbin' in file_found:
            continue
        file_found = str(file_found)
        source_file = file_found.replace(seeker.data_folder, '')

        with open(file_found) as data:
            lines = data.readlines()

        for entry in lines:
            # packages.list lines start with the package name.
            package_id = entry.split(' ', 1)[0]
            play_url = f'https://play.google.com/store/apps/details?id={package_id}'
            data_list.append((package_id, play_url))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Google Play Links for Apps')
        report.start_artifact_report(report_folder, 'Google Play Links for Apps')
        report.add_script()
        data_headers = ('Bundle ID', 'Possible Google Play Store Link')
        report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'Google Play Links for Apps', source_file)

    else:
        logfunc('No Google Play Links for Apps data available')
36 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "packageGplinks": (
        "Installed Apps",
        ('*/system/packages.list'),
        get_packageGplinks)
}
--------------------------------------------------------------------------------
/scripts/artifacts/pikpakCloudlist.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import os
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import timeline, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
7 |
def get_pikpakCloudlist(files_found, report_folder, seeker, wrap_text):
    """Report the PikPak cloud file list (xpan_files table).

    Bug fixes:
    - logfunc was used on the no-data path but never imported in this
      module (NameError); it is imported locally here.
    - If no matched file ended in .db, the loop fell through and opened
      whatever file came last; the function now returns instead.
    - The database handle is now closed.
    """
    from scripts.ilapfuncs import logfunc

    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.db'):
            break  # Main database found; ignore -wal/-shm sidecars
    else:
        logfunc('No PikPak Cloud List data available')
        return

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    create_time,
    modify_time,
    delete_time,
    datetime(local_update_time/1000, 'unixepoch' ),
    user_id,
    name,
    kind,
    url,
    thumbnail_link
    FROM xpan_files
    ''')

    all_rows = cursor.fetchall()
    data_list = []

    if len(all_rows) > 0:
        for row in all_rows:
            link = f'{row[8]}'
            data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],link))

        description = 'PikPak Cloud List links are clickable!!!!! If connected to the internet and pressed the browser will try to open them in a new tab.'
        report = ArtifactHtmlReport('PikPak Cloud List')
        report.start_artifact_report(report_folder, 'PikPak Cloud List', description)
        report.add_script()
        data_headers = ('Create Time', 'Modify Time','Delete Time', 'Local Update Time', 'User ID', 'Name', 'Kind', 'URL','Thumbnail Link')
        report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Thumbnail Link'])
        report.end_artifact_report()

        tsvname = 'PikPak Cloud List'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'PikPak Cloud List'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No PikPak Cloud List data available')

    db.close()
56 |
# Artifact registration: name -> (report category, search glob(s), parser function).
# NOTE(review): parens without a trailing comma make the glob a plain str, not a
# tuple — the loader appears to accept both; confirm.
__artifacts__ = {
    "PikPak Cloud List": (
        "PikPak",
        ('*/com.pikcloud.pikpak/databases/pikpak_files_*.db*'),
        get_pikpakCloudlist)
}
--------------------------------------------------------------------------------
/scripts/artifacts/pikpakDownloads.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import os
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import timeline, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
7 |
def get_pikpakDownloads(files_found, report_folder, seeker, wrap_text):
    """Report PikPak download records (xl_downloads table).

    Bug fixes:
    - logfunc was used on the no-data path but never imported in this
      module (NameError); it is imported locally here.
    - If no matched file ended in .db, the loop fell through and opened
      whatever file came last; the function now returns instead.
    - The database handle is now closed.
    """
    from scripts.ilapfuncs import logfunc

    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('.db'):
            break  # Main database found; ignore -wal/-shm sidecars
    else:
        logfunc('No PikPak Downloads data available')
        return

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(create_time/1000, 'unixepoch'),
    datetime(lastmod/1000, 'unixepoch'),
    title,
    _data,
    uri
    from xl_downloads
    ''')

    all_rows = cursor.fetchall()
    data_list = []

    if len(all_rows) > 0:
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4]))

        description = 'PikPak Downloads'
        report = ArtifactHtmlReport('PikPak Downloads')
        report.start_artifact_report(report_folder, 'PikPak Downloads', description)
        report.add_script()
        data_headers = ('Create Time', 'Modify Time','Title', 'Local Storage', 'URL')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'PikPak Downloads'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'PikPak Downloads'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No PikPak Downloads data available')

    db.close()
51 |
# Artifact registration: name -> (report category, search glob(s), parser function).
# NOTE(review): parens without a trailing comma make the glob a plain str, not a
# tuple — the loader appears to accept both; confirm.
__artifacts__ = {
    "PikPak Downloads": (
        "PikPak",
        ('*/com.pikcloud.pikpak/databases/pikpak_downloads.db*'),
        get_pikpakDownloads)
}
--------------------------------------------------------------------------------
/scripts/artifacts/pikpakPlay.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import os
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import timeline, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
7 |
def get_pikpakPlay(files_found, report_folder, seeker, wrap_text):
    """Report video playback records from PikPak's greendao database.

    Bug fixes:
    - logfunc was used on the no-data path but never imported in this
      module (NameError); it is imported locally here.
    - If no matched file ended in greendao.db, the loop fell through and
      opened whatever file came last; the function now returns instead.
    - Dropped html_no_escape=['Thumbnail Link']: that column does not
      exist in this report (copy/paste from the Cloud List artifact).
    - The database handle is now closed.
    """
    from scripts.ilapfuncs import logfunc

    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('greendao.db'):
            break  # Main database found; ignore -wal/-shm sidecars
    else:
        logfunc('No PikPak Play data available')
        return

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(last_play_timestamp/1000, 'unixepoch'),
    duration,
    played_time,
    max_played_time,
    name
    from VIDEO_PLAY_RECORD
    ''')

    all_rows = cursor.fetchall()
    data_list = []

    if len(all_rows) > 0:
        for row in all_rows:
            data_list.append((row[0],row[1],row[2],row[3],row[4]))

        description = 'PikPak Play'
        report = ArtifactHtmlReport('PikPak Play')
        report.start_artifact_report(report_folder, 'PikPak Play', description)
        report.add_script()
        data_headers = ('Last Play Timestamp', 'Duration','Played Time', 'Max Played Time', 'Name')
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'PikPak Play'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'PikPak Play'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No PikPak Play data available')

    db.close()
51 |
# Artifact registration: name -> (report category, search glob(s), parser function).
# NOTE(review): parens without a trailing comma make the glob a plain str, not a
# tuple — the loader appears to accept both; confirm.
__artifacts__ = {
    "PikPak Play": (
        "PikPak",
        ('*/com.pikcloud.pikpak/databases/greendao.db*'),
        get_pikpakPlay)
}
--------------------------------------------------------------------------------
/scripts/artifacts/powerOffReset.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
6 |
def get_powerOffReset(files_found, report_folder, seeker, wrap_text):
    # Parse Samsung power_off_reset_reason logs: pipe-separated lines carrying
    # a local timestamp+offset, an action, and a 'REASON: ...' field.

    data_list = []
    pattern = 'REASON:'

    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, "r") as f:
            data = f.readlines()
            for line in data:
                if pattern in line:
                    # Fields are pipe-separated; strip whitespace around each.
                    entry = [x.strip() for x in line.split("|")]

                    time_split = entry[0].split()

                    # Recombine the date and time tokens into one string.
                    timestamp = time_split[1]+' '+time_split[2]

                    timezone_split = []

                    # Chop the combined string into fixed 19-character pieces:
                    # piece [0] is treated as the local timestamp, piece [1]
                    # as the timezone offset.
                    # NOTE(review): assumes the combined string is longer than
                    # 19 chars so index [1] exists — confirm the log format.
                    for index in range(0, len(timestamp), 19):
                        timezone_split.append(timestamp[index : index + 19])

                    timestamp1 = timezone_split[0]
                    timezone = timezone_split[1]

                    action = entry[1]
                    # Reason text follows the 'REASON: ' label in field 3.
                    reason_split = entry[3].split(": ")
                    reason = reason_split[1]

                    data_list.append((timestamp1,timezone,action,reason))
                else:
                    continue

    num_entries = len(data_list)
    if num_entries > 0:
        report = ArtifactHtmlReport('Power Off Reset')
        report.start_artifact_report(report_folder, 'Power Off Reset')
        report.add_script()
        data_headers = ('Timestamp (Local)','Timezone Offset','Action','Reason')

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = f'Power Off Reset'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'Power Off Reset'
        timeline(report_folder, tlactivity, data_list, data_headers)

    else:
        logfunc('No Power Off Reset data available')
59 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "powerOffReset": (
        "Power Events",
        ('*/log/power_off_reset_reason.txt','*/log/power_off_reset_reason_backup.txt'),
        get_powerOffReset)
}
--------------------------------------------------------------------------------
/scripts/artifacts/rarlabPreferences.py:
--------------------------------------------------------------------------------
1 | import os
2 | import datetime
3 | import json
4 |
5 | import xml.etree.ElementTree as ET
6 | from scripts.artifact_report import ArtifactHtmlReport
7 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, abxread, checkabx, logdevinfo
8 |
def get_rarlabPreferences(files_found, report_folder, seeker, wrap_text):
    # Extract key/value pairs from the RAR app's shared-preferences XML,
    # transparently handling Android Binary XML (ABX) files.
    data_list = []

    for file_found in files_found:
        file_found = str(file_found)
        if file_found.endswith('com.rarlab.rar_preferences.xml'):

            #check if file is abx
            if (checkabx(file_found)):
                multi_root = False
                tree = abxread(file_found, multi_root)
            else:
                tree = ET.parse(file_found)
            root = tree.getroot()

            for elem in root.iter():
                name = elem.attrib.get('name')
                value = elem.attrib.get('value')
                text = elem.text
                if name is not None:
                    # History keys store a JSON list; flatten it into one
                    # aggregated string, one entry per line, for the report.
                    if name == 'ArcHistory' or name == 'ExtrPathHistory' :
                        items = json.loads(text)
                        agg = ''
                        for x in items:
                            # NOTE(review): the string literal below appears
                            # garbled (possibly a lost '<br>' separator) —
                            # verify against the upstream source.
                            agg = agg + f'{x}
'
                        data_list.append((name,agg,value))
                    else:
                        data_list.append((name,text,value))


    if data_list:
        report = ArtifactHtmlReport(f'RAR Lab Preferences')
        report.start_artifact_report(report_folder, f'RAR Lab Preferences')
        report.add_script()
        data_headers = ('Key','Text','Value')
        report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Text'])
        report.end_artifact_report()

        tsvname = f'RAR Lab Preferences'
        tsv(report_folder, data_headers, data_list, tsvname)

    else:
        logfunc(f'No RAR Lab Preferences data available')
52 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "rarlabPreferences": (
        "RAR Lab Prefs",
        ('*/com.rarlab.rar_preferences.xml'),
        get_rarlabPreferences)
}
--------------------------------------------------------------------------------
/scripts/artifacts/roles.py:
--------------------------------------------------------------------------------
1 | import xml.etree.ElementTree as ET
2 |
3 | from scripts.artifact_report import ArtifactHtmlReport
4 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
5 |
def get_roles(files_found, report_folder, seeker, wrap_text):
    """Report role-holder assignments from Android roles.xml files.

    Improvements over the previous version:
    - Each file is parsed once (it was parsed twice: once to probe for
      errors and once for real).
    - Parse errors go through logfunc instead of print (per the old TODO).
    - Paths matching none of the known layouts are skipped instead of
      reusing `user`/`ver` from a previous iteration (or raising
      NameError on the first file).
    """
    run = 0
    slash = '\\' if is_platform_windows() else '/'

    for file_found in files_found:
        file_found = str(file_found)

        data_list = []
        run = run + 1

        # The path layout tells us the Android version and the user ID.
        parts = file_found.split(slash)
        if 'mirror' in parts:
            continue  # Mirror copies duplicate the per-user data; skip.
        elif 'users' in parts:
            user = parts[-2]
            ver = 'Android 10'
        elif 'misc_de' in parts:
            user = parts[-4]
            ver = 'Android 11'
        else:
            continue  # Unrecognized path layout

        try:
            tree = ET.parse(file_found)
        except ET.ParseError:
            logfunc('Parse error - Non XML file.')
            continue

        root = tree.getroot()

        for elem in root:
            holder = ''
            role = elem.attrib['name']
            # The last child's name wins, matching the original behavior.
            for subelem in elem:
                holder = subelem.attrib['name']

            data_list.append((role, holder))

        if len(data_list) > 0:
            report = ArtifactHtmlReport('App Roles')
            report.start_artifact_report(report_folder, f'{ver} Roles_{user}')
            report.add_script()
            data_headers = ('Role', 'Holder')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'App Roles_{user}'
            tsv(report_folder, data_headers, data_list, tsvname)
60 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "roles": (
        "App Roles",
        ('*/system/users/*/roles.xml','*/misc_de/*/apexdata/com.android.permission/roles.xml'),
        get_roles)
}
67 |
--------------------------------------------------------------------------------
/scripts/artifacts/samsungSmartThings.py:
--------------------------------------------------------------------------------
1 | # Samsung SmartThings
2 | # Author: Kevin Pagano (@KevinPagno3)
3 | # Date: 2022-06-13
4 | # Artifact version: 0.0.1
5 | # Requirements: none
6 |
7 | import sqlite3
8 | import textwrap
9 | import scripts.artifacts.artGlobals
10 |
11 | from scripts.artifact_report import ArtifactHtmlReport
12 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
13 |
def get_samsungSmartThings(files_found, report_folder, seeker, wrap_text):
    """Report Quick Connect device sightings from SmartThings' QcDb database.

    Bug fix: the search pattern matches 'QcDB.db*' (capital 'B') while the
    old filter required the exact spelling 'QcDb.db', so matched databases
    could be skipped. The filename check is now case-insensitive.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.lower().endswith('qcdb.db'):
            continue  # Skip -wal/-shm sidecars and all other files

        db = open_sqlite_db_readonly(file_found)
        cursor = db.cursor()
        cursor.execute('''
        select
        datetime(timeStamp/1000,'unixepoch'),
        deviceName,
        deviceType,
        netType,
        wifiP2pMac,
        btMac,
        bleMac
        from devices
        ''')

        all_rows = cursor.fetchall()
        usageentries = len(all_rows)
        if usageentries > 0:
            report = ArtifactHtmlReport('Samsung SmartThings - Quick Connect')
            report.start_artifact_report(report_folder, 'Samsung SmartThings - Quick Connect')
            report.add_script()
            data_headers = ('Connection Timestamp','Device Name','Device Type','Net Type','Wifi P2P MAC','Bluetooth MAC','Bluetooth (LE) MAC')
            data_list = []
            for row in all_rows:
                data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6]))

            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'Samsung SmartThings - Quick Connect'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Samsung SmartThings - Quick Connect'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Samsung SmartThings - Quick Connect data available')

        db.close()
58 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "samsungSmartThings": (
        "Samsung SmartThings",
        ('*/com.samsung.android.oneconnect/databases/QcDB.db*'),
        get_samsungSmartThings)
}
65 |
--------------------------------------------------------------------------------
/scripts/artifacts/scontextLog.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_scontextLog(files_found, report_folder, seeker, wrap_text):
    """Report per-app usage sessions from Samsung's ContextLog database."""
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    CASE WHEN starttime>0 THEN datetime(starttime /1000, 'UNIXEPOCH')
    ELSE ""
    END as date2,
    CASE WHEN stoptime>0 THEN datetime(stoptime /1000, 'UNIXEPOCH')
    ELSE ""
    END as date3,
    time_zone,
    app_id,
    app_sub_id,
    duration,
    duration/1000 as duraton_in_secs
    from use_app
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Samsung Context Log')
        report.start_artifact_report(report_folder, 'Samsung Context Log')
        report.add_script()
        data_headers = ('Start Time', 'Stop Time','Timezone', 'App ID', 'APP Sub ID', 'Duration', 'Duration in Secs')
        data_list = [(row[0], row[1], row[2], row[3], row[4], row[5], row[6]) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, f'samsung contextlog')
        timeline(report_folder, f'Samsung Context Log', data_list, data_headers)
    else:
        logfunc('No Samsung Context Log data available')

    db.close()
51 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "scontextLog": (
        "App Interaction",
        ('*/com.samsung.android.providers.context/databases/ContextLog.db'),
        get_scontextLog)
}
--------------------------------------------------------------------------------
/scripts/artifacts/setupWizardinfo.py:
--------------------------------------------------------------------------------
1 | import os
2 | import datetime
3 | import xml.etree.ElementTree as ET
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
6 |
def get_setupWizardinfo(files_found, report_folder, seeker, wrap_text):
    """Report the setup-wizard completion time from setup_wizard_info.xml."""
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('setup_wizard_info.xml'):
            continue  # Skip all other files

        data_list = []
        root = ET.parse(file_found).getroot()

        for elem in root:
            item = elem.attrib
            if item['name'] == 'suw_finished_time_ms':
                # Value is milliseconds since the Unix epoch.
                epoch_secs = int(item['value']) / 1000
                timestamp = datetime.datetime.utcfromtimestamp(epoch_secs).strftime('%Y-%m-%d %H:%M:%S')
                data_list.append((timestamp, item['name']))

        if data_list:
            report = ArtifactHtmlReport('Setup_Wizard_Info.xml')
            report.start_artifact_report(report_folder, 'Setup_Wizard_Info.xml')
            report.add_script()
            data_headers = ('Timestamp','Name')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsv(report_folder, data_headers, data_list, 'Setup_Wizard_Info XML data')
            timeline(report_folder, 'Setup_Wizard_Info XML data', data_list, data_headers)
        else:
            logfunc('No Setup_Wizard_Info XML data available')
39 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "setupWizardinfo": (
        "Wipe & Setup",
        ('*/com.google.android.settings.intelligence/shared_prefs/setup_wizard_info.xml'),
        get_setupWizardinfo)
}
--------------------------------------------------------------------------------
/scripts/artifacts/shutdown_checkpoints.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
6 |
def get_shutdown_checkpoints(files_found, report_folder, seeker, wrap_text):
    """Report shutdown requests recorded in system shutdown-checkpoints files."""
    data_list = []
    pattern = 'Shutdown request from '
    pattern2 = 'epoch='

    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, "r") as f:
            for line in f.readlines():
                if pattern not in line:
                    continue

                line = line.replace('\n', '')
                # The fourth whitespace-separated token names the requestor.
                request = line.split(" ")[3]

                # Epoch milliseconds sit between 'epoch=' and the closing ')'.
                epoch = int(line.split("epoch=")[1].replace(')', ''))
                shutdown_timestamp = datetime.datetime.utcfromtimestamp(epoch/1000).strftime('%Y-%m-%d %H:%M:%S')

                data_list.append((shutdown_timestamp, request, line, file_found))

    if len(data_list) > 0:
        report = ArtifactHtmlReport('Shutdown Checkpoints')
        report.start_artifact_report(report_folder, 'Shutdown Checkpoints')
        report.add_script()
        data_headers = ('Timestamp','Requestor','Entry','Source File')

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'Shutdown Checkpoints')
        timeline(report_folder, 'Shutdown Checkpoints', data_list, data_headers)

    else:
        logfunc('No Shutdown Checkpoints data available')
52 |
# Artifact registration: name -> (report category, search glob(s), parser function).
__artifacts__ = {
    "shutdown_checkpoints": (
        "Power Events",
        ('*/system/shutdown-checkpoints/checkpoints-*'),
        get_shutdown_checkpoints)
}
--------------------------------------------------------------------------------
/scripts/artifacts/smanagerCrash.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_smanagerCrash(files_found, report_folder, seeker, wrap_text):
    """Report app crash records from Samsung Smart Manager's sm.db."""
    file_found = str(files_found[0])
    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(crash_time / 1000, "unixepoch"),
    package_name
    from crash_info
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        report = ArtifactHtmlReport('Samsung Smart Manager - Crash')
        report.start_artifact_report(report_folder, 'Samsung Smart Manager - Crash')
        report.add_script()
        data_headers = ('Timestamp','Package Name')
        data_list = [(row[0], row[1]) for row in all_rows]

        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'samsung smart manager - crash')
        timeline(report_folder, 'Samsung Smart Manager - Crash', data_list, data_headers)
    else:
        logfunc('No Samsung Smart Manager - Crash data available')

    db.close()
42 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "smanagerCrash": (
        "App Interaction",
        ('*/com.samsung.android.sm/databases/sm.db'),  # single glob string (parens alone do not make a tuple)
        get_smanagerCrash)
}
--------------------------------------------------------------------------------
/scripts/artifacts/smanagerLow.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_smanagerLow(files_found, report_folder, seeker, wrap_text):
    """Report per-app usage intervals from Samsung Smart Manager's low-power DB."""
    db_path = str(files_found[0])
    db = open_sqlite_db_readonly(db_path)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(start_time /1000, "unixepoch"),
    datetime(end_time /1000, "unixepoch"),
    id,
    package_name,
    uploaded,
    datetime(created_at /1000, "unixepoch"),
    datetime(modified_at /1000, "unixepoch")
    from usage_log
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        data_headers = ('Start Time','End Time','ID','Package Name', 'Uploaded?', 'Created', 'Modified' )
        data_list = [tuple(row) for row in all_rows]

        report = ArtifactHtmlReport('Samsung Smart Manager - Usage')
        report.start_artifact_report(report_folder, 'Samsung Smart Manager - Usage')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, db_path)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'samsung smart manager - usage')
        timeline(report_folder, 'Samsung Smart Manager - Usage', data_list, data_headers)
    else:
        logfunc('No Samsung Smart Manager - Usage data available')

    db.close()
47 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "smanagerLow": (
        "App Interaction",
        ('*/com.samsung.android.sm/databases/lowpowercontext-system-db'),  # single glob string
        get_smanagerLow)
}
--------------------------------------------------------------------------------
/scripts/artifacts/smembersAppInv.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_smembersAppInv(files_found, report_folder, seeker, wrap_text):
    """Report the installed-app inventory kept by Samsung Members (PocketGeek SDK db)."""
    db_path = str(files_found[0])
    db = open_sqlite_db_readonly(db_path)
    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(last_used / 1000, "unixepoch"),
    display_name,
    package_name,
    system_app,
    confidence_hash,
    sha1,
    classification
    from android_app
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        data_headers = ('Timestamp','Display Name','Package Name','System App?','Confidence Hash','SHA1','Classification' )
        data_list = [tuple(row) for row in all_rows]

        report = ArtifactHtmlReport('Samsung Members - Apps')
        report.start_artifact_report(report_folder, 'Samsung Members - Apps')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, db_path)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'samsung members - apps')
        timeline(report_folder, 'Samsung Members - Apps', data_list, data_headers)
    else:
        logfunc('No Samsung Members - Apps data available')

    db.close()
47 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "smembersAppInv": (
        "App Interaction",
        ('*/com.samsung.oh/databases/com_pocketgeek_sdk_app_inventory.db'),  # single glob string
        get_smembersAppInv)
}
--------------------------------------------------------------------------------
/scripts/artifacts/smembersEvents.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
6 |
def get_smembersEvents(files_found, report_folder, seeker, wrap_text):
    """Report device events recorded by Samsung Members (PocketGeek SDK db)."""
    db_path = str(files_found[0])
    db = open_sqlite_db_readonly(db_path)
    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(created_at /1000, "unixepoch"),
    type,
    value,
    in_snapshot
    FROM device_events
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        data_headers = ('Created At','Type','Value','Snapshot?' )
        data_list = [tuple(row) for row in all_rows]

        report = ArtifactHtmlReport('Samsung Members - Events')
        report.start_artifact_report(report_folder, 'Samsung Members - Events')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, db_path)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'samsung members - events')
        timeline(report_folder, 'Samsung Members - Events', data_list, data_headers)
    else:
        logfunc('No Samsung Members - Events data available')

    db.close()
44 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "smembersEvents": (
        "App Interaction",
        ('*/com.samsung.oh/databases/com_pocketgeek_sdk.db'),  # single glob string
        get_smembersEvents)
}
--------------------------------------------------------------------------------
/scripts/artifacts/suggestions.py:
--------------------------------------------------------------------------------
1 | import os
2 | import datetime
3 | import xml.etree.ElementTree as ET
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows
6 |
def get_suggestions(files_found, report_folder, seeker, wrap_text):
    """Parse setup-time entries from Android's settings-intelligence suggestions.xml.

    Each matching key carries a milliseconds-since-epoch value recording when a
    deferred setup suggestion was completed; a report is produced per file.
    """
    # The three suggestion keys whose 'value' attribute is a ms-epoch timestamp.
    # (Previously three copy-pasted if-branches with identical bodies.)
    setup_time_keys = (
        'com.android.settings.suggested.category.DEFERRED_SETUP_setup_time',
        'com.android.settings/com.android.settings.biometrics.fingerprint.FingerprintEnrollSuggestionActivity_setup_time',
        'com.google.android.setupwizard/com.google.android.setupwizard.deferred.DeferredSettingsSuggestionActivity_setup_time',
    )

    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('suggestions.xml'):
            continue # Skip all other files

        data_list = []
        tree = ET.parse(file_found)
        root = tree.getroot()

        for elem in root:
            item = elem.attrib
            if item['name'] in setup_time_keys:
                timestamp = (datetime.datetime.utcfromtimestamp(int(item['value'])/1000).strftime('%Y-%m-%d %H:%M:%S'))
                data_list.append((timestamp, item['name']))

        if data_list:
            report = ArtifactHtmlReport('Suggestions.xml')
            report.start_artifact_report(report_folder, 'Suggestions.xml')
            report.add_script()
            data_headers = ('Timestamp','Name')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = f'Suggestions XML data'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = f'Suggestions XML data'
            timeline(report_folder, tlactivity, data_list, data_headers)
        else:
            logfunc('No Suggestions XML data available')
45 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "suggestions": (
        "Wipe & Setup",
        ('*/com.google.android.settings.intelligence/shared_prefs/suggestions.xml'),  # single glob string
        get_suggestions)
}
--------------------------------------------------------------------------------
/scripts/artifacts/torThumbs.py:
--------------------------------------------------------------------------------
import datetime
import os
from pathlib import Path

from PIL import Image

from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly, media_to_html
8 |
9 |
def get_torThumbs(files_found, report_folder, seeker, wrap_text):
    """Render TOR Browser thumbnail cache files into the report.

    Each cache entry (an extensionless image file matched as *.0) is opened
    with PIL, re-saved as a PNG inside the report folder, and listed together
    with its filesystem modification time.
    """
    data_list = []
    path_to_files = ''  # directory reported as the data source
    for file_found in files_found:
        file_found = str(file_found)

        modifiedtime = os.path.getmtime(file_found)
        modifiedtime = (datetime.datetime.utcfromtimestamp(int(modifiedtime)).strftime('%Y-%m-%d %H:%M:%S'))

        filename = os.path.basename(file_found)
        location = os.path.dirname(file_found)
        newfilename = filename + '.png'
        savepath = os.path.join(report_folder, newfilename)

        # Convert the extensionless cache file to PNG so the HTML report can
        # embed it.
        img = Image.open(file_found)
        img.save(savepath, 'png')

        medialist = (savepath,)
        thumb = media_to_html(savepath, medialist, report_folder)

        if is_platform_windows():
            thumb = thumb.replace('?', '')

        data_list.append((modifiedtime, thumb, filename, location))

        # Bug fix: was os.path.dirname(filename), which is always '' because
        # filename is a bare basename; use the actual source directory.
        path_to_files = location

    if data_list:
        description = 'TOR Thumbnails'
        report = ArtifactHtmlReport('TOR Thumbnails')
        report.start_artifact_report(report_folder, 'TOR Thumbnails', description)
        report.add_script()
        data_headers = ('Modified Time','Thumbnail','Filename','Location' )
        report.write_artifact_data_table(data_headers, data_list, path_to_files, html_escape=False)
        report.end_artifact_report()

        tsvname = 'TOR Thumbnails'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = f'TOR Thumbnails'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        logfunc('No TOR Thumbnails data available')
55 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "torThumbs": (
        "TOR",
        ('*/org.torproject.torbrowser/cache/mozac_browser_thumbnails/thumbnails/*.0'),  # single glob string
        get_torThumbs)
}
--------------------------------------------------------------------------------
/scripts/artifacts/torrentResumeinfo.py:
--------------------------------------------------------------------------------
1 | import bencoding
2 | import hashlib
3 | import datetime
4 | import textwrap
5 |
6 | from scripts.artifact_report import ArtifactHtmlReport
7 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
8 |
def timestampcalc(timevalue):
    """Format a Unix epoch value (seconds) as a 'YYYY-MM-DD HH:MM:SS' UTC string."""
    return datetime.datetime.utcfromtimestamp(int(timevalue)).strftime('%Y-%m-%d %H:%M:%S')
12 |
def get_torrentResumeinfo(files_found, report_folder, seeker, wrap_text):
    """Parse bencoded BitTorrent *.resume files into an HTML/TSV report.

    For each file: compute the torrent infohash (SHA-1 of the re-encoded
    'info' dict) and aggregate every other key/value pair, skipping the
    binary 'pieces' blob. Line breaks in the aggregated cell use <br> because
    the column is rendered without HTML escaping.
    """
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        with open(file_found, 'rb') as f:
            decodedDict = bencoding.bdecode(f.read())

        aggregate = ''
        try:
            infohash = hashlib.sha1(bencoding.bencode(decodedDict[b"info"])).hexdigest()
        except Exception:  # was a bare except; the 'info' key may be absent
            infohash = ''

        for key, value in decodedDict.items():
            if key.decode() == 'info':
                for x, y in value.items():
                    if x == b'pieces':
                        pass  # raw piece hashes: binary noise, skip
                    else:
                        aggregate = aggregate + f'{x.decode()}: {y}<br>'
            elif key.decode() == 'pieces':
                pass
            elif key.decode() == 'creation date':
                aggregate = aggregate + f'{key.decode()}: {timestampcalc(value)}<br>'
            else:
                aggregate = aggregate + f'{key.decode()}: {value}<br>' #add if value is binary decode

        data_list.append((textwrap.fill(file_found, width=25),infohash,aggregate.strip()))

    # Reporting
    title = "Torrent Resume Info"
    report = ArtifactHtmlReport(title)
    report.start_artifact_report(report_folder, title)
    report.add_script()
    data_headers = ('File', 'InfoHash', 'Data')
    report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Data'])
    report.end_artifact_report()

    tsv(report_folder, data_headers, data_list, title)
55 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "torrentResumeinfo": (
        "BitTorrent",
        ('*/*.resume'),  # single glob string (parens alone do not make a tuple)
        get_torrentResumeinfo)
}
--------------------------------------------------------------------------------
/scripts/artifacts/ulrUserprefs.py:
--------------------------------------------------------------------------------
# v2-format artifact metadata consumed by the ALEAPP plugin loader; the
# processor is named explicitly via the "function" key below.
__artifacts_v2__ = {
    "urlUserprefs": {
        "name": "ULR User Prefs",
        "description": "ULR User Prefs",
        "author": "Alexis 'Brigs' Brignoni",
        "version": "1",
        "date": "2024/06/21",
        "requirements": "",
        "category": "App Semantic Locations",
        "notes": "Thanks to Josh Hickman for the research",
        "paths": (
            '*/com.google.android.gms/shared_prefs/ULR_USER_PREFS.xml'
        ),  # single glob string (parens alone do not make a tuple)
        "function": "get_urluser"
    }
}
17 | import json
18 | import xml.etree.ElementTree as ET
19 | from scripts.artifact_report import ArtifactHtmlReport
20 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, timeline, kmlgen
21 |
def get_urluser(files_found, report_folder, seeker, wrap_text):
    """Report name/value pairs from Google Play Services' ULR_USER_PREFS.xml.

    One report is produced per matching file; each XML child's 'name' and
    optional 'value' attributes become one table row.
    """
    for file_found in files_found:
        file_found = str(file_found)
        if not file_found.endswith('ULR_USER_PREFS.xml'):
            # Bug fix: non-matching files previously fell through to the
            # reporting block below, re-reporting the previous file's data
            # (or logging a spurious "no data" message).
            continue

        data_list = []
        tree = ET.parse(file_found)
        root = tree.getroot()

        for child in root:
            jsondata = child.attrib
            name = jsondata['name']
            value = jsondata.get('value', '')  # some entries carry no value
            data_list.append((name, value))

        if len(data_list) > 0:
            description = ''
            report = ArtifactHtmlReport('ULR User Preferences')
            report.start_artifact_report(report_folder, 'ULR User Preferences', description)
            report.add_script()
            data_headers = ('Name','Value')
            report.write_artifact_data_table(data_headers, data_list, file_found)
            report.end_artifact_report()

            tsvname = 'ULR User Preferences'
            tsv(report_folder, data_headers, data_list, tsvname)

            tlactivity = 'ULR User Preferences'
            timeline(report_folder, tlactivity, data_list, data_headers)

        else:
            logfunc('No ULR User Preferences Data available')
60 |
61 |
62 |
--------------------------------------------------------------------------------
/scripts/artifacts/usageHistory.py:
--------------------------------------------------------------------------------
import datetime
import xml.etree.ElementTree as ET

from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
7 |
def get_usageHistory(files_found, report_folder, seeker, wrap_text):
    """Report package usage times parsed from usage-history.xml.

    Each <pkg>/<sub-item> pair with a non-blank 'lrt' (last-run time, ms since
    epoch) attribute becomes one row.
    """
    # Bug fix: previously, when no path ended with 'usage-history.xml', the
    # last candidate (or an unbound name, for an empty list) was fed to
    # ET.parse. Select the matching file explicitly and bail out otherwise.
    file_found = None
    for candidate in files_found:
        candidate = str(candidate)
        if candidate.endswith('usage-history.xml'):
            file_found = candidate
            break

    if file_found is None:
        return

    data_list = []

    tree = ET.parse(file_found)
    root = tree.getroot()

    for elem in root:
        for subelem in elem:
            pkg = elem.attrib['name']
            subitem = subelem.attrib['name']
            time = subelem.attrib['lrt']
            if time != ' ':  # a single space marks a missing timestamp
                time = int(time)
                time = datetime.datetime.utcfromtimestamp(time/1000)
                data_list.append((time, pkg, subitem))

    if len(data_list) > 0:

        description = 'Usage History'
        report = ArtifactHtmlReport('Usage History')
        report.start_artifact_report(report_folder, 'Usage History', description)
        report.add_script()
        data_headers = ('Timestamp', 'Package', 'Subitem', )
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'Usage History'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'Usage History'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        # Bug fix: message previously read 'Usage History data available';
        # logfunc is imported at module level (it was missing, a NameError).
        logfunc('No Usage History data available')
48 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "Usagehistory": (
        "App Interaction",
        ('*/usage-history.xml'),  # single glob string (parens alone do not make a tuple)
        get_usageHistory)
}
--------------------------------------------------------------------------------
/scripts/artifacts/usagestatsVersion.py:
--------------------------------------------------------------------------------
# v2-format artifact metadata consumed by the ALEAPP plugin loader.
__artifacts_v2__ = {
    "usagestatsVersion": {
        "name": "OS Version",
        "description": "Extracts OS Version from Usagestats",
        "author": "@AlexisBrignoni",
        "creation_date": "2021-04-15",
        "last_update_date": "2025-03-07",
        "requirements": "none",
        "category": "Device Information",
        "notes": "",
        "paths": ('*/system/usagestats/*/version', '*/system_ce/*/usagestats/version'),
        "output_types": ["html", "tsv", "lava"],
        "artifact_icon": "bar-chart-2"
    }
}
16 |
17 |
18 | import scripts.artifacts.artGlobals
19 | from scripts.ilapfuncs import artifact_processor, \
20 | get_file_path, get_txt_file_content, \
21 | logfunc, device_info
22 |
23 |
@artifact_processor
def usagestatsVersion(files_found, report_folder, seeker, wrap_text):
    """Read the usagestats 'version' file and record Android build details.

    Publishes values through device_info() and also returns them as
    (Property, Property Value) rows for the standard report outputs.
    """
    source_path = get_file_path(files_found, "version")
    data_list = []

    for line in get_txt_file_content(source_path):
        parts = line.split(';')
        if len(parts) < 3:
            continue  # malformed line; need at least version;codename;build

        android_version = parts[0]
        device_info("Usagestats", "Android version", android_version)
        logfunc(f"Android version {str(android_version)}")
        # Shared global read by other artifact modules for version gating.
        scripts.artifacts.artGlobals.versionf = android_version
        data_list.append(('Android Version', android_version))

        device_info("Usagestats", "Codename", parts[1])
        data_list.append(('Codename', parts[1]))

        device_info("Usagestats", "Build version", parts[2])
        data_list.append(('Build version', parts[2]))

        # A 5-field line additionally carries a CSC (country-specific code).
        if len(parts) == 5:
            device_info("Usagestats", "Country Specific Code", parts[3])
            data_list.append(('Country Specific Code', parts[3]))

    data_headers = ('Property', 'Property Value')
    return data_headers, data_list, source_path
51 |
--------------------------------------------------------------------------------
/scripts/artifacts/userDict.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
def get_userDict(files_found, report_folder, seeker, wrap_text):
    """Report saved words from the Android user dictionary (user_dict.db)."""
    db_path = str(files_found[0])
    db = open_sqlite_db_readonly(db_path)
    cursor = db.cursor()
    cursor.execute('''
    select
    word,
    frequency,
    locale,
    appid,
    shortcut
    from words
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        data_headers = ('Word','Frequency','Locale','AppID','Shortcut' )
        data_list = [(row[0], row[1], row[2], row[3], row[4]) for row in all_rows]

        report = ArtifactHtmlReport('User Dictionary')
        report.start_artifact_report(report_folder, 'User Dictionary')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, db_path)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'user dictionary')
    else:
        logfunc('No User Dictionary data available')

    db.close()
42 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "userDict": (
        "User Dictionary",
        ('*/com.android.providers.userdictionary/databases/user_dict.db*'),  # single glob string
        get_userDict)
}
49 |
50 |
--------------------------------------------------------------------------------
/scripts/artifacts/vaulty_files.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 |
3 | from scripts.artifact_report import ArtifactHtmlReport
4 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
5 | from scripts.parse3 import ParseProto
6 |
def get_vaulty_files(files_found, report_folder, seeker, wrap_text):
    """Report media hidden by the Vaulty app (media.db).

    Pulls every Media row: record id, the two stored timestamps, the file's
    original path and its in-vault path.
    """
    title = "Vaulty - Files"

    # Media database
    db_filepath = str(files_found[0])
    conn = sqlite3.connect(db_filepath)
    try:
        c = conn.cursor()
        sql = """SELECT Media._id, datetime(Media.date_added, 'unixepoch'), datetime(Media.date_modified / 1000, 'unixepoch'), Media.path, Media._data FROM Media"""
        c.execute(sql)
        results = c.fetchall()
    finally:
        # Bug fix: the connection leaked if execute()/fetchall() raised.
        conn.close()

    if not results:
        # Consistent with the sibling artifact modules: skip the report and
        # log instead of emitting an empty table.
        logfunc('No Vaulty - Files data available')
        return

    # Data results
    data_headers = ('ID', 'Date Created', 'Date Added', 'Original Path', 'Vault Path')
    data_list = results

    # Reporting
    description = "Vaulty (com.theronrogers.vaultyfree) - Research at https://kibaffo33.data.blog/2022/03/05/decoding-vaulty/"
    report = ArtifactHtmlReport(title)
    report.start_artifact_report(report_folder, title, description)
    report.add_script()
    report.write_artifact_data_table(data_headers, data_list, db_filepath, html_escape=False)
    report.end_artifact_report()

    tsv(report_folder, data_headers, data_list, title)
33 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "vaulty_files": (
        "Vaulty",
        ('*/com.theronrogers.vaultyfree/databases/media.db'),  # single glob string
        get_vaulty_files)
}
--------------------------------------------------------------------------------
/scripts/artifacts/vlcMedia.py:
--------------------------------------------------------------------------------
import os
import sqlite3

from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly
6 |
7 |
def get_vlcMedia(files_found, report_folder, seeker, wrap_text):
    """Report VLC's media library (vlc_media.db): insertion and last-played
    times, filename, containing folder path, and favorite flag."""
    # Bug fix: previously, when no path ended with 'vlc_media.db', the last
    # candidate (or an unbound name, for an empty list) was opened as the db.
    file_found = None
    for candidate in files_found:
        candidate = str(candidate)
        if candidate.endswith('vlc_media.db'):
            file_found = candidate
            break

    if file_found is None:
        return

    db = open_sqlite_db_readonly(file_found)
    cursor = db.cursor()
    cursor.execute('''
    SELECT
    datetime(insertion_date, 'unixepoch'),
    datetime(last_played_date,'unixepoch'),
    filename,
    path,
    is_favorite
    from Media
    left join Folder
    on Media.folder_id = Folder.id_folder
    ''')

    all_rows = cursor.fetchall()
    data_list = []

    if len(all_rows) > 0:
        for row in all_rows:
            data_list.append((row[0], row[1], row[2], row[3], row[4]))

        description = 'VLC Media List'
        report = ArtifactHtmlReport('VLC Media List')
        report.start_artifact_report(report_folder, 'VLC Media List', description)
        report.add_script()
        data_headers = ('Insertion Date', 'Last Played Date', 'Filename', 'Path', 'Is Favorite?' )
        report.write_artifact_data_table(data_headers, data_list, file_found)
        report.end_artifact_report()

        tsvname = 'VLC Media'
        tsv(report_folder, data_headers, data_list, tsvname)

        tlactivity = 'VLC Media'
        timeline(report_folder, tlactivity, data_list, data_headers)
    else:
        # logfunc is now imported at module level (this line was a NameError).
        logfunc('No VLC Media data available')

    db.close()  # Bug fix: the read-only connection was never closed.
53 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "VLC": (
        "VLC",
        ('*vlc_media.db*'),  # single glob string (parens alone do not make a tuple)
        get_vlcMedia)
}
--------------------------------------------------------------------------------
/scripts/artifacts/waze.py:
--------------------------------------------------------------------------------
1 | import sqlite3
2 | import textwrap
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly, kmlgen
6 |
def get_waze(files_found, report_folder, seeker, wrap_text):
    """Report locations recently searched in Waze (user.db PLACES/RECENTS join)."""
    db_path = str(files_found[0])
    db = open_sqlite_db_readonly(db_path)

    cursor = db.cursor()
    cursor.execute('''
    select
    datetime(PLACES.created_time, 'unixepoch'),
    datetime(RECENTS.access_time, 'unixepoch'),
    RECENTS.name,
    PLACES.name as "Address",
    round(PLACES.latitude*.000001,6),
    round(PLACES.longitude*.000001,6)
    from PLACES
    join RECENTS on PLACES.id = RECENTS.place_id
    ''')

    all_rows = cursor.fetchall()
    if len(all_rows) > 0:
        data_headers = ('Created Timestamp','Accessed Timestamp','Location Name','Address','Latitude','Longitude')
        # Same columns, but the first header is renamed for the KML writer.
        data_headers_kml = ('Timestamp','Accessed Timestamp','Location Name','Address','Latitude','Longitude')
        data_list = [tuple(row) for row in all_rows]

        report = ArtifactHtmlReport('Waze - Recently Searched Locations')
        report.start_artifact_report(report_folder, 'Waze - Recently Searched Locations')
        report.add_script()
        report.write_artifact_data_table(data_headers, data_list, db_path)
        report.end_artifact_report()

        tsv(report_folder, data_headers, data_list, 'Waze - Recently Searched Locations')
        timeline(report_folder, 'Waze - Recently Searched Locations', data_list, data_headers)
        kmlgen(report_folder, 'Waze - Recently Searched Locations', data_list, data_headers_kml)

    else:
        logfunc('No Waze - Recently Searched Locations data available')

    db.close()
53 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "waze": (
        "Waze",
        ('*/com.waze/user.db*'),  # single glob string (parens alone do not make a tuple)
        get_waze)
}
60 |
--------------------------------------------------------------------------------
/scripts/artifacts/wellbeingaccount.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
6 | from scripts.parse3 import ParseProto
7 |
def get_wellbeingaccount(files_found, report_folder, seeker, wrap_text):
    """Report the Digital Wellbeing AccountData.pb protobuf.

    Shows both a pretty-printed JSON rendering and the raw parsed content.
    """
    file_found = str(files_found[0])
    content = ParseProto(file_found)

    content_json_dump = json.dumps(content, indent=4, sort_keys=True, ensure_ascii=False)
    parsedContent = str(content_json_dump).encode(encoding='UTF-8',errors='ignore')

    report = ArtifactHtmlReport('Wellbeing Account')
    report.start_artifact_report(report_folder, 'Account Data')
    report.add_script()
    data_headers = ('Protobuf Parsed Data', 'Protobuf Data')
    data_list = []
    # Wrap the pretty-printed JSON in <pre> and convert escaped newlines to
    # <br> so it renders in the unescaped HTML cell. (Markup restored here;
    # it had been stripped from this copy, leaving a broken string literal.)
    data_list.append(('<pre>' + str(parsedContent).replace("\\n", "<br>") + '</pre>', str(content)))
    report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
    report.end_artifact_report()

    tsvname = f'wellbeing account'
    tsv(report_folder, data_headers, data_list, tsvname)
26 |
# Loader registration: artifact key -> (report category, search glob, parser function).
__artifacts__ = {
    "wellbeingaccount": (
        "Digital Wellbeing",
        ('*/com.google.android.apps.wellbeing/files/AccountData.pb'),  # single glob string
        get_wellbeingaccount)
}
--------------------------------------------------------------------------------
/scripts/artifacts/wifiHotspot.py:
--------------------------------------------------------------------------------
1 | import struct
2 | import xml.etree.ElementTree as ET
3 |
4 | from scripts.artifact_report import ArtifactHtmlReport
5 | from scripts.ilapfuncs import logfunc, tsv, timeline
6 |
def get_wifiHotspot(files_found, report_folder, seeker, wrap_text):
    """Recover the device's Wi-Fi hotspot (SoftAp) SSID, passphrase and
    security type from either the legacy binary softap.conf or the newer
    WifiConfigStoreSoftAp.xml, and report one row per file with an SSID."""
    data_list = []
    for file_found in files_found:
        file_found = str(file_found)

        ssid = ''
        security_type = ''
        passphrase = ''

        if file_found.endswith('.conf'):
            # Legacy binary format: byte 5 holds the SSID length and the SSID
            # bytes follow immediately; the passphrase sits at the tail of the
            # file after a NUL byte. Offsets are empirical — TODO confirm
            # against additional softap.conf samples.
            with open(file_found, 'rb') as f:
                data = f.read()
            ssid_len = data[5]
            ssid = data[6 : 6 + ssid_len].decode('utf8', 'ignore')

            data_len = len(data)
            start_pos = -1
            # Walk backwards (negative indices) from the end of the buffer
            # until a NUL terminator is found or the start is reached.
            while data[start_pos] != 0 and (-start_pos < data_len):
                start_pos -= 1
            # Skip past the NUL (and one more byte) to the passphrase text.
            passphrase = data[start_pos + 2:].decode('utf8', 'ignore')
        else:
            # XML format: values live in the children of the <SoftAp> node.
            tree = ET.parse(file_found)
            for node in tree.iter('SoftAp'):
                for elem in node.iter():
                    if not elem.tag==node.tag:
                        #print(elem.attrib)
                        data = elem.attrib
                        name = data.get('name', '')
                        # SSID key name varies between Android releases.
                        if name == 'SSID' or name == 'WifiSsid':
                            ssid = elem.text
                        elif name == 'SecurityType':
                            security_type = data.get('value', '')
                        elif name == 'Passphrase':
                            passphrase = elem.text
        # Only emit a row when an SSID was actually recovered.
        if ssid:
            data_list.append((ssid, passphrase, security_type))

    if data_list:
        report = ArtifactHtmlReport('Wi-Fi Hotspot')
        report.start_artifact_report(report_folder, 'Wi-Fi Hotspot')
        report.add_script()
        data_headers = ('SSID', 'Passphrase', 'SecurityType')
        report.write_artifact_data_table(data_headers, data_list, ", ".join(files_found))
        report.end_artifact_report()

        tsvname = f'wifi hotspot'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No Wi-Fi Hotspot data available')
56 |
# Loader registration: artifact key -> (report category, search glob(s), parser function).
__artifacts__ = {
    "wifiHotspot": (
        "WiFi Profiles",
        ('*/misc/wifi/softap.conf', '*/misc**/apexdata/com.android.wifi/WifiConfigStoreSoftAp.xml'),
        get_wifiHotspot)
}
--------------------------------------------------------------------------------
/scripts/filetypes/application.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from .base import Type
4 |
5 |
class Wasm(Type):
    """Implements the Wasm image type matcher."""

    MIME = 'application/wasm'
    EXTENSION = 'wasm'

    # WebAssembly module header: magic '\0asm' followed by version 1
    # encoded little-endian.
    _SIGNATURE = bytearray([0x00, 0x61, 0x73, 0x6D, 0x01, 0x00, 0x00, 0x00])

    def __init__(self):
        super(Wasm, self).__init__(mime=Wasm.MIME, extension=Wasm.EXTENSION)

    def match(self, buf):
        """Return True when *buf* starts with the 8-byte Wasm header."""
        return buf[:8] == Wasm._SIGNATURE
21 |
--------------------------------------------------------------------------------
/scripts/filetypes/base.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 |
class Type(object):
    """
    Represents the file type object inherited by
    specific file type matchers.
    Provides convenient accessor and helper methods.
    """
    def __init__(self, mime, extension):
        self.__mime = mime
        self.__extension = extension

    @property
    def mime(self):
        return self.__mime

    @property
    def extension(self):
        return self.__extension

    def is_extension(self, extension):
        # Bug fix: was 'is' (identity comparison), which can return False for
        # equal strings that are not interned; compare by value instead.
        return self.__extension == extension

    def is_mime(self, mime):
        # Bug fix: same identity-vs-equality issue as is_extension.
        return self.__mime == mime

    def match(self, buf):
        # Subclasses must implement the actual signature check.
        raise NotImplementedError
30 |
--------------------------------------------------------------------------------
/scripts/filetypes/isobmff.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import codecs
3 |
4 | from .base import Type
5 |
6 |
class IsoBmff(Type):
    """
    Implements the ISO-BMFF base type.
    """
    def __init__(self, mime, extension):
        super(IsoBmff, self).__init__(mime=mime, extension=extension)

    def _is_isobmff(self, buf):
        """Return True when *buf* begins with a plausible 'ftyp' box."""
        # Bytes 0-3 are the big-endian box length; bytes 4-7 the box type.
        if len(buf) < 16 or buf[4:8] != b'ftyp':
            return False
        box_len = int(codecs.encode(buf[0:4], 'hex'), 16)
        return len(buf) >= box_len

    def _get_ftyp(self, buf):
        """Decode the ftyp box into (major_brand, minor_version, compatible_brands)."""
        ftyp_len = int(codecs.encode(buf[0:4], 'hex'), 16)
        major_brand = buf[8:12].decode(errors='ignore')
        minor_version = int(codecs.encode(buf[12:16], 'hex'), 16)
        # Remaining 4-byte fields list the compatible brands.
        compatible_brands = [
            buf[i:i + 4].decode(errors='ignore')
            for i in range(16, ftyp_len, 4)
        ]
        return major_brand, minor_version, compatible_brands
33 |
--------------------------------------------------------------------------------
/scripts/modules_to_exclude.py:
--------------------------------------------------------------------------------
1 | # This list contains the filenames of artifact scripts that take a long time to run.
2 | # These modules are deselected by default in the GUI.
3 |
modules_to_exclude = [
    'walStrings',  # long-running (see note above)
]
--------------------------------------------------------------------------------
/scripts/pyinstaller/aleapp-file_version_info.txt:
--------------------------------------------------------------------------------
1 | VSVersionInfo(
2 | ffi=FixedFileInfo(
3 | filevers=(3, 4, 0, 0),
4 | prodvers=(3, 4, 0, 0),
5 | mask=0x3f,
6 | flags=0x0,
7 | OS=0x40004,
8 | fileType=0x1,
9 | subtype=0x0,
10 | date=(0, 0)
11 | ),
12 | kids=[
13 | StringFileInfo(
14 | [
15 | StringTable(
16 | '040904b0',
17 | [StringStruct('CompanyName', 'Alexis Brignoni'),
18 | StringStruct('FileDescription', 'ALEAPP CLI'),
19 | StringStruct('FileVersion', '3.4.0'),
20 | StringStruct('InternalName', 'ALEAPP'),
21 | StringStruct('LegalCopyright', 'Result of a collaborative effort in the DFIR community.'),
22 | StringStruct('OriginalFilename', 'aleapp.exe'),
23 | StringStruct('ProductName', 'ALEAPP'),
24 | StringStruct('ProductVersion', '3.4.0')])
25 | ]),
26 | VarFileInfo([VarStruct('Translation', [1033, 1200])])
27 | ]
28 | )
--------------------------------------------------------------------------------
/scripts/pyinstaller/aleapp.spec:
--------------------------------------------------------------------------------
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller spec for the ALEAPP command-line build (one-file executable,
# Windows-style paths). All paths are relative to this directory
# (scripts/pyinstaller).

block_cipher = None

a = Analysis(['..\\..\\aleapp.py'],
             # Make the artifact plugins importable during analysis.
             pathex=['..\\scripts\\artifacts'],
             binaries=[],
             # Bundle the scripts/ tree as data so plugins are present at runtime.
             datas=[('..\\', '.\\scripts')],
             # Plugins are loaded dynamically, so PyInstaller's static
             # analysis cannot see their third-party dependencies;
             # declare them explicitly here.
             hiddenimports=[
                'bcrypt',
                'bencoding',
                'blackboxprotobuf',
                'bs4',
                'Crypto.Cipher.AES',
                'Crypto.Util.Padding',
                'fitdecode',
                'folium',
                'html.parser',
                'PIL.Image',
                'polyline',
                'simplekml',
                'xlsxwriter',
                'xmltodict'
             ],
             # Picks up hook-plugin_loader.py from this directory.
             hookspath=['.\\'],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher,
             noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
             cipher=block_cipher)
# One-file console executable; the Windows version resource is taken
# from the companion .txt file.
exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          [],
          name='aleapp',
          debug=False,
          bootloader_ignore_signals=False,
          strip=False,
          upx=True,
          upx_exclude=[],
          runtime_tmpdir=None,
          version='aleapp-file_version_info.txt',
          console=True )
--------------------------------------------------------------------------------
/scripts/pyinstaller/aleappGUI-file_version_info.txt:
--------------------------------------------------------------------------------
1 | VSVersionInfo(
2 | ffi=FixedFileInfo(
3 | filevers=(3, 4, 0, 0),
4 | prodvers=(3, 4, 0, 0),
5 | mask=0x3f,
6 | flags=0x0,
7 | OS=0x40004,
8 | fileType=0x1,
9 | subtype=0x0,
10 | date=(0, 0)
11 | ),
12 | kids=[
13 | StringFileInfo(
14 | [
15 | StringTable(
16 | '040904b0',
17 | [StringStruct('CompanyName', 'Alexis Brignoni'),
18 | StringStruct('FileDescription', 'ALEAPP GUI'),
19 | StringStruct('FileVersion', '3.4.0'),
20 | StringStruct('InternalName', 'ALEAPP'),
21 | StringStruct('LegalCopyright', 'Result of a collaborative effort in the DFIR community.'),
22 | StringStruct('OriginalFilename', 'AleappGUI.exe'),
23 | StringStruct('ProductName', 'ALEAPP'),
24 | StringStruct('ProductVersion', '3.4.0')])
25 | ]),
26 | VarFileInfo([VarStruct('Translation', [1033, 1200])])
27 | ]
28 | )
--------------------------------------------------------------------------------
/scripts/pyinstaller/aleappGUI.spec:
--------------------------------------------------------------------------------
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller spec for the ALEAPP GUI build (one-file executable,
# Windows-style paths). All paths are relative to this directory
# (scripts/pyinstaller).

block_cipher = None

a = Analysis(['..\\..\\aleappGUI.py'],
             # Make the artifact plugins importable during analysis.
             pathex=['..\\scripts\\artifacts'],
             binaries=[],
             # Bundle the scripts/ tree plus the GUI assets directory.
             datas=[('..\\', '.\\scripts'), ('..\\..\\assets', '.\\assets')],
             # Plugins are loaded dynamically, so their third-party
             # dependencies must be declared as hidden imports.
             hiddenimports=[
                'bcrypt',
                'bencoding',
                'blackboxprotobuf',
                'bs4',
                'Crypto.Cipher.AES',
                'Crypto.Util.Padding',
                'fitdecode',
                'folium',
                'html.parser',
                'PIL.Image',
                'polyline',
                'simplekml',
                'xlsxwriter',
                'xmltodict'
             ],
             # Picks up hook-plugin_loader.py from this directory.
             hookspath=['.\\'],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher,
             noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
             cipher=block_cipher)
exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          [],
          name='aleappGUI',
          debug=False,
          bootloader_ignore_signals=False,
          strip=False,
          upx=True,
          # Console is created but hidden early for GUI users.
          # NOTE(review): hide_console requires a recent PyInstaller —
          # confirm the minimum version used for release builds.
          console=True,
          hide_console='hide-early',
          disable_windowed_traceback=False,
          upx_exclude=[],
          version='aleappGUI-file_version_info.txt',
          runtime_tmpdir=None )
--------------------------------------------------------------------------------
/scripts/pyinstaller/aleappGUI_macOS.spec:
--------------------------------------------------------------------------------
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller spec for the ALEAPP GUI build on macOS: one-dir COLLECT
# wrapped in an .app BUNDLE. Paths are POSIX-style, relative to this
# directory (scripts/pyinstaller).


a = Analysis(
    ['../../aleappGUI.py'],
    # Make the artifact plugins importable during analysis.
    pathex=['../scripts/artifacts'],
    binaries=[],
    # Bundle the scripts/ tree plus the GUI assets directory.
    datas=[('../', 'scripts'), ('../../assets', 'assets')],
    # Dynamically-loaded plugin dependencies.
    # NOTE(review): the Windows specs also list 'simplekml' — confirm its
    # absence here is intentional.
    hiddenimports=[
        'bcrypt',
        'bencoding',
        'blackboxprotobuf',
        'bs4',
        'Crypto.Cipher.AES',
        'Crypto.Util.Padding',
        'fitdecode',
        'folium',
        'html.parser',
        'PIL.Image',
        'polyline',
        'xmltodict',
        'xlsxwriter',
    ],
    # NOTE(review): the Windows specs use hookspath=['.\\'] to run
    # hook-plugin_loader.py; confirm plugins are still bundled without it.
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    noarchive=False,
)
pyz = PYZ(a.pure)

exe = EXE(
    pyz,
    a.scripts,
    [],
    exclude_binaries=True,
    name='aleappGUI',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    # Windowed app: no terminal is shown when launched from the bundle.
    console=False,
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
)
coll = COLLECT(
    exe,
    a.binaries,
    a.datas,
    strip=False,
    upx=True,
    upx_exclude=[],
    name='aleappGUI',
)
# Wrap the collected directory into a signed-able .app bundle.
app = BUNDLE(
    coll,
    name='aleappGUI.app',
    icon='../../assets/icon.icns',
    bundle_identifier='4n6.brigs.ALEAPP',
    version='3.4.0',
)
--------------------------------------------------------------------------------
/scripts/pyinstaller/aleapp_macOS.spec:
--------------------------------------------------------------------------------
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller spec for the ALEAPP command-line build on macOS (one-file
# console executable). Paths are POSIX-style, relative to this directory
# (scripts/pyinstaller).


a = Analysis(
    ['../../aleapp.py'],
    # Make the artifact plugins importable during analysis.
    pathex=['../scripts/artifacts'],
    binaries=[],
    # Bundle the scripts/ tree as data so plugins are present at runtime.
    datas=[('../', 'scripts')],
    # Dynamically-loaded plugin dependencies.
    # NOTE(review): the Windows specs also list 'simplekml' — confirm its
    # absence here is intentional.
    hiddenimports=[
        'bcrypt',
        'bencoding',
        'blackboxprotobuf',
        'bs4',
        'Crypto.Cipher.AES',
        'Crypto.Util.Padding',
        'fitdecode',
        'folium',
        'html.parser',
        'PIL.Image',
        'polyline',
        'xmltodict',
        'xlsxwriter',
    ],
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    noarchive=False,
)
pyz = PYZ(a.pure)

exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.datas,
    [],
    name='aleapp',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    upx_exclude=[],
    runtime_tmpdir=None,
    console=True,
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
)
--------------------------------------------------------------------------------
/scripts/pyinstaller/hook-plugin_loader.py:
--------------------------------------------------------------------------------
import plugin_loader

# PyInstaller hook: the artifact plugins are imported dynamically at
# runtime, so PyInstaller's static analysis never sees them. Load each
# plugin here and report it — and, transitively, its own imports — as a
# hidden import so everything gets bundled.

print("Hooking plugins for pyinstaller")

loader = plugin_loader.PluginLoader()

hidden = []

for py_file in plugin_loader.PLUGINPATH.glob("*.py"):
    mod = plugin_loader.PluginLoader.load_module_lazy(py_file)
    try:
        # Probe __artifacts__ (presumably forcing the lazily-loaded module
        # to execute); unconverted plugins without it are still bundled so
        # they don't break the loader at runtime.
        mod.__artifacts__
    except AttributeError:
        pass

    # TODO: hard-codes the package path — breaks if plugins ever move.
    hidden.append("scripts.artifacts." + mod.__name__)

print(f"{len(hidden)} plugins loaded as hidden imports")

# PyInstaller reads this module-level name from the hook.
hiddenimports = hidden
25 |
--------------------------------------------------------------------------------
/scripts/version_info.py:
--------------------------------------------------------------------------------
# ALEAPP release version string.
aleapp_version = '3.4.0'

# Contributors list.
# Each row is [name, blog_url, twitter_handle, github_url]; an empty
# string means the contributor has no entry for that column.
aleapp_contributors = [
    ['Alexis Brignoni', 'https://abrignoni.com', '@AlexisBrignoni', 'https://github.com/abrignoni'],
    ['Yogesh Khatri', 'https://swiftforensics.com', '@SwiftForensics', 'https://github.com/ydkhatri'],
    ['Alex Caithness', 'https://www.linkedin.com/in/alex-caithness-a7504151/', '@kviddy', 'https://github.com/cclgroupltd'],
    ['Kevin Pagano', 'https://stark4n6.com', '@KevinPagano3', 'https://github.com/stark4n6'],
    ['Josh Hickman', 'https://thebinaryhick.blog/', '@josh_hickman1', ''],
    ['Troy Schnack', 'https://troy4n6.blogspot.com/', '@TroySchnack', ''],
    ['B Krishna Sai Nihith', 'https://g4rud4.gitlab.io', '@_Nihith', 'https://github.com/bolisettynihith'],
    ['Geraldine Blay', 'https://gforce4n6.blogspot.com', '@i_am_the_gia', ''],
    ['Bo Amos', '', '@Bo_Knows_65', ''],
    ['Andrea Canepa', '', '', 'https://github.com/A-725-K'],
    ['Incidentalchewtoy', 'https://theincidentalchewtoy.wordpress.com/', '@4n6chewtoy', ''],
    ['LoicForensics', '', '', ''],
    ['Fabian Nunes', 'https://www.linkedin.com/in/fabian-nunes/', '', 'https://github.com/fabian-nunes'],
    ['Evangelos Dragonas', 'https://atropos4n6.com/', '@theAtropos4n6', 'https://github.com/theAtropos4n6'],
    ['James Habben', 'https://4n6ir.com/', '@JamesHabben', 'https://github.com/JamesHabben'],
    ['Matt Beers', 'https://www.linkedin.com/in/mattbeersii', '', 'https://github.com/dabeersboys'],
    ['Heather Charpentier', 'https://www.linkedin.com/in/heather-charpentier-bb28b031/', '', 'https://github.com/charpy4n6'],
    ['Panos Nakoutis', '', '@4n6equals10', ''],
    ['Johann Polewczyk', 'https://www.linkedin.com/in/johann-polewczyk-6a905425/', '@johannplw', 'https://github.com/Johann-PLW'],
    ['Bruno Fischer', 'https://german4n6.blogspot.com/', '', 'https://github.com/BrunoFischerGermany'],
    ['Marco Neumann', 'https://bebinary4n6.blogspot.com/', '@kalinko4n6', 'https://github.com/kalink0'],
    ['Marc Seguin', 'https://segumarc.com', '@segumarc', 'https://github.com/segumarc'],
    ['Anthony Reince', 'https://www.linkedin.com/in/anthony-reince-a60115239/', '', ''],
]
31 |
--------------------------------------------------------------------------------
/zCaseDataExample.lcasedata:
--------------------------------------------------------------------------------
1 | {"leapp": "case_data", "case_data_values": {"Case Number": "0123-45-6789", "Agency": "The Justice League", "Examiner": "Victor Stone"}}
--------------------------------------------------------------------------------
/zProfileExample.alprofile:
--------------------------------------------------------------------------------
1 | {"leapp": "aleapp", "format_version": 1, "plugins": ["AirGuard", "atrackerdetect", "BadooChat", "BadooConnections", "Call Logs", "Call logs "]}
--------------------------------------------------------------------------------