├── logs
└── gitkeep.keep
├── kml_files
└── gitkeep.keep
├── reports
└── gitkeep.keep
├── analysis_logs
└── gitkeep.keep
├── ignore_lists
├── ssid_list.json
└── mac_list.json
├── secure_credentials
└── gitkeep.keep
├── surveillance_reports
└── gitkeep.keep
├── cyt_ng_logo.png
├── ignore_list_ssid.py
├── requirements.txt
├── start_kismet_clean.sh
├── config.json
├── monitor.sh
├── start_gui.sh
├── LICENSE
├── SETUP.md
├── create_ignore_list.py
├── migrate_credentials.py
├── BLACKHAT_ARSENAL.md
├── ignore_list.py
├── blackhat_demo.py
├── chasing_your_tail.py
├── secure_ignore_loader.py
├── secure_database.py
├── README.md
├── secure_credentials.py
├── probe_analyzer.py
├── CLAUDE.md
├── input_validation.py
├── secure_main_logic.py
├── surveillance_analyzer.py
├── cyt_gui.py
└── gps_tracker.py
/logs/gitkeep.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/kml_files/gitkeep.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/reports/gitkeep.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/analysis_logs/gitkeep.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ignore_lists/ssid_list.json:
--------------------------------------------------------------------------------
1 | []
--------------------------------------------------------------------------------
/secure_credentials/gitkeep.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/surveillance_reports/gitkeep.keep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/cyt_ng_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ArgeliusLabs/Chasing-Your-Tail-NG/HEAD/cyt_ng_logo.png
--------------------------------------------------------------------------------
/ignore_list_ssid.py:
--------------------------------------------------------------------------------
# SSIDs that should not trigger alerts.
# Fix: 'Platinum Reserve' was listed twice; duplicates are redundant for a
# membership check, so the list is deduplicated.
non_alert_ssid_list = ['CR-Guest-Wireless', 'gst02', 'Platinum Reserve', 'AG2F']
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # Core dependencies for Chasing Your Tail (CYT)
2 | requests>=2.28.0
3 | cryptography>=40.0.0
4 |
5 | # Optional dependencies for enhanced features
6 | # Uncomment as needed:
7 |
8 | # For web dashboard (future enhancement)
9 | # flask>=2.3.0
10 | # flask-socketio>=5.3.0
11 |
12 | # For advanced data analysis
13 | # pandas>=1.5.0
14 | # numpy>=1.24.0
15 | # matplotlib>=3.7.0
16 |
17 | # For testing
18 | # pytest>=7.0.0
19 | # pytest-cov>=4.0.0
--------------------------------------------------------------------------------
/start_kismet_clean.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# TRULY CLEAN Kismet startup - NO pkill commands whatsoever!
# Starts Kismet as a daemon on wlan1 and reports whether it came up.
# Deliberately performs no process cleanup so existing captures are untouched.
cd /home/matt/Desktop/cytng

echo "$(date): Starting Kismet without any cleanup..."

# Just start Kismet directly - no process killing!
# --daemonize backgrounds Kismet; sudo is required for raw capture.
sudo /usr/local/bin/kismet -c wlan1 --daemonize

# Give the daemon a moment to initialize before checking.
sleep 3

# pgrep -f matches the full command line of any running kismet process.
if pgrep -f kismet >/dev/null; then
    echo "SUCCESS - Kismet running"
    echo "Web interface: http://localhost:2501"
else
    echo "FAILED - Kismet not running"
fi
--------------------------------------------------------------------------------
/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "paths": {
3 | "base_dir": ".",
4 | "log_dir": "logs",
5 | "kismet_logs": "/home/matt/kismet_logs/*.kismet",
6 | "ignore_lists": {
7 | "mac": "mac_list.py",
8 | "ssid": "ssid_list.py"
9 | }
10 | },
11 | "timing": {
12 | "check_interval": 60,
13 | "list_update_interval": 5,
14 | "time_windows": {
15 | "recent": 5,
16 | "medium": 10,
17 | "old": 15,
18 | "oldest": 20
19 | }
20 | },
21 | "search": {
22 | "lat_min": 31.3,
23 | "lat_max": 37.0,
24 | "lon_min": -114.8,
25 | "lon_max": -109.0
26 | }
27 | }
--------------------------------------------------------------------------------
/monitor.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Continuously (every 10 s) report whether Kismet is running and whether any
# wireless interface is in monitor mode. Runs forever; stop with Ctrl-C.

GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m' # no color

while true; do
    # Fix: the old `ps aux | grep -i kismet | wc -l` counted the grep itself,
    # and `[[ $n > 2 ]]` compared strings lexicographically (so "10" < "2").
    # pgrep avoids both problems.
    if pgrep -f kismet >/dev/null; then
        echo -e "${GREEN}kismet up${NC}"
    else
        echo -e "${RED}kismet down${NC}"
    fi

    # Fix: the commands were previously joined with '&', which backgrounded
    # them inside the command substitution and could drop their output before
    # the pattern match ran. Run them sequentially; silence errors for
    # interfaces that don't exist.
    string=$(iwconfig wlan0 2>/dev/null; iwconfig wlan1 2>/dev/null; iwconfig wlan1mon 2>/dev/null)
    if [[ $string == *"Mode:Monitor"* ]]; then
        echo -e "${GREEN}Monitor Mode Detected${NC}"
        echo
    else
        echo -e "${RED}Monitor Mode Not Detected${NC}"
        echo
    fi
    sleep 10
done
--------------------------------------------------------------------------------
/ignore_lists/mac_list.json:
--------------------------------------------------------------------------------
1 | [
2 | "28:34:FF:47:34:BA",
3 | "28:6D:97:F3:47:8F",
4 | "2C:CF:67:38:B4:02",
5 | "34:5E:08:35:00:BC",
6 | "3C:64:CF:94:2A:16",
7 | "58:D3:49:54:5A:4D",
8 | "60:3D:26:B5:4D:E1",
9 | "60:3D:26:B5:4D:E3",
10 | "60:3D:26:B5:4D:E4",
11 | "70:13:01:68:0D:08",
12 | "72:13:01:69:0D:09",
13 | "72:13:01:69:0D:0C",
14 | "72:13:01:69:0D:0E",
15 | "72:13:01:E4:07:40",
16 | "92:9C:8E:13:A3:62",
17 | "A6:3E:F9:B6:41:31",
18 | "AA:F0:13:AD:96:9E",
19 | "B4:FB:E4:38:13:84",
20 | "B6:05:3F:98:84:22",
21 | "B8:5E:71:40:49:95",
22 | "C6:05:3F:98:84:22",
23 | "D4:A3:3D:75:DB:6F",
24 | "D8:9C:8E:13:A3:5F",
25 | "E0:BB:9E:18:FC:15",
26 | "E6:05:3F:98:84:22",
27 | "F6:05:3F:98:84:22",
28 | "FC:EC:DA:DC:3A:7C"
29 | ]
--------------------------------------------------------------------------------
/start_gui.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Start CYT GUI with proper environment.
# Intended to run from a boot-time autostart: waits for the desktop,
# then launches the GUI in the background with all output logged.

# Give the desktop environment time to come up.
sleep 120

# Work from the project directory.
cd /home/matt/Desktop/cytng

# Environment needed to reach the X display from a non-interactive session.
export DISPLAY=:0
export XDG_RUNTIME_DIR=/run/user/$(id -u)

# Poll until the X server answers; give up after 20 attempts (15 s apart).
timeout_count=0
until xset q &>/dev/null; do
    echo "$(date): Waiting for X server... (attempt $timeout_count)" >> gui_startup.log
    sleep 15
    timeout_count=$((timeout_count + 1))
    if (( timeout_count > 20 )); then
        echo "$(date): ERROR - X server timeout after 300 seconds" >> gui_startup.log
        exit 1
    fi
done

echo "$(date): X server available, starting GUI..." >> gui_startup.log

# Launch the GUI in the background, logging stdout and stderr.
python3 cyt_gui.py >> gui_startup.log 2>&1 &

# Record that the launch was issued.
echo "$(date): CYT GUI started successfully" >> gui_startup.log
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 @matt0177
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/SETUP.md:
--------------------------------------------------------------------------------
1 | # CYT Setup Guide
2 |
3 | ## Quick Start for BlackHat Demo
4 |
5 | ### 1. Install Dependencies
6 | ```bash
7 | pip3 install -r requirements.txt
8 | ```
9 |
10 | ### 2. Security Setup (REQUIRED)
11 | ```bash
12 | # Migrate credentials to secure storage
13 | python3 migrate_credentials.py
14 |
15 | # Verify security hardening
16 | python3 chasing_your_tail.py
17 | # Should show: "🔒 SECURE MODE: All SQL injection vulnerabilities have been eliminated!"
18 | ```
19 |
20 | ### 3. Configuration
21 | Edit `config.json` with your Kismet database path:
22 | ```json
{
  "paths": {
    "kismet_logs": "/path/to/your/kismet/*.kismet"
  }
}
26 | ```
27 |
28 | ### 4. Run Analysis
29 | ```bash
30 | # Start GUI interface
31 | python3 cyt_gui.py
32 |
33 | # Or run surveillance analysis directly
34 | python3 surveillance_analyzer.py
35 |
36 | # For demo with simulated GPS data
37 | python3 surveillance_analyzer.py --demo
38 | ```
39 |
40 | ### 5. View Results
41 | - **Reports**: Check `surveillance_reports/` for markdown and HTML files
42 | - **Visualizations**: Open `.kml` files from `kml_files/` in Google Earth
43 |
44 | ## BlackHat Arsenal Demo Features
45 |
46 | - ✅ **Spectacular KML Visualization** - Professional Google Earth integration
47 | - ✅ **Multi-format Reports** - Markdown, HTML, and KML outputs
48 | - ✅ **Security Hardened** - SQL injection prevention, encrypted credentials
49 | - ✅ **GPS Integration** - Automatic coordinate extraction from Kismet
50 | - ✅ **Multi-location Tracking** - Detects devices following across locations
51 | - ✅ **Professional GUI** - Enhanced Tkinter interface with analysis buttons
52 |
53 | ## Documentation
54 | - **README.md** - Complete user documentation
55 | - **CLAUDE.md** - Technical developer documentation
56 |
57 | ## Support
58 | GitHub: https://github.com/matt0177/cyt
--------------------------------------------------------------------------------
/create_ignore_list.py:
--------------------------------------------------------------------------------
"""Build MAC and probed-SSID ignore lists from the newest Kismet database.

Reads the Kismet DB glob pattern from config.json, collects every device MAC
and every non-empty probed SSID, and writes them as Python list literals into
the files named by config['paths']['ignore_lists'] under ./ignore_lists/.
"""
import sqlite3
import glob
import json
import os
import pathlib
import sys

# Load config
with open('config.json', 'r') as f:
    config = json.load(f)

### Check for/make subdirectories for logs, ignore lists etc.
ignore_dir = pathlib.Path('./ignore_lists')
ignore_dir.mkdir(parents=True, exist_ok=True)

non_alert_list = []
non_alert_ssid_list = []

### Get DB path (glob pattern) from config
db_path = config['paths']['kismet_logs']

### Find newest Kismet DB file.
# Fix: max() on an empty list raised a bare ValueError; fail with a clear
# message instead when the glob matches nothing.
list_of_files = glob.glob(db_path)
if not list_of_files:
    sys.exit('No Kismet database files found at: {}'.format(db_path))
latest_file = max(list_of_files, key=os.path.getctime)
print('Pulling from: {}'.format(latest_file))

con = sqlite3.connect(latest_file)  ## kismet DB to point at


def sql_fetch(con):
    """Append every device MAC from the devices table to non_alert_list."""
    cursorObj = con.cursor()
    cursorObj.execute("SELECT devmac FROM devices")
    for row in cursorObj.fetchall():
        # Fix: each row is a 1-tuple; take the value directly instead of the
        # old str(row).replace(...) round-trip, which would corrupt any value
        # containing quotes, commas, or parentheses.
        non_alert_list.append(row[0])


def grab_all_probes(con):
    """Append every non-empty probed SSID to non_alert_ssid_list."""
    cursorObj = con.cursor()
    cursorObj.execute("SELECT devmac, type, device FROM devices")
    for row in cursorObj.fetchall():
        raw_device_json = json.loads(row[2])
        # Only rows whose device JSON carries a probed-SSID record are used.
        if 'dot11.probedssid.ssid' in str(row):
            ssid_probed_for = raw_device_json["dot11.device"]["dot11.device.last_probed_ssid_record"]["dot11.probedssid.ssid"]
            if ssid_probed_for:  # skip empty SSIDs (broadcast probes)
                non_alert_ssid_list.append(ssid_probed_for)


sql_fetch(con)
print('Added {} MACs to the ignore list.'.format(len(non_alert_list)))

# Write to ignore_lists directory; 'with' guarantees the file is closed.
with open(ignore_dir / config['paths']['ignore_lists']['mac'], "w") as ignore_list:
    ignore_list.write("ignore_list = " + str(non_alert_list))

grab_all_probes(con)
print('Added {} Probed SSIDs to the ignore list.'.format(len(non_alert_ssid_list)))

with open(ignore_dir / config['paths']['ignore_lists']['ssid'], "w") as ignore_list_ssid:
    ignore_list_ssid.write("non_alert_ssid_list = " + str(non_alert_ssid_list))

# Fix: the connection was previously never closed.
con.close()
--------------------------------------------------------------------------------
/migrate_credentials.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Credential Migration Tool for CYT
4 | Migrates API keys from insecure config.json to encrypted storage
5 | """
6 | import json
7 | import sys
8 | import os
9 | from pathlib import Path
10 | from secure_credentials import SecureCredentialManager
11 |
def main():
    """Migrate API keys from plaintext config.json into encrypted storage.

    Reads config.json, stores any credentials found under its 'api_keys'
    section via SecureCredentialManager, writes a backup of the original
    config plus a sanitized copy with the keys removed, and prints the
    follow-up steps. Exits with status 1 if config.json is missing.
    """
    print("🔐 CYT Credential Migration Tool")
    print("=" * 50)

    config_file = 'config.json'
    if not Path(config_file).exists():
        print(f"❌ Error: {config_file} not found")
        sys.exit(1)

    # Load current config
    with open(config_file, 'r') as f:
        config = json.load(f)

    # Check if there are credentials to migrate
    if 'api_keys' not in config:
        print("✅ No API keys found in config.json - already secure!")
        return

    api_keys = config['api_keys']
    # Heuristic: any sub-dict whose stringified form mentions 'token' or
    # 'key' (e.g. the 'encoded_token' field) is assumed to hold a credential.
    if not any('token' in str(value).lower() or 'key' in str(value).lower()
              for value in api_keys.values() if isinstance(value, dict)):
        print("✅ No credentials found to migrate")
        return

    print("⚠️  Found API keys in config.json - this is a security risk!")
    print("🔒 Migrating to encrypted storage...")

    # Initialize credential manager
    cred_manager = SecureCredentialManager()

    # Migrate WiGLE credentials
    if 'wigle' in api_keys:
        wigle_config = api_keys['wigle']
        if 'encoded_token' in wigle_config:
            print("\n📡 Migrating WiGLE API token...")
            cred_manager.store_credential('wigle', 'encoded_token', wigle_config['encoded_token'])
            print("✅ WiGLE API token stored securely")

    # Remove API keys from config. The shallow top-level copy is enough:
    # pop() only removes the key from config's own dict, so config_backup
    # still references the original 'api_keys' mapping.
    config_backup = config.copy()
    config.pop('api_keys', None)

    # Create backup of original config
    backup_file = 'config_backup.json'
    with open(backup_file, 'w') as f:
        json.dump(config_backup, f, indent=2)
    print(f"💾 Original config backed up to: {backup_file}")

    # Save sanitized config
    sanitized_file = 'config_secure.json'
    with open(sanitized_file, 'w') as f:
        json.dump(config, f, indent=2)
    print(f"🛡️  Sanitized config saved to: {sanitized_file}")

    print("\n🔐 Migration Complete!")
    print("=" * 50)
    print("Next steps:")
    print("1. Review the sanitized config: config_secure.json")
    print("2. Replace config.json with config_secure.json:")
    print("   mv config_secure.json config.json")
    print("3. Securely delete the backup if not needed:")
    # Fix: previously printed 'shred -vfz-3', which is not valid shred
    # syntax; the overwrite-iteration count is passed with '-n 3'.
    print("   shred -vfz -n 3 config_backup.json")
    print("\n⚠️  IMPORTANT: Your API keys are now encrypted and require a master password!")

if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/BLACKHAT_ARSENAL.md:
--------------------------------------------------------------------------------
1 | # CYT - BlackHat Arsenal 2025
2 |
3 | ## Chasing Your Tail: Advanced Wi-Fi Surveillance Detection
4 |
5 | ### 🎯 BlackHat Arsenal Presentation Summary
6 |
7 | **Tool Category**: Wireless Security / Surveillance Detection
8 | **Language**: Python 3
9 | **Platform**: Linux
10 | **License**: MIT
11 |
12 | ### 🚀 Key Features for Arsenal Demo
13 |
14 | #### 1. **Spectacular Google Earth Visualization**
15 | - Professional KML generation with advanced styling
16 | - Color-coded persistence level markers (green/yellow/red)
17 | - Device tracking paths showing movement correlation
18 | - Rich interactive balloon content with device intelligence
19 | - Activity heatmaps and surveillance intensity zones
20 |
21 | #### 2. **Advanced Surveillance Detection**
22 | - Multi-location device tracking algorithms
23 | - Persistence scoring (0.0-1.0) for threat assessment
24 | - Temporal analysis with time-based pattern detection
25 | - GPS correlation with automatic coordinate extraction
26 | - Real-time Kismet database integration
27 |
28 | #### 3. **Security-Hardened Architecture**
29 | - SQL injection prevention with parameterized queries
30 | - Encrypted credential management for API keys
31 | - Input validation and sanitization throughout
32 | - Secure ignore list loading (eliminates exec() vulnerabilities)
33 | - Comprehensive audit logging
34 |
35 | #### 4. **Professional Output Formats**
36 | - Markdown reports with detailed analysis
37 | - HTML reports with custom CSS styling (pandoc integration)
38 | - KML files for Google Earth visualization
39 | - JSON export for further analysis
40 |
41 | ### 🎪 Live Demo Flow
42 |
43 | 1. **Security Verification** - Show hardened architecture
44 | 2. **GUI Demonstration** - Enhanced Tkinter interface
45 | 3. **Analysis Execution** - Real surveillance detection
46 | 4. **Google Earth Visualization** - Spectacular KML display
47 | 5. **Multi-location Tracking** - Device following demonstration
48 |
49 | ### 📁 Arsenal Package Contents
50 |
51 | #### Core Files
52 | - `surveillance_analyzer.py` - Main analysis engine
53 | - `gps_tracker.py` - GPS integration & KML generation
54 | - `surveillance_detector.py` - Persistence detection algorithms
55 | - `cyt_gui.py` - Enhanced GUI interface
56 | - `chasing_your_tail.py` - Real-time monitoring engine
57 |
58 | #### Security Components
59 | - `secure_database.py` - SQL injection prevention
60 | - `secure_credentials.py` - Encrypted credential management
61 | - `input_validation.py` - Input sanitization
62 | - `migrate_credentials.py` - Credential migration tool
63 |
64 | #### Documentation
65 | - `README.md` - Complete user documentation
66 | - `CLAUDE.md` - Technical developer documentation
67 | - `SETUP.md` - Quick start guide
68 | - `BLACKHAT_ARSENAL.md` - This presentation summary
69 |
70 | #### Demo Materials
71 | - `blackhat_demo.py` - Automated demo script
72 | - `demo_following_detection.kml` - Sample Google Earth visualization
73 | - `requirements.txt` - Python dependencies
74 |
75 | ### 🛡️ Use Cases Demonstrated
76 |
77 | 1. **Personal Safety** - Detecting stalking/following behavior
78 | 2. **Corporate Security** - Identifying surveillance devices
79 | 3. **Network Administration** - Monitoring wireless environments
80 | 4. **Security Research** - Analyzing device behavior patterns
81 |
82 | ### 🌟 What Makes CYT Special
83 |
84 | - **First tool** to provide spectacular Google Earth visualization for Wi-Fi surveillance
85 | - **Security-first approach** with comprehensive hardening
86 | - **Real-world applicability** with GPS integration and multi-location tracking
87 | - **Professional presentation** suitable for corporate and research environments
88 |
89 | ### 🎯 Target Audience
90 |
91 | - Security professionals and researchers
92 | - Network administrators
93 | - Personal safety advocates
94 | - Wireless security enthusiasts
95 | - Corporate security teams
96 |
97 | ### 📊 Technical Specifications
98 |
99 | - **Python 3.6+** required
100 | - **Kismet integration** for packet capture
101 | - **Bluetooth GPS support** for location tracking
102 | - **WiGLE API integration** for SSID geolocation
103 | - **Cross-platform compatibility** (Linux focus)
104 |
105 | ---
106 |
107 | **Ready for BlackHat Arsenal 2025!** 🚀
108 |
109 | **GitHub**: https://github.com/matt0177/cyt
110 | **Author**: @matt0177
111 | **Contact**: Available during BlackHat Arsenal session
--------------------------------------------------------------------------------
/ignore_list.py:
--------------------------------------------------------------------------------
1 | ignore_list = ['01:F0:66:6C:5D:45', '04:D2:89:D9:E3:BF', '0E:49:23:28:08:70', '10:2B:41:1D:A4:09', '12:0B:8A:40:FE:BA', '1A:AB:D8:BC:51:F5', '1D:2E:80:36:0A:7B', '23:5A:1E:2A:13:08', '26:DF:D8:EA:9E:1F', '2C:83:08:EC:8A:7D', '3D:54:8A:5F:8D:D4', '3D:5E:07:F7:56:38', '43:E9:AF:C2:21:2F', '44:2D:6A:FF:E5:92', '49:92:4A:E9:17:CA', '4B:4C:40:87:32:60', '4D:0C:F5:73:AC:F3', '4F:47:7C:5A:BE:2A', '50:32:37:CA:3B:F7', '52:1D:4C:98:C6:E3', '5A:C0:AE:61:C8:8D', '5E:F3:3E:08:2C:25', '64:1C:AE:64:79:81', '65:70:9B:3F:DD:13', '72:C6:D9:A0:AC:F4', '74:89:E0:7B:B7:A3', '74:EE:AE:3B:72:97', '79:81:F6:1F:55:94', '7A:15:94:6A:B4:E2', '7C:F5:DF:B2:60:C4', '90:D7:15:9C:2E:65', 'D4:1C:43:03:CF:2D', 'D5:E2:E7:68:6E:DE', 'D6:52:67:3E:59:3E', 'F0:6A:7F:03:6E:09', 'F5:C0:00:05:DE:D8', 'FB:9A:AE:47:5D:2B', '00:0B:78:66:E1:9B', '00:15:6D:FC:F4:D7', '00:17:88:65:56:D1', '00:24:A3:86:0B:B7', '00:25:00:FF:94:73', '00:9D:6B:39:3E:55', '00:F4:8D:8A:72:65', '06:18:D6:E3:89:96', '06:A0:83:0B:28:9F', '06:DB:48:07:1B:11', '0C:B2:B7:45:AF:7C', '10:2B:41:1D:A4:08', '10:5F:06:3D:6B:30', '10:5F:06:3D:6B:35', '18:48:CA:6D:08:F2', '18:B4:30:7E:52:5F', '18:E8:29:26:F1:BA', '1C:F2:9A:5C:94:03', '20:DF:B9:59:4C:5B', '20:F1:9E:F3:84:73', '20:F3:75:BE:A8:B4', '24:0A:C4:FB:D4:BC', '24:5A:4C:3E:E7:D3', '24:5A:4C:3E:EB:87', '24:A4:3C:A0:CE:CF', '24:A4:3C:BA:55:F2', '24:EC:99:3F:81:D2', '26:52:1A:79:2F:80', '30:24:32:BF:C8:36', '38:D2:69:B9:1B:7D', '3E:B0:8C:B8:6A:9B', '42:F1:9E:F3:84:73', '44:65:0D:2D:73:AF', '44:D9:E7:52:D2:B3', '44:D9:E7:FF:16:28', '44:D9:E7:FF:16:2A', '46:F9:D1:97:A7:95', '50:32:37:CC:04:B2', '52:08:A9:A9:13:31', '52:7A:F3:39:44:0A', '58:D3:49:16:7D:27', '58:D3:49:16:B7:5A', '58:D3:49:1D:7A:4B', '58:D3:49:54:5A:4D', '5C:86:C1:0A:E6:F7', '5C:A5:BC:66:3D:83', '5C:A5:BC:66:3D:84', '5C:A5:BC:66:3D:85', '5C:A5:BC:66:3D:86', '5C:A5:BC:66:3D:87', '5C:A5:BC:66:3D:88', '5C:A5:BC:66:3D:8D', '5C:A5:BC:66:3D:8E', '5C:A5:BC:66:96:63', '5C:A5:BC:66:96:65', '5C:A5:BC:66:96:66', '5C:A5:BC:66:96:67', 
'5C:A5:BC:66:96:68', '5C:A5:BC:66:96:6E', '5C:A5:BC:66:B5:C3', '5C:A5:BC:66:B5:C4', '5C:A5:BC:66:B5:C5', '5C:A5:BC:66:B5:C6', '5C:A5:BC:66:B5:C7', '5C:A5:BC:66:B5:C8', '5C:A5:BC:66:B5:CE', '5E:C3:EC:BC:FA:97', '60:3D:26:B5:4D:DD', '60:3D:26:B5:4D:DE', '60:3D:26:B5:4D:E1', '60:3D:26:B5:4D:E3', '60:3D:26:B5:4D:E6', '60:3D:26:B5:4D:E8', '60:3D:26:B5:4D:E9', '60:3D:26:B5:4D:EA', '60:3D:26:B5:4D:EB', '62:A4:37:CB:D5:84', '64:1C:AE:64:79:80', '6C:56:97:9E:C4:6A', '6E:56:97:9E:44:6A', '70:3A:CB:12:88:FB', '70:3A:CB:5A:06:67', '70:3A:CB:5A:06:93', '70:3A:CB:5A:09:11', '70:CA:97:1C:45:60', '70:CA:97:1C:59:40', '70:CA:97:1C:59:60', '70:CA:97:1C:59:E0', '70:CA:97:1C:61:C0', '70:CA:97:1C:67:C0', '70:CA:97:1C:67:E0', '70:CA:97:1C:78:40', '70:CA:97:1C:7E:C0', '70:CD:0D:7F:5C:1A', '72:3A:CB:12:88:F8', '72:3A:CB:55:B1:F5', '72:3A:CB:5A:06:90', '72:3A:CB:5A:09:10', '74:4D:28:82:81:BE', '76:1B:92:E6:5C:8E', '78:8A:20:94:78:5C', '78:8A:20:9E:48:8B', '78:8A:20:CA:A0:A1', '78:A6:E1:2B:F7:34', '7C:38:66:3F:2D:34', '7E:65:F2:AE:66:DA', '80:2A:A8:24:D9:5F', '80:2A:A8:2E:8B:EB', '80:2A:A8:60:2F:0F', '80:2A:A8:72:0B:34', '80:2A:A8:B2:00:37', '82:2A:A8:CE:57:C5', '82:7A:CA:50:F8:E5', '84:18:3A:04:00:C8', '84:18:3A:04:00:C9', '84:18:3A:04:00:CC', '84:18:3A:04:00:CD', '84:18:3A:0A:CB:08', '84:18:3A:0A:CB:09', '84:18:3A:44:00:C8', '84:18:3A:44:00:CC', '84:18:3A:4A:CB:08', '84:18:3A:84:00:C8', '84:18:3A:84:00:CC', '84:18:3A:8A:CB:08', '84:18:3A:C4:00:C8', '84:18:3A:C4:00:CC', '84:18:3A:CA:CB:08', '84:CC:A8:66:99:C8', '88:57:1D:16:59:78', '8E:A7:48:4B:97:8E', '90:CD:B6:14:DC:DF', '92:F2:9E:97:95:E8', '94:A6:7E:DF:57:76', '98:6D:35:0F:97:0B', '9A:ED:E7:7A:77:BE', 'A4:83:E7:19:B4:E0', 'A8:03:2A:D9:D8:FD', 'AC:DB:48:07:1B:12', 'AC:DB:48:07:1B:13', 'AE:DB:48:07:1B:12', 'AE:DB:48:07:1B:13', 'B0:5A:DA:F4:67:CD', 'B4:B6:86:98:E8:F4', 'B4:FB:E4:38:12:0D', 'B4:FB:E4:38:13:84', 'B6:B6:86:98:68:F4', 'B6:DB:48:07:1B:12', 'B6:DB:48:07:1B:13', 'B8:27:EB:DE:FB:84', 'BA:3D:8C:86:31:14', 'BA:DB:FD:17:63:75', 
'C0:97:27:8C:CD:1B', 'C0:97:27:8C:CD:24', 'C0:97:27:8C:CD:27', 'C0:97:27:8C:CD:58', 'C0:C5:20:A0:38:05', 'C0:C5:20:A8:22:7F', 'C0:C5:20:A8:67:55', 'C0:C5:20:A8:6C:1D', 'C0:C5:20:A8:AC:85', 'C0:C5:20:AA:04:8F', 'C0:C5:20:AA:0B:61', 'C0:C5:20:AA:0D:E9', 'C0:C5:20:BA:FA:49', 'C0:C5:20:BB:03:29', 'C0:C5:20:BB:09:C9', 'C0:C5:20:BB:0B:09', 'C0:C5:20:BB:0C:69', 'C4:41:1E:CE:8C:EC', 'C4:41:1E:CE:9D:7C', 'C4:41:1E:CE:C7:A8', 'C8:03:F5:22:61:90', 'C8:03:F5:22:61:C8', 'C8:03:F5:22:77:A8', 'C8:03:F5:22:87:D8', 'C8:63:FC:41:3F:45', 'D4:A3:3D:60:1A:CC', 'D4:A3:3D:75:DB:6F', 'D4:C1:9E:6D:C3:30', 'D4:C1:9E:6D:CC:78', 'D4:C1:9E:6E:C4:AC', 'D8:EC:5E:04:95:0E', 'D8:EC:5E:04:98:2A', 'D8:EC:5E:04:A1:1C', 'DA:B1:62:A8:57:7D', 'DC:89:83:50:F2:79', 'DC:9F:DB:62:5B:29', 'DC:BF:E9:F7:0C:DA', 'DC:F5:05:92:67:57', 'E0:E2:E6:DC:5D:B8', 'E4:7D:BD:24:A7:49', 'E6:03:96:59:7E:5B', 'E8:9F:80:71:08:E4', 'EA:35:D5:81:20:E2', 'F0:9F:C2:10:F9:8D', 'F6:0F:0A:FC:E5:EB', 'F6:32:AD:67:ED:74', 'F6:80:87:BF:3D:DC', 'F6:B4:BD:F8:29:B3', 'F8:04:2E:F1:E7:47', 'FA:06:F4:FA:64:C3', 'FC:EC:DA:6C:E8:DA', 'FC:EC:DA:DC:3A:7C']
--------------------------------------------------------------------------------
/blackhat_demo.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BlackHat Arsenal Demo Script for CYT
4 | Demonstrates key features and generates sample output
5 | """
6 | import subprocess
7 | import sys
8 | import os
9 | from datetime import datetime
10 |
def print_banner():
    """Print the demo's ASCII-art title banner to stdout."""
    print("""
    ╔══════════════════════════════════════════════════════════════╗
    ║                    CHASING YOUR TAIL (CYT)                  ║
    ║                 BlackHat Arsenal 2025 Demo                  ║
    ║              Wi-Fi Surveillance Detection                   ║
    ╚══════════════════════════════════════════════════════════════╝
    """)
19 |
def print_step(step, description):
    """Announce a numbered demo step followed by a 60-char separator rule."""
    header = f"\n🎯 Step {step}: {description}"
    print(header)
    print("=" * 60)
23 |
def run_command(cmd, description):
    """Run a shell command for the demo, printing a one-line status.

    Captures output with a 30 s timeout. Failures, timeouts, and unexpected
    errors are reported to stdout but never raised, so the demo keeps going.
    NOTE: shell=True is acceptable here only because cmd comes from the
    demo script itself, never from external input.
    """
    print(f"📡 {description}...")
    try:
        proc = subprocess.run(cmd, shell=True, capture_output=True, text=True, timeout=30)
    except subprocess.TimeoutExpired:
        print("⏰ Command timed out (expected for demo)")
        return
    except Exception as e:
        print(f"⚠️ Note: {str(e)[:100]}...")
        return
    if proc.returncode == 0:
        print("✅ Success!")
    else:
        print(f"⚠️ Warning: {proc.stderr[:100]}...")
36 |
def main():
    """Drive the scripted BlackHat Arsenal demo from banner to summary.

    Walks through six presentation steps: a security checklist, a feature
    list, a live ``--demo`` analysis run (only if surveillance_analyzer.py
    exists in the working directory), an output-file inventory, a KML
    feature tour, and a closing summary. Purely presentational apart from
    the single run_command() invocation.
    """
    print_banner()
    print(f"🕒 Demo started at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")

    print_step(1, "Security Verification")
    # NOTE(review): these lines are static claims printed for the audience,
    # not live checks of the security features.
    print("🔒 Verifying security hardening...")
    print("✅ SQL injection prevention: ACTIVE")
    print("✅ Encrypted credentials: ENABLED")
    print("✅ Input validation: ACTIVE")
    print("✅ Secure ignore list loading: ACTIVE")

    print_step(2, "Core Features Demo")

    print("\n📊 CYT Core Capabilities:")
    features = [
        "Real-time Wi-Fi device monitoring",
        "Advanced persistence detection algorithms",
        "Multi-location tracking and correlation",
        "Spectacular Google Earth KML visualization",
        "HTML report generation with pandoc",
        "GPS integration with Bluetooth support",
        "Security-hardened architecture"
    ]
    for feature in features:
        print(f"   ✅ {feature}")

    print_step(3, "Demo Analysis")
    print("🎯 Running surveillance analysis with demo data...")

    # Check if we can run demo; the analyzer must live in the CWD.
    if os.path.exists("surveillance_analyzer.py"):
        run_command("python3 surveillance_analyzer.py --demo",
                   "Executing surveillance detection with simulated GPS route")
    else:
        print("⚠️ surveillance_analyzer.py not found - ensure you're in the correct directory")

    print_step(4, "Output Files Generated")

    # Check for output files; each entry is (path, human description).
    output_dirs = [
        ("surveillance_reports/", "Surveillance analysis reports (MD/HTML)"),
        ("kml_files/", "Google Earth KML visualizations"),
        ("kml_files/demo_following_detection.kml", "Demo: Following detection example")
    ]

    for path, description in output_dirs:
        if os.path.exists(path):
            print(f"   ✅ {path} - {description}")
        else:
            print(f"   📁 {path} - {description} (will be created)")

    print_step(5, "Google Earth Integration")
    print("🗺️ KML Visualization Features:")
    kml_features = [
        "Color-coded persistence level markers",
        "Device tracking paths with movement correlation",
        "Rich interactive balloon content",
        "Activity heatmaps and intensity zones",
        "Temporal analysis overlays",
        "Professional styling and metadata"
    ]
    for feature in kml_features:
        print(f"   🎨 {feature}")

    print(f"\n📁 Open 'kml_files/demo_following_detection.kml' in Google Earth to see")
    print("   spectacular visualization of device following detection!")

    print_step(6, "BlackHat Arsenal Ready!")
    print("""
    🎪 Demo Complete! Key highlights for BlackHat Arsenal:
    
    🔥 SPECTACULAR FEATURES:
    • Professional Google Earth visualization with advanced KML styling
    • Multi-location device tracking with visual correlation paths
    • Security-hardened architecture (SQL injection prevention)
    • Multi-format reporting (Markdown, HTML, KML)
    • Real-time GPS integration with Bluetooth support
    
    📊 TECHNICAL EXCELLENCE:
    • Advanced persistence detection algorithms
    • Location clustering and session management
    • Professional GUI with surveillance analysis buttons
    • Comprehensive logging and audit trails
    
    🛡️ SECURITY FOCUS:
    • Encrypted credential management
    • Parameterized SQL queries
    • Input validation and sanitization
    • Secure ignore list loading
    
    🌟 Ready for BlackHat Arsenal presentation!
    """)

if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/chasing_your_tail.py:
--------------------------------------------------------------------------------
### Chasing Your Tail V04_15_22
### @matt0177
### Released under the MIT License https://opensource.org/licenses/MIT
###
"""Startup/initialization for the Chasing Your Tail (CYT) monitor.

Loads config and encrypted credentials, configures logging, prepares the
run log file and ignore lists, locates the newest Kismet database, and
initializes the secure monitor used by the main loop further below.
Exits with status 1 if any part of initialization fails.
"""

import sqlite3
import time
from datetime import datetime, timedelta
import glob
import os
import json
import pathlib
import signal
import sys
import logging
from secure_ignore_loader import load_ignore_lists
from secure_database import SecureKismetDB, SecureTimeWindows
from secure_main_logic import SecureCYTMonitor
from secure_credentials import secure_config_loader

# Configure logging
# Security/audit events go both to cyt_security.log and to the console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('cyt_security.log'),
        logging.StreamHandler()
    ]
)

# Load configuration with secure credential handling
# Returns the parsed config dict plus the credential manager that decrypts
# API keys on demand (keys are never stored in plain text).
config, credential_manager = secure_config_loader('config.json')
logging.info("Configuration loaded with secure credential management")

### Check for/make subdirectories for logs, ignore lists etc.
cyt_sub = pathlib.Path(config['paths']['log_dir'])
cyt_sub.mkdir(parents=True, exist_ok=True)

print ('Current Time: ' + time.strftime('%Y-%m-%d %H:%M:%S'))

### Create Log file

# NOTE(review): the path here is hard-coded to ./logs/ while the directory
# created above comes from config['paths']['log_dir'] -- confirm these
# always point at the same place, otherwise the mkdir above is moot.
log_file_name = f'./logs/cyt_log_{time.strftime("%m%d%y_%H%M%S")}'

# Line-buffered (buffering=1) so log lines reach disk promptly; the handle
# is deliberately kept open for the lifetime of the process (the signal
# handler below closes it on shutdown).
cyt_log = open(log_file_name,"w", buffering=1)


#######Load ignore lists securely - NO MORE exec()!

# Load ignore lists using secure loader (JSON files under ./ignore_lists)
ignore_list, probe_ignore_list = load_ignore_lists(config)

# Log results
print(f'{len(ignore_list)} MACs added to ignore list.')
print(f'{len(probe_ignore_list)} Probed SSIDs added to ignore list.')
cyt_log.write(f'{len(ignore_list)} MACs added to ignore list.\n')
cyt_log.write(f'{len(probe_ignore_list)} Probed SSIDs added to ignore list.\n')

# Log security info
logging.info(f"Securely loaded {len(ignore_list)} MAC addresses and {len(probe_ignore_list)} SSIDs")

### Set Initial Variables - SECURE VERSION
# Glob pattern (from config) matching Kismet .kismet database files.
db_path = config['paths']['kismet_logs']

######Find Newest DB file - SECURE
try:
    list_of_files = glob.glob(db_path)
    if not list_of_files:
        raise FileNotFoundError(f"No Kismet database files found at: {db_path}")

    # Newest by creation time; this file is used for the whole session.
    latest_file = max(list_of_files, key=os.path.getctime)
    print(f"Pulling data from: {latest_file}")
    cyt_log.write(f"Pulling data from: {latest_file}\n")
    logging.info(f"Using Kismet database: {latest_file}")

    # Initialize secure monitor
    secure_monitor = SecureCYTMonitor(config, ignore_list, probe_ignore_list, cyt_log)

    # Test database connection and initialize tracking lists
    with SecureKismetDB(latest_file) as db:
        if not db.validate_connection():
            raise RuntimeError("Database validation failed")

        print("Initializing secure tracking lists...")
        secure_monitor.initialize_tracking_lists(db)
        print("Initialization complete!")

except Exception as e:
    # Any initialization failure is fatal: report everywhere and exit.
    error_msg = f"Fatal error during initialization: {e}"
    print(error_msg)
    cyt_log.write(f"{error_msg}\n")
    logging.error(error_msg)
    sys.exit(1)
94 |
95 | ######SECURE MAIN LOOP - All SQL injection vulnerabilities FIXED!
96 |
97 | # Setup signal handler for graceful shutdown
def signal_handler(signum, frame):
    """Handle SIGINT: announce shutdown, close the run log, exit cleanly."""
    shutdown_note = "Shutting down gracefully..."
    print("\n" + shutdown_note)
    cyt_log.write(shutdown_note + "\n")
    logging.info("CYT monitoring stopped by user")
    cyt_log.close()
    sys.exit(0)

# Ctrl-C triggers the graceful-shutdown path above.
signal.signal(signal.SIGINT, signal_handler)
106 |
# Main monitoring loop
#
# Each cycle opens the Kismet DB, processes current activity, and every
# `list_update_interval` cycles rotates the time-window tracking lists.
# NOTE(review): `latest_file` is chosen once at startup; if Kismet rolls
# over to a new .kismet file while this runs, it will not be picked up.
time_count = 0
check_interval = config.get('timing', {}).get('check_interval', 60)
list_update_interval = config.get('timing', {}).get('list_update_interval', 5)

logging.info("Starting secure CYT monitoring loop...")
print(f"🔒 SECURE MODE: All SQL injection vulnerabilities have been eliminated!")
print(f"Monitoring every {check_interval} seconds, updating lists every {list_update_interval} cycles")

while True:
    time_count += 1

    try:
        # Process current activity with secure database operations
        with SecureKismetDB(latest_file) as db:
            secure_monitor.process_current_activity(db)

            # Rotate tracking lists every N cycles (default 5 = 5 minutes)
            if time_count % list_update_interval == 0:
                logging.info(f"Rotating tracking lists (cycle {time_count})")
                secure_monitor.rotate_tracking_lists(db)

    except Exception as e:
        error_msg = f"Error in monitoring loop: {e}"
        print(error_msg)
        cyt_log.write(f"{error_msg}\n")
        logging.error(error_msg)
        # BUG FIX: this branch previously ended with `continue`, which
        # skipped time.sleep() below -- a persistent error (e.g. a deleted
        # DB file) made the loop spin at 100% CPU while flooding the logs.
        # Falling through guarantees we always wait between cycles.

    # Sleep for configured interval (runs after success AND failure)
    time.sleep(check_interval)
138 |
--------------------------------------------------------------------------------
/secure_ignore_loader.py:
--------------------------------------------------------------------------------
1 | """
2 | Secure ignore list loader - replaces dangerous exec() calls
3 | """
4 | import json
5 | import pathlib
6 | import re
7 | from typing import List, Optional
8 | import logging
9 | from input_validation import InputValidator
10 |
11 | logger = logging.getLogger(__name__)
12 |
class SecureIgnoreLoader:
    """Secure loader for MAC and SSID ignore lists.

    Replaces the legacy exec()-based loaders: files are parsed either as
    JSON (preferred) or as a simple Python list assignment, using literal
    evaluation only -- no code is ever executed.
    """

    @staticmethod
    def validate_mac_address(mac: str) -> bool:
        """Validate MAC address format using secure validator"""
        return InputValidator.validate_mac_address(mac)

    @staticmethod
    def validate_ssid(ssid: str) -> bool:
        """Validate SSID using secure validator"""
        return InputValidator.validate_ssid(ssid)

    @classmethod
    def _load_validated_list(cls, file_path: pathlib.Path, variable_name: str,
                             is_valid, normalize, label: str, plural: str) -> List[str]:
        """Shared implementation behind load_mac_list/load_ssid_list.

        Args:
            file_path: File containing a JSON array or a legacy Python
                assignment ``<variable_name> = [...]``.
            variable_name: Variable name expected in legacy-format files.
            is_valid: Predicate deciding whether an entry is accepted.
            normalize: Callable applied to each accepted entry.
            label: Singular label used in log messages (e.g. "MAC address").
            plural: Plural label used in log messages.

        Returns:
            List of validated, normalized string entries; [] on any error
            (errors are logged, never raised).
        """
        if not file_path.exists():
            logger.warning(f"{label} ignore list not found: {file_path}")
            return []

        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read().strip()

            # Prefer JSON; fall back to legacy Python-list parsing.
            if content.startswith('[') and content.endswith(']'):
                try:
                    raw_entries = json.loads(content)
                    if not isinstance(raw_entries, list):
                        raise ValueError("JSON content is not a list")
                except json.JSONDecodeError:
                    raw_entries = cls._parse_python_list(content, variable_name)
            else:
                # Parse Python variable assignment
                raw_entries = cls._parse_python_list(content, variable_name)

            # Keep only well-formed string entries that pass validation.
            validated = []
            for entry in raw_entries:
                if isinstance(entry, str) and is_valid(entry):
                    validated.append(normalize(entry))
                else:
                    logger.warning(f"Invalid {label} skipped: {entry}")

            logger.info(f"Loaded {len(validated)} valid {plural}")
            return validated

        except Exception as e:
            logger.error(f"Error loading {label} list from {file_path}: {e}")
            return []

    @classmethod
    def load_mac_list(cls, file_path: pathlib.Path) -> List[str]:
        """
        Securely load MAC address ignore list
        Supports both JSON and Python list formats
        """
        # MACs are normalized to uppercase for case-insensitive matching.
        return cls._load_validated_list(
            file_path, 'ignore_list',
            cls.validate_mac_address, str.upper,
            'MAC address', 'MAC addresses')

    @classmethod
    def load_ssid_list(cls, file_path: pathlib.Path) -> List[str]:
        """
        Securely load SSID ignore list
        Supports both JSON and Python list formats
        """
        # SSIDs are kept verbatim (matching is case-sensitive).
        return cls._load_validated_list(
            file_path, 'non_alert_ssid_list',
            cls.validate_ssid, lambda ssid: ssid,
            'SSID', 'SSIDs')

    @staticmethod
    def _parse_python_list(content: str, variable_name: str) -> List[str]:
        """
        Safely parse Python list assignment without exec().
        Only handles simple list assignments like: var_name = ['item1', 'item2']

        BUG FIX: the previous implementation swapped single quotes for
        double quotes and then used json.loads, which corrupted any entry
        containing an apostrophe (e.g. "Bob's WiFi"). ast.literal_eval
        parses Python literals directly and safely (it evaluates literals
        only -- no code execution is possible).
        """
        import ast  # local import: module-level dependencies unchanged

        # Strip comments and collapse to one line for the regex search.
        # NOTE(review): a '#' inside a quoted SSID is treated as a comment
        # here -- same limitation as the original implementation.
        lines = [line.split('#')[0].strip() for line in content.split('\n')]
        content_clean = ' '.join(lines)

        # Look for variable assignment pattern (non-greedy: flat lists only)
        pattern = rf'{re.escape(variable_name)}\s*=\s*(\[.*?\])'
        match = re.search(pattern, content_clean, re.DOTALL)

        if not match:
            raise ValueError(f"Could not find {variable_name} assignment")

        list_str = match.group(1)

        try:
            parsed = ast.literal_eval(list_str)
        except (ValueError, SyntaxError) as e:
            raise ValueError(f"Could not parse list literal: {e}")

        if not isinstance(parsed, list):
            raise ValueError("Parsed value is not a list")
        return parsed

    @classmethod
    def save_mac_list(cls, mac_list: List[str], file_path: pathlib.Path) -> None:
        """Save MAC list in secure JSON format"""
        # Validate all MACs before saving; normalize to uppercase.
        valid_macs = [mac.upper() for mac in mac_list if cls.validate_mac_address(mac)]

        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(valid_macs, f, indent=2)

        logger.info(f"Saved {len(valid_macs)} MAC addresses to {file_path}")

    @classmethod
    def save_ssid_list(cls, ssid_list: List[str], file_path: pathlib.Path) -> None:
        """Save SSID list in secure JSON format"""
        # Validate all SSIDs before saving
        valid_ssids = [ssid for ssid in ssid_list if cls.validate_ssid(ssid)]

        with open(file_path, 'w', encoding='utf-8') as f:
            json.dump(valid_ssids, f, indent=2)

        logger.info(f"Saved {len(valid_ssids)} SSIDs to {file_path}")
158 |
159 |
def load_ignore_lists(config: dict) -> tuple[List[str], List[str]]:
    """
    Convenience function to load both MAC and SSID ignore lists
    Returns: (mac_list, ssid_list)
    """
    loader = SecureIgnoreLoader()
    base_dir = pathlib.Path('./ignore_lists')
    list_names = config['paths']['ignore_lists']

    macs = loader.load_mac_list(base_dir / list_names['mac'])
    ssids = loader.load_ssid_list(base_dir / list_names['ssid'])

    return macs, ssids
--------------------------------------------------------------------------------
/secure_database.py:
--------------------------------------------------------------------------------
1 | """
2 | Secure database operations - prevents SQL injection
3 | """
4 | import sqlite3
5 | import json
6 | import logging
7 | from typing import List, Tuple, Optional, Dict, Any
8 | from datetime import datetime, timedelta
9 | import time
10 |
11 | logger = logging.getLogger(__name__)
12 |
class SecureKismetDB:
    """Secure wrapper for Kismet database operations.

    Acts as a context manager; every query goes through
    execute_safe_query(), which accepts parameterized SQL only, so user
    data is never interpolated into query text.
    """

    def __init__(self, db_path: str):
        # Path to the .kismet SQLite file; connection is opened lazily.
        self.db_path = db_path
        self._connection = None

    def __enter__(self):
        # Context-manager entry: open the connection.
        self.connect()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Context-manager exit: always release the connection.
        self.close()

    def connect(self) -> None:
        """Establish secure database connection"""
        try:
            conn = sqlite3.connect(self.db_path, timeout=30.0)
            conn.row_factory = sqlite3.Row  # Enable column access by name
            self._connection = conn
            logger.info(f"Connected to database: {self.db_path}")
        except sqlite3.Error as e:
            logger.error(f"Failed to connect to database {self.db_path}: {e}")
            raise

    def close(self) -> None:
        """Close database connection"""
        conn, self._connection = self._connection, None
        if conn:
            conn.close()

    def execute_safe_query(self, query: str, params: Tuple = ()) -> List[sqlite3.Row]:
        """Execute parameterized query safely"""
        if self._connection is None:
            raise RuntimeError("Database not connected")

        try:
            return self._connection.cursor().execute(query, params).fetchall()
        except sqlite3.Error as e:
            logger.error(f"Database query failed: {query}, params: {params}, error: {e}")
            raise

    def get_devices_by_time_range(self, start_time: float, end_time: Optional[float] = None) -> List[Dict[str, Any]]:
        """
        Get devices within time range with proper parameterization

        Args:
            start_time: Unix timestamp for start time
            end_time: Optional unix timestamp for end time

        Returns:
            List of device dicts with keys 'mac', 'type', 'device_data'
            (parsed JSON or None) and 'last_time'.
        """
        query = "SELECT devmac, type, device, last_time FROM devices WHERE last_time >= ?"
        params: Tuple = (start_time,)
        if end_time is not None:
            query += " AND last_time <= ?"
            params = (start_time, end_time)

        devices = []
        for row in self.execute_safe_query(query, params):
            try:
                # Parse the device JSON blob; a malformed blob is logged
                # and stored as None rather than dropping the device.
                parsed = None
                if row['device']:
                    try:
                        parsed = json.loads(row['device'])
                    except (json.JSONDecodeError, TypeError) as e:
                        logger.warning(f"Failed to parse device JSON for {row['devmac']}: {e}")

                devices.append({
                    'mac': row['devmac'],
                    'type': row['type'],
                    'device_data': parsed,
                    'last_time': row['last_time']
                })
            except Exception as e:
                logger.warning(f"Error processing device row: {e}")
                continue

        return devices

    def get_mac_addresses_by_time_range(self, start_time: float, end_time: Optional[float] = None) -> List[str]:
        """Get just MAC addresses for a time range"""
        return [entry['mac']
                for entry in self.get_devices_by_time_range(start_time, end_time)
                if entry['mac']]

    def get_probe_requests_by_time_range(self, start_time: float, end_time: Optional[float] = None) -> List[Dict[str, str]]:
        """
        Get probe requests with SSIDs for time range

        Returns:
            List of dicts with 'mac', 'ssid', 'timestamp'
        """
        probes = []
        for device in self.get_devices_by_time_range(start_time, end_time):
            mac = device['mac']
            data = device['device_data']
            if not data:
                continue

            # Walk the nested Kismet dot11 structure defensively: any
            # missing or wrongly-typed level means "no probe" for this device.
            try:
                dot11 = data.get('dot11.device', {})
                if not isinstance(dot11, dict):
                    continue

                record = dot11.get('dot11.device.last_probed_ssid_record', {})
                if not isinstance(record, dict):
                    continue

                ssid = record.get('dot11.probedssid.ssid', '')
                if ssid and isinstance(ssid, str):
                    probes.append({
                        'mac': mac,
                        'ssid': ssid,
                        'timestamp': device['last_time']
                    })
            except (KeyError, TypeError, AttributeError) as e:
                logger.debug(f"No probe data for device {mac}: {e}")
                continue

        return probes

    def validate_connection(self) -> bool:
        """Validate database connection and basic structure"""
        try:
            # Test basic query against the expected 'devices' table.
            result = self.execute_safe_query("SELECT COUNT(*) as count FROM devices LIMIT 1")
            count = result[0]['count'] if result else 0
            logger.info(f"Database contains {count} devices")
            return True
        except sqlite3.Error as e:
            logger.error(f"Database validation failed: {e}")
            return False
155 |
156 |
class SecureTimeWindows:
    """Secure time window management for device tracking"""

    def __init__(self, config: Dict[str, Any]):
        # Fall back to the standard 5/10/15/20 minute windows when the
        # config does not override them.
        self.config = config
        default_windows = {'recent': 5, 'medium': 10, 'old': 15, 'oldest': 20}
        self.time_windows = config.get('timing', {}).get('time_windows', default_windows)

    def get_time_boundaries(self) -> Dict[str, float]:
        """Calculate secure time boundaries"""
        now = datetime.now()

        # One '<name>_time' unix-timestamp boundary per configured window.
        boundaries = {
            f'{name}_time': time.mktime((now - timedelta(minutes=minutes)).timetuple())
            for name, minutes in self.time_windows.items()
        }

        # Add current time boundary (2 minutes ago for active scanning)
        boundaries['current_time'] = time.mktime((now - timedelta(minutes=2)).timetuple())

        return boundaries

    def filter_devices_by_ignore_list(self, devices: List[str], ignore_list: List[str]) -> List[str]:
        """Safely filter devices against ignore list"""
        if not ignore_list:
            return devices

        # Uppercased set gives case-insensitive O(1) membership checks;
        # non-string entries are dropped along the way.
        ignored = {mac.upper() for mac in ignore_list}
        return [dev for dev in devices
                if isinstance(dev, str) and dev.upper() not in ignored]

    def filter_ssids_by_ignore_list(self, ssids: List[str], ignore_list: List[str]) -> List[str]:
        """Safely filter SSIDs against ignore list"""
        if not ignore_list:
            return ssids

        # SSID comparison is case-sensitive, unlike MAC filtering.
        ignored = set(ignore_list)
        return [ssid for ssid in ssids
                if isinstance(ssid, str) and ssid not in ignored]
212 |
213 |
def create_secure_db_connection(db_path: str) -> SecureKismetDB:
    """Factory function to create secure database connection.

    Args:
        db_path: Path to a Kismet .kismet SQLite database file.

    Returns:
        An unconnected SecureKismetDB; call connect() or use it as a
        context manager to actually open the connection.
    """
    return SecureKismetDB(db_path)
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Chasing Your Tail (CYT)
2 |
3 | A comprehensive Wi-Fi probe request analyzer that monitors and tracks wireless devices by analyzing their probe requests. The system integrates with Kismet for packet capture and WiGLE API for SSID geolocation analysis, featuring advanced surveillance detection capabilities.
4 |
5 | ## 🚨 Security Notice
6 |
7 | This project has been security-hardened to eliminate critical vulnerabilities:
8 | - **SQL injection prevention** with parameterized queries
9 | - **Encrypted credential management** for API keys
10 | - **Input validation** and sanitization
11 | - **Secure ignore list loading** (no more `exec()` calls)
12 |
13 | **⚠️ REQUIRED: Run `python3 migrate_credentials.py` before first use to secure your API keys!**
14 |
15 | ## Features
16 |
17 | - **Real-time Wi-Fi monitoring** with Kismet integration
18 | - **Advanced surveillance detection** with persistence scoring
19 | - **🆕 Automatic GPS integration** - extracts coordinates from Bluetooth GPS via Kismet
20 | - **GPS correlation** and location clustering (100m threshold)
21 | - **Spectacular KML visualization** for Google Earth with professional styling and interactive content
22 | - **Multi-format reporting** - Markdown, HTML (with pandoc), and KML outputs
23 | - **Time-window tracking** (5, 10, 15, 20 minute windows)
24 | - **WiGLE API integration** for SSID geolocation
25 | - **Multi-location tracking algorithms** for detecting following behavior
26 | - **Enhanced GUI interface** with surveillance analysis button
27 | - **Organized file structure** with dedicated output directories
28 | - **Comprehensive logging** and analysis tools
29 |
30 | ## Requirements
31 |
- Python 3.9+ (the secure modules use built-in generic type annotations such as `tuple[...]`)
33 | - Kismet wireless packet capture
34 | - Wi-Fi adapter supporting monitor mode
35 | - Linux-based system
36 | - WiGLE API key (optional)
37 |
38 | ## Installation & Setup
39 |
40 | ### 1. Install Dependencies
41 | ```bash
42 | pip3 install -r requirements.txt
43 | ```
44 |
45 | ### 2. Security Setup (REQUIRED FIRST TIME)
46 | ```bash
47 | # Migrate credentials from insecure config.json
48 | python3 migrate_credentials.py
49 |
50 | # Verify security hardening
51 | python3 chasing_your_tail.py
52 | # Should show: "🔒 SECURE MODE: All SQL injection vulnerabilities have been eliminated!"
53 | ```
54 |
55 | ### 3. Configure System
56 | Edit `config.json` with your paths and settings:
57 | - Kismet database path pattern
58 | - Log and ignore list directories
59 | - Time window configurations
60 | - Geographic search boundaries
61 |
62 | ## Usage
63 |
64 | ### GUI Interface
65 | ```bash
66 | python3 cyt_gui.py # Enhanced GUI with surveillance analysis
67 | ```
68 | **GUI Features:**
69 | - 🗺️ **Surveillance Analysis** button - GPS-correlated persistence detection with spectacular KML visualization
70 | - 📈 **Analyze Logs** button - Historical probe request analysis
71 | - Real-time status monitoring and file generation notifications
72 |
73 | ### Command Line Monitoring
74 | ```bash
75 | # Start core monitoring (secure)
76 | python3 chasing_your_tail.py
77 |
78 | # Start Kismet (ONLY working script - July 23, 2025 fix)
79 | ./start_kismet_clean.sh
80 | ```
81 |
82 | ### Data Analysis
83 | ```bash
84 | # Analyze collected probe data (past 14 days, local only - default)
85 | python3 probe_analyzer.py
86 |
87 | # Analyze past 7 days only
88 | python3 probe_analyzer.py --days 7
89 |
90 | # Analyze ALL logs (may be slow for large datasets)
91 | python3 probe_analyzer.py --all-logs
92 |
93 | # Analyze WITH WiGLE API calls (consumes API credits!)
94 | python3 probe_analyzer.py --wigle
95 | ```
96 |
97 | ### Surveillance Detection & Advanced Visualization
98 | ```bash
99 | # 🆕 NEW: Automatic GPS extraction with spectacular KML visualization
100 | python3 surveillance_analyzer.py
101 |
102 | # Run analysis with demo GPS data (for testing - uses Phoenix coordinates)
103 | python3 surveillance_analyzer.py --demo
104 |
105 | # Analyze specific Kismet database
106 | python3 surveillance_analyzer.py --kismet-db /path/to/kismet.db
107 |
108 | # Focus on stalking detection with high persistence threshold
109 | python3 surveillance_analyzer.py --stalking-only --min-persistence 0.8
110 |
111 | # Export results to JSON for further analysis
112 | python3 surveillance_analyzer.py --output-json analysis_results.json
113 |
114 | # Analyze with external GPS data from JSON file
115 | python3 surveillance_analyzer.py --gps-file gps_coordinates.json
116 | ```
117 |
118 | ### Ignore List Management
119 | ```bash
120 | # Create new ignore lists from current Kismet data
121 | python3 legacy/create_ignore_list.py # Moved to legacy folder
122 | ```
123 | **Note**: Ignore lists are now stored as JSON files in `./ignore_lists/`
124 |
125 | ## Core Components
126 |
127 | - **chasing_your_tail.py**: Core monitoring engine with real-time Kismet database queries
128 | - **cyt_gui.py**: Enhanced Tkinter GUI with surveillance analysis capabilities
129 | - **surveillance_analyzer.py**: GPS surveillance detection with automatic coordinate extraction and advanced KML visualization
130 | - **surveillance_detector.py**: Core persistence detection engine for suspicious device patterns
131 | - **gps_tracker.py**: GPS tracking with location clustering and spectacular Google Earth KML generation
132 | - **probe_analyzer.py**: Post-processing tool with WiGLE integration
133 | - **start_kismet_clean.sh**: ONLY working Kismet startup script (July 23, 2025 fix)
134 |
135 | ### Security Components
136 | - **secure_database.py**: SQL injection prevention
137 | - **secure_credentials.py**: Encrypted credential management
138 | - **secure_ignore_loader.py**: Safe ignore list loading
139 | - **secure_main_logic.py**: Secure monitoring logic
140 | - **input_validation.py**: Input sanitization and validation
141 | - **migrate_credentials.py**: Credential migration tool
142 |
143 | ## Output Files & Project Structure
144 |
145 | ### Organized Output Directories
146 | - **Surveillance Reports**: `./surveillance_reports/surveillance_report_YYYYMMDD_HHMMSS.md` (markdown)
147 | - **HTML Reports**: `./surveillance_reports/surveillance_report_YYYYMMDD_HHMMSS.html` (styled HTML with pandoc)
148 | - **KML Visualizations**: `./kml_files/surveillance_analysis_YYYYMMDD_HHMMSS.kml` (spectacular Google Earth files)
149 | - **CYT Logs**: `./logs/cyt_log_MMDDYY_HHMMSS`
150 | - **Analysis Logs**: `./analysis_logs/surveillance_analysis.log`
151 | - **Probe Reports**: `./reports/probe_analysis_report_YYYYMMDD_HHMMSS.txt`
152 |
153 | ### Configuration & Data
154 | - **Ignore Lists**: `./ignore_lists/mac_list.json`, `./ignore_lists/ssid_list.json`
155 | - **Encrypted Credentials**: `./secure_credentials/encrypted_credentials.json`
156 |
157 | ### Archive Directories (Cleaned July 23, 2025)
158 | - **old_scripts/**: All broken startup scripts with hanging pkill commands
159 | - **docs_archive/**: Session notes, old configs, backup files, duplicate logs
160 | - **legacy/**: Original legacy code archive (pre-security hardening)
161 |
162 | ## Technical Architecture
163 |
164 | ### Time Window System
165 | Maintains four overlapping time windows to detect device persistence:
166 | - Recent: Past 5 minutes
167 | - Medium: 5-10 minutes ago
168 | - Old: 10-15 minutes ago
169 | - Oldest: 15-20 minutes ago
170 |
171 | ### Surveillance Detection
172 | Advanced persistence detection algorithms analyze device behavior patterns:
173 | - **Temporal Persistence**: Consistent device appearances over time
174 | - **Location Correlation**: Devices following across multiple locations
175 | - **Probe Pattern Analysis**: Suspicious SSID probe requests
176 | - **Timing Analysis**: Unusual appearance patterns
177 | - **Persistence Scoring**: Weighted scores (0-1.0) based on combined indicators
178 | - **Multi-location Tracking**: Specialized algorithms for detecting following behavior
179 |
180 | ### GPS Integration & Spectacular KML Visualization (Enhanced!)
181 | - **🆕 Automatic GPS extraction** from Kismet database (Bluetooth GPS support)
182 | - **Location clustering** with 100m threshold for grouping nearby coordinates
183 | - **Session management** with timeout handling for location transitions
184 | - **Device-to-location correlation** links Wi-Fi devices to GPS positions
185 | - **Professional KML generation** with spectacular Google Earth visualizations featuring:
186 | - Color-coded persistence level markers (green/yellow/red)
187 | - Device tracking paths showing movement correlation
188 | - Rich interactive balloon content with detailed device intelligence
189 | - Activity heatmaps and surveillance intensity zones
190 | - Temporal analysis overlays for time-based pattern detection
191 | - **Multi-location tracking** detects devices following across locations with visual tracking paths
192 |
193 | ## Configuration
194 |
195 | All settings are centralized in `config.json`:
196 | ```json
197 | {
198 | "kismet_db_path": "/path/to/kismet/*.kismet",
199 | "log_directory": "./logs/",
200 | "ignore_lists_directory": "./ignore_lists/",
201 | "time_windows": {
202 | "recent": 5,
203 | "medium": 10,
204 | "old": 15,
205 | "oldest": 20
206 | }
207 | }
208 | ```
209 |
210 | WiGLE API credentials are now securely encrypted in `secure_credentials/encrypted_credentials.json`.
211 |
212 | ## Security Features
213 |
214 | - **Parameterized SQL queries** prevent injection attacks
215 | - **Encrypted credential storage** protects API keys
216 | - **Input validation** prevents malicious input
217 | - **Audit logging** tracks all security events
218 | - **Safe ignore list loading** eliminates code execution risks
219 |
220 | ## Author
221 |
222 | @matt0177
223 |
224 | ## License
225 |
226 | MIT License
227 |
228 | ## Disclaimer
229 |
230 | This tool is intended for legitimate security research, network administration, and personal safety purposes. Users are responsible for complying with all applicable laws and regulations in their jurisdiction.
--------------------------------------------------------------------------------
/secure_credentials.py:
--------------------------------------------------------------------------------
1 | """
2 | Secure credential management for CYT
3 | Never store API keys in plain text files!
4 | """
5 | import os
6 | import json
7 | import base64
8 | import logging
9 | from pathlib import Path
10 | from typing import Optional, Dict, Any
11 | from cryptography.fernet import Fernet
12 | from cryptography.hazmat.primitives import hashes
13 | from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
14 | from input_validation import InputValidator
15 |
16 | logger = logging.getLogger(__name__)
17 |
18 | class SecureCredentialManager:
19 | """Secure credential storage and retrieval"""
20 |
    def __init__(self, credentials_dir: str = "./secure_credentials"):
        """Initialize the manager rooted at `credentials_dir`.

        The directory holds the PBKDF2 salt file and the encrypted
        credential store; it is created with mode 0700 if missing.
        """
        self.credentials_dir = Path(credentials_dir)
        # NOTE(review): mkdir's mode applies only when the directory is
        # created here; a pre-existing directory keeps its old permissions.
        self.credentials_dir.mkdir(exist_ok=True, mode=0o700)  # Restrict directory permissions
        # Despite its name, key_file stores only the KDF salt, never a key
        # (see _get_or_create_encryption_key).
        self.key_file = self.credentials_dir / ".encryption_key"
        self.credentials_file = self.credentials_dir / "encrypted_credentials.json"
26 |
    def _generate_key_from_password(self, password: bytes, salt: bytes) -> bytes:
        """Derive a Fernet-compatible key from a password.

        Uses PBKDF2-HMAC-SHA256 with 100,000 iterations to slow down
        brute-force attempts, then urlsafe-base64 encodes the 32-byte
        derived key as the Fernet constructor requires.

        Args:
            password: Master password as raw bytes.
            salt: Random salt (persisted alongside the data; not secret).

        Returns:
            urlsafe-base64-encoded 32-byte key suitable for Fernet().
        """
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=salt,
            iterations=100000,
        )
        return base64.urlsafe_b64encode(kdf.derive(password))
36 |
    def _get_or_create_encryption_key(self) -> Fernet:
        """Get existing encryption key or create new one.

        Only the PBKDF2 salt is persisted (in self.key_file); the actual
        key is re-derived on every call from the master password, so the
        key itself never touches disk.

        Returns:
            A Fernet cipher ready for encrypt/decrypt.
        """
        if self.key_file.exists():
            # Load existing salt (only the salt is stored, never the key)
            with open(self.key_file, 'rb') as f:
                key_data = json.loads(f.read())
                salt = base64.b64decode(key_data['salt'])
        else:
            # Generate new salt
            salt = os.urandom(16)
            key_data = {'salt': base64.b64encode(salt).decode()}

            # Save salt (not the key itself!)
            with open(self.key_file, 'wb') as f:
                f.write(json.dumps(key_data).encode())
            os.chmod(self.key_file, 0o600)  # Restrict file permissions

        # Get password from environment or prompt
        password = self._get_master_password()
        key = self._generate_key_from_password(password.encode(), salt)
        return Fernet(key)
58 |
    def _get_master_password(self) -> str:
        """Get master password from environment variable or prompt user.

        Resolution order:
          1. CYT_MASTER_PASSWORD environment variable (CI/CD, services).
          2. Fixed test password when CYT_TEST_MODE=true.
          3. Interactive getpass prompt.

        Returns:
            The master password string.

        Raises:
            ValueError: if the interactive prompt returns an empty password.
            RuntimeError: if no interactive input is possible and no
                environment variable is set.
        """
        # Try environment variable first (for CI/CD, etc.)
        password = os.getenv('CYT_MASTER_PASSWORD')
        if password:
            return password

        # Check for testing mode
        # NOTE(review): hard-coded test password is for tests only; ensure
        # CYT_TEST_MODE is never set in production environments.
        if os.getenv('CYT_TEST_MODE') == 'true':
            return 'test_password_123'

        # Prompt user (for interactive use)
        import getpass
        try:
            password = getpass.getpass("Enter master password for CYT credentials: ")
            if not password:
                raise ValueError("Password cannot be empty")
            return password
        except (KeyboardInterrupt, EOFError):
            # Fallback for non-interactive environments
            print("⚠️ Non-interactive environment detected. Using environment variables.")
            print("Set CYT_MASTER_PASSWORD environment variable or use CYT_TEST_MODE=true for testing")
            raise RuntimeError("Password entry not available in non-interactive mode")
82 |
    def store_credential(self, service: str, credential_type: str, value: str) -> None:
        """Store encrypted credential with validation.

        Decrypts the existing store (if any), sets
        credentials[service][credential_type] = value, re-encrypts, and
        writes the file back with 0600 permissions.

        Args:
            service: Logical service name (e.g. 'wigle'); sanitized, max 50 chars.
            credential_type: Key within the service (e.g. 'encoded_token').
            value: Secret to store (max 10000 chars).

        Raises:
            ValueError: on non-string, empty, or oversized inputs.
            Exception: any encryption or I/O failure is logged and re-raised.

        NOTE(review): the read-modify-write cycle is not atomic; concurrent
        writers could lose updates. Acceptable for single-user CLI use.
        """
        # Validate inputs
        if not all(isinstance(x, str) for x in [service, credential_type, value]):
            raise ValueError("All parameters must be strings")

        if not all(x.strip() for x in [service, credential_type, value]):
            raise ValueError("Parameters cannot be empty")

        # Sanitize service and credential_type names
        service = InputValidator.sanitize_string(service, max_length=50)
        credential_type = InputValidator.sanitize_string(credential_type, max_length=50)

        if len(value) > 10000:  # Reasonable limit for credentials
            raise ValueError("Credential value too long")

        try:
            cipher = self._get_or_create_encryption_key()

            # Load existing credentials or create new structure
            credentials = {}
            if self.credentials_file.exists():
                with open(self.credentials_file, 'rb') as f:
                    encrypted_data = f.read()
                    if encrypted_data:
                        decrypted_data = cipher.decrypt(encrypted_data)
                        credentials = json.loads(decrypted_data.decode())

            # Add new credential (overwrites any existing entry)
            if service not in credentials:
                credentials[service] = {}
            credentials[service][credential_type] = value

            # Encrypt and save
            encrypted_data = cipher.encrypt(json.dumps(credentials).encode())
            with open(self.credentials_file, 'wb') as f:
                f.write(encrypted_data)
            os.chmod(self.credentials_file, 0o600)  # Restrict file permissions

            logger.info(f"Stored credential for {service}:{credential_type}")

        except Exception as e:
            logger.error(f"Failed to store credential: {e}")
            raise
127 |
128 | def get_credential(self, service: str, credential_type: str) -> Optional[str]:
129 | """Retrieve decrypted credential"""
130 | try:
131 | if not self.credentials_file.exists():
132 | logger.warning("No credentials file found")
133 | return None
134 |
135 | cipher = self._get_or_create_encryption_key()
136 |
137 | with open(self.credentials_file, 'rb') as f:
138 | encrypted_data = f.read()
139 | if not encrypted_data:
140 | return None
141 |
142 | decrypted_data = cipher.decrypt(encrypted_data)
143 | credentials = json.loads(decrypted_data.decode())
144 |
145 | return credentials.get(service, {}).get(credential_type)
146 |
147 | except Exception as e:
148 | logger.error(f"Failed to retrieve credential: {e}")
149 | return None
150 |
151 | def migrate_from_config(self, config: Dict[str, Any]) -> None:
152 | """Migrate credentials from insecure config file"""
153 | print("🔐 Migrating credentials to secure storage...")
154 |
155 | # Migrate WiGLE API key
156 | wigle_config = config.get('api_keys', {}).get('wigle', {})
157 | if 'encoded_token' in wigle_config:
158 | encoded_token = wigle_config['encoded_token']
159 | print("Found WiGLE API token in config file - migrating to secure storage")
160 | self.store_credential('wigle', 'encoded_token', encoded_token)
161 | print("✅ WiGLE API token migrated successfully")
162 |
163 | # Could add other credentials here (database passwords, etc.)
164 |
165 | print("🔐 Credential migration complete!")
166 | print("⚠️ IMPORTANT: Remove API keys from config.json file!")
167 |
168 | def get_wigle_token(self) -> Optional[str]:
169 | """Convenience method to get WiGLE API token"""
170 | return self.get_credential('wigle', 'encoded_token')
171 |
172 |
def secure_config_loader(config_path: str = 'config.json') -> tuple:
    """
    Load configuration with secure credential handling.

    Automatically migrates plaintext API credentials found in the config
    file into encrypted storage, removes them from the in-memory config,
    and writes a sanitized copy of the config file.

    Args:
        config_path: Path to the JSON configuration file.

    Returns:
        A ``(config, credential_manager)`` tuple.  Note: this function has
        always returned a tuple (callers unpack two values); the previous
        ``Dict[str, Any]`` annotation was incorrect.
    """
    with open(config_path, 'r') as f:
        config = json.load(f)

    # Initialize credential manager
    cred_manager = SecureCredentialManager()

    # Check if we need to migrate credentials
    if 'api_keys' in config:
        api_keys = config['api_keys']

        # Look for credential-like keys inside each provider entry.
        # (The previous check stringified the whole api_keys dict and never
        # used the loop variable, so it matched whenever *any* text in the
        # section contained 'token' or 'key', regardless of the entry.)
        has_plaintext_creds = any(
            any('token' in str(k).lower() or 'key' in str(k).lower() for k in entry)
            for entry in api_keys.values() if isinstance(entry, dict)
        )

        if has_plaintext_creds:
            logger.warning("Found API keys in config file - initiating secure migration")
            cred_manager.migrate_from_config(config)

            # Remove API keys from config (they're now stored securely)
            config.pop('api_keys', None)

            # Create sanitized config file next to the original.  splitext is
            # used instead of str.replace so paths without a '.json' suffix
            # cannot silently overwrite the original file.
            base, ext = os.path.splitext(config_path)
            sanitized_config_path = base + '_sanitized' + ext
            with open(sanitized_config_path, 'w') as f:
                json.dump(config, f, indent=2)

            print(f"✅ Created sanitized config file: {sanitized_config_path}")
            print("⚠️  Please replace your config.json with the sanitized version")

    return config, cred_manager
207 |
208 |
def get_environment_credentials() -> Dict[str, Optional[str]]:
    """Get credentials from environment variables (for CI/CD).

    Returns:
        Mapping of credential name to value; an entry is None when the
        corresponding environment variable is unset (os.getenv default),
        so the previous ``Dict[str, str]`` annotation was inaccurate.
    """
    return {
        'wigle_token': os.getenv('WIGLE_API_TOKEN'),
        'db_password': os.getenv('CYT_DB_PASSWORD'),
        # Add other environment credentials here
    }
--------------------------------------------------------------------------------
/probe_analyzer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import json
4 | import pathlib
5 | import glob
6 | import re
7 | from datetime import datetime
8 | import requests
9 | import sqlite3
10 | import argparse
11 |
# Load config with secure credentials.
# Module-level load: the resulting config dict and credential manager are
# shared by ProbeAnalyzer and main() below.  Note secure_config_loader
# returns a (config, manager) tuple, and may prompt for a master password
# in interactive use.
from secure_credentials import secure_config_loader
config, credential_manager = secure_config_loader('config.json')
15 |
class ProbeAnalyzer:
    """Analyzes probe requests collected in CYT log files.

    Parses 'Found a probe!' entries out of log files, groups them by SSID
    with timestamps, and can optionally query the WiGLE API for known
    locations of each SSID.
    """

    def __init__(self, log_dir=None, local_only=True, days_back=14):
        """
        Args:
            log_dir: Directory containing cyt_log_* files; defaults to
                config['paths']['log_dir'].  A plain string is accepted and
                coerced to a Path (previously a str argument would break
                parse_all_logs, which calls .glob()).
            local_only: When True, no WiGLE API calls are made.
            days_back: Only analyze logs newer than this many days.
        """
        self.log_dir = pathlib.Path(log_dir) if log_dir else pathlib.Path(config['paths']['log_dir'])
        self.days_back = days_back
        # Get WiGLE API key from secure storage
        self.wigle_api_key = credential_manager.get_wigle_token()
        if not self.wigle_api_key and not local_only:
            print("⚠️  No WiGLE API token found in secure storage. Use --local for offline analysis.")
        self.probes = {}  # Dictionary to store probe requests {ssid: [timestamps]}
        self.local_only = local_only  # When True, skip WiGLE API queries

    def parse_log_file(self, log_file):
        """Parse a single CYT log file for probe requests.

        Each probe is associated with the most recent 'Current Time:'
        timestamp preceding it in the file; when none exists, a timestamp
        is derived from the log filename (cyt_log_MMDDYY_HHMMSS).
        """
        probe_pattern = re.compile(r'Found a probe!: (.*?)\n')
        # Timestamps written by CYT into the log body (YYYY-MM-DD HH:MM:SS)
        timestamp_pattern = re.compile(r'Current Time: (\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})')

        with open(log_file, 'r') as f:
            content = f.read()

        # Debug: Print all probes found in this file
        probes_found = probe_pattern.findall(content)
        print(f"\nFound {len(probes_found)} probes in {log_file}:")
        for probe in probes_found:
            print(f"- {probe}")

        for probe in probe_pattern.finditer(content):
            ssid = probe.group(1).strip()
            # Find nearest timestamp before this probe
            content_before = content[:probe.start()]
            timestamp_match = timestamp_pattern.findall(content_before)
            if timestamp_match:
                timestamp = timestamp_match[-1]  # Get last timestamp before probe
                self.probes.setdefault(ssid, []).append(timestamp)
            else:
                # No in-body timestamp: fall back to the time encoded in the
                # filename (format: cyt_log_MMDDYY_HHMMSS).  Split the
                # *basename* - splitting the full path (as the old code did)
                # gave wrong fields whenever a parent directory name
                # contained underscores.
                name_parts = pathlib.Path(log_file).name.split('_')
                date_str = name_parts[2:4]  # ['MMDDYY', 'HHMMSS']
                if len(date_str) == 2:
                    timestamp = f"{date_str[0][:2]}-{date_str[0][2:4]}-{date_str[0][4:]} {date_str[1][:2]}:{date_str[1][2:4]}:{date_str[1][4:]}"
                    self.probes.setdefault(ssid, []).append(timestamp)

    def parse_all_logs(self):
        """Parse log files in the log directory (filtered by days_back)."""
        from datetime import datetime, timedelta

        cutoff_date = datetime.now() - timedelta(days=self.days_back)
        all_log_files = list(self.log_dir.glob('cyt_log_*'))
        filtered_files = []

        print(f"\nFiltering logs to past {self.days_back} days (since {cutoff_date.strftime('%Y-%m-%d')})")

        for log_file in all_log_files:
            try:
                # Extract date from filename: cyt_log_MMDDYY_HHMMSS
                filename_parts = log_file.name.split('_')
                if len(filename_parts) >= 3:
                    date_str = filename_parts[2]  # MMDDYY
                    if len(date_str) == 6:
                        # Convert MMDDYY to proper date
                        month = int(date_str[:2])
                        day = int(date_str[2:4])
                        year = 2000 + int(date_str[4:6])  # Convert YY to 20YY

                        file_date = datetime(year, month, day)
                        if file_date >= cutoff_date:
                            filtered_files.append(log_file)
                        else:
                            print(f"- Skipping old file: {log_file.name} ({file_date.strftime('%Y-%m-%d')})")
            except (ValueError, IndexError):
                # If we can't parse the date, include the file to be safe
                print(f"- Including file with unparseable date: {log_file.name}")
                filtered_files.append(log_file)

        print(f"\nScanning {len(filtered_files)} recent log files (skipped {len(all_log_files) - len(filtered_files)} old files):")

        log_count = 0
        for log_file in filtered_files:
            print(f"- Reading {log_file.name}")
            self.parse_log_file(log_file)
            log_count += 1

        print(f"\nProcessed {log_count} log files from past {self.days_back} days")

    def query_wigle(self, ssid):
        """Query WiGLE for information about an SSID.

        Returns the parsed JSON response, or a {'error': ...} dict when the
        API key is missing or the request fails.
        """
        if not self.wigle_api_key:
            return {"error": "WiGLE API key not configured"}

        print(f"\nQuerying WiGLE for SSID: {ssid}")
        headers = {
            'Authorization': f'Basic {self.wigle_api_key}'
        }

        # Only include bounding box if local_only is True and coordinates are set
        params = {'ssid': ssid}
        if self.local_only:
            search_config = config.get('search', {})
            if all(search_config.get(k) is not None for k in ['lat_min', 'lat_max', 'lon_min', 'lon_max']):
                params.update({
                    'latrange1': search_config['lat_min'],
                    'latrange2': search_config['lat_max'],
                    'longrange1': search_config['lon_min'],
                    'longrange2': search_config['lon_max'],
                })
                print("Using local search area")

        try:
            response = requests.get(
                'https://api.wigle.net/api/v2/network/search',
                headers=headers,
                params=params
            )
            return response.json()
        except Exception as e:
            return {"error": str(e)}

    def analyze_probes(self):
        """Summarize collected probes per SSID (count, first/last seen),
        optionally enriched with WiGLE data when API use is enabled."""
        results = []
        total_ssids = len(self.probes)
        print(f"\nQuerying WiGLE for {total_ssids} unique SSIDs...")
        for i, (ssid, timestamps) in enumerate(self.probes.items(), 1):
            print(f"\nProgress: {i}/{total_ssids}")
            result = {
                "ssid": ssid,
                "count": len(timestamps),
                "first_seen": min(timestamps),
                "last_seen": max(timestamps),
                "wigle_data": self.query_wigle(ssid) if (self.wigle_api_key and not self.local_only) else None
            }
            results.append(result)
        return results
154 |
def main():
    """
    Probe Request Analyzer for Chasing Your Tail

    This tool analyzes probe requests from CYT log files and can query WiGLE for SSID locations.

    Before running:
    1. Make sure you have CYT log files in your logs directory
    2. To use WiGLE lookups:
       - Get a WiGLE API key from wigle.net
       - Store it in encrypted storage (run: python3 migrate_credentials.py)
       - Set your search area in config.json under search
    """

    def parse_cyt_timestamp(ts):
        # Timestamps come in two formats: '%Y-%m-%d %H:%M:%S' when taken
        # from log content ('Current Time: ...' lines) and '%m-%d-%y %H:%M:%S'
        # when derived from a log filename.  The previous code assumed only
        # the second format and crashed with ValueError on the first.
        # Returns a datetime, or None when neither format matches.
        for fmt in ('%Y-%m-%d %H:%M:%S', '%m-%d-%y %H:%M:%S'):
            try:
                return datetime.strptime(ts, fmt)
            except ValueError:
                continue
        return None

    if len(glob.glob(str(pathlib.Path(config['paths']['log_dir']) / 'cyt_log_*'))) == 0:
        print("\nError: No log files found!")
        print(f"Please check the logs directory: {config['paths']['log_dir']}")
        print("Run Chasing Your Tail first to generate some logs.")
        return

    # Check WiGLE configuration
    if not config.get('api_keys', {}).get('wigle'):
        print("\nNote: WiGLE API key not configured.")
        print("To enable WiGLE lookups:")
        print("1. Get an API key from wigle.net")
        print("2. Add it to config.json under api_keys->wigle")

    parser = argparse.ArgumentParser(description='Analyze probe requests and query WiGLE')
    parser.add_argument('--wigle', action='store_true',
                        help='Enable WiGLE API queries (disabled by default to protect API keys)')
    parser.add_argument('--local', action='store_true',
                        help='[DEPRECATED] Use --wigle to enable API calls')
    parser.add_argument('--days', type=int, default=14,
                        help='Number of days back to analyze (default: 14, use 0 for all logs)')
    parser.add_argument('--all-logs', action='store_true',
                        help='Analyze all log files (equivalent to --days 0)')
    args = parser.parse_args()

    # Handle days filtering
    days_back = 0 if args.all_logs else args.days

    print(f"\nAnalyzing probe requests from CYT logs...")
    if days_back > 0:
        print(f"📅 Filtering to logs from past {days_back} days")
    else:
        print("📁 Analyzing ALL log files")

    # Default to local_only=True unless --wigle is specified
    use_wigle = args.wigle or args.local  # Keep --local for backwards compatibility
    analyzer = ProbeAnalyzer(local_only=not use_wigle, days_back=days_back)

    if use_wigle:
        print("🌐 WiGLE API queries ENABLED - this will consume API credits!")
    else:
        print("🔒 Local analysis only (use --wigle to enable API queries)")
    analyzer.parse_all_logs()
    results = analyzer.analyze_probes()

    if not results:
        print("\nNo probe requests found in logs!")
        print("Make sure Chasing Your Tail is running and detecting probes.")
        return

    # Print analysis results
    print(f"\nFound {len(results)} unique SSIDs in probe requests:")
    print("-" * 50)

    # Sort results by count (most frequent first)
    results.sort(key=lambda x: x['count'], reverse=True)

    for result in results:
        print(f"\nSSID: {result['ssid']}")
        print(f"Times seen: {result['count']}")
        print(f"First seen: {result['first_seen']}")
        print(f"Last seen: {result['last_seen']}")

        # Calculate time span; skip silently when a timestamp is unparseable
        first = parse_cyt_timestamp(result['first_seen'])
        last = parse_cyt_timestamp(result['last_seen'])
        if first is not None and last is not None:
            duration = last - first
            if duration.total_seconds() > 0:
                print(f"Time span: {duration}")
                print(f"Average frequency: {result['count'] / duration.total_seconds():.2f} probes/second")

        if result.get('wigle_data'):
            if 'error' in result['wigle_data']:
                if result['wigle_data']['error'] != "WiGLE API key not configured":
                    print(f"WiGLE Error: {result['wigle_data']['error']}")
            else:
                print("\nWiGLE Data:")
                locations = result['wigle_data'].get('results', [])
                print(f"Known locations: {len(locations)}")
                if locations:
                    print("Recent sightings:")
                    for loc in locations[:3]:  # Show top 3 most recent
                        print(f"- Lat: {loc.get('trilat')}, Lon: {loc.get('trilong')}")
                        print(f"  Last seen: {loc.get('lastupdt')}")
# Entry point when the analyzer is executed directly as a script
if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/CLAUDE.md:
--------------------------------------------------------------------------------
1 | # CLAUDE.md
2 |
3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4 |
5 | ## Project Overview
6 |
7 | Chasing Your Tail (CYT) is a Wi-Fi probe request analyzer that monitors and tracks wireless devices by analyzing their probe requests. The system integrates with Kismet for packet capture and WiGLE API for SSID geolocation analysis.
8 |
9 | ## Core Architecture
10 |
11 | ### Main Components
12 | - **chasing_your_tail.py**: Core monitoring engine that queries Kismet SQLite databases in real-time
13 | - **cyt_gui.py**: Enhanced Tkinter GUI interface for controlling the system with surveillance analysis
14 | - **surveillance_analyzer.py**: Main surveillance detection orchestrator with GPS correlation and advanced KML visualization
15 | - **surveillance_detector.py**: Core persistence detection engine for identifying suspicious device patterns
16 | - **gps_tracker.py**: GPS tracking integration with location clustering and spectacular Google Earth KML generation
17 | - **probe_analyzer.py**: Post-processing tool for analyzing collected probe data with WiGLE integration
18 | - **start_kismet_clean.sh**: ONLY working Kismet startup script (all others moved to old_scripts/)
19 | - **Security modules**: `secure_*.py` files providing SQL injection prevention and encrypted credentials
20 |
21 | ### Data Flow
22 | 1. Kismet captures wireless frames and stores in SQLite database
23 | 2. Main monitoring script queries database every 60 seconds for new devices/probes
24 | 3. System maintains sliding time windows (5, 10, 15, 20 minutes) to track device persistence
25 | 4. Probe requests are filtered against ignore lists and logged when devices reappear across time windows
26 | 5. Post-analysis tool can query WiGLE API for SSID geolocation data
27 |
28 | ### Configuration System
29 | All paths, API keys, and timing parameters are centralized in `config.json`:
30 | - Kismet database path pattern
31 | - Log and ignore list directories
32 | - WiGLE API credentials
33 | - Time window configurations
34 | - Geographic search boundaries
35 |
36 | ## Common Development Commands
37 |
38 | ### Security Setup (REQUIRED FIRST TIME)
39 | ```bash
40 | # Install secure dependencies
41 | pip3 install -r requirements.txt
42 |
43 | # Migrate credentials from insecure config.json (if needed)
44 | python3 migrate_credentials.py
45 |
46 | # Verify security hardening
47 | python3 chasing_your_tail.py
48 | # Should show: "🔒 SECURE MODE: All SQL injection vulnerabilities have been eliminated!"
49 | ```
50 |
51 | ### Running the System
52 | ```bash
53 | # Start enhanced GUI interface (includes surveillance analysis button)
54 | python3 cyt_gui.py
55 |
56 | # Run core monitoring (command line) - NOW SECURE!
57 | python3 chasing_your_tail.py
58 |
59 | # Analyze collected data (past 14 days, local only - default, API-safe)
60 | python3 probe_analyzer.py
61 |
62 | # Analyze past 7 days only
63 | python3 probe_analyzer.py --days 7
64 |
65 | # Analyze ALL logs (may be slow for large datasets)
66 | python3 probe_analyzer.py --all-logs
67 |
68 | # Analyze WITH WiGLE API calls (consumes API credits!)
69 | python3 probe_analyzer.py --wigle
70 |
71 | # Start Kismet (ONLY working script)
72 | ./start_kismet_clean.sh
73 |
74 | # Check if running
75 | ps aux | grep kismet
76 | ```
77 |
78 | ### Kismet Startup
79 | Kismet automatically starts on boot via crontab and can be started manually:
80 |
81 | ```bash
82 | # Manual startup (ONLY working script)
83 | ./start_kismet_clean.sh
84 |
85 | # Check if running
86 | ps aux | grep kismet
87 |
88 | # Kill if needed (use direct kill, not pkill)
89 | for pid in $(pgrep kismet); do sudo kill -9 $pid; done
90 | ```
91 |
92 | **Auto-start Setup (FIXED July 23, 2025):**
93 | - **Kismet**: Starts automatically 60 seconds after boot via root crontab using `start_kismet_clean.sh`
94 | - **GUI**: Starts automatically 120 seconds after boot via user crontab using `start_gui.sh`
95 | - Root crontab: `sudo crontab -l` - handles Kismet only
96 | - User crontab: `crontab -l` - handles GUI only
97 | - **CRITICAL FIX**: ALL broken startup scripts moved to `old_scripts/` (had hanging pkill commands)
98 | - **GUI FIX**: Restored missing `start_gui.sh` that was accidentally moved during cleanup
99 | - **Key Insight**: Post-reboot startup should NEVER attempt process cleanup
100 |
101 | ### Surveillance Detection & Advanced Visualization
102 | ```bash
103 | # NEW: Automatic GPS extraction from Kismet with spectacular KML visualization
104 | python3 surveillance_analyzer.py
105 |
106 | # Run analysis with demo GPS data (for testing - uses Phoenix coordinates)
107 | python3 surveillance_analyzer.py --demo
108 |
109 | # Analyze specific Kismet database for surveillance patterns
110 | python3 surveillance_analyzer.py --kismet-db /path/to/kismet.db
111 |
112 | # Focus on stalking detection only with persistence scoring
113 | python3 surveillance_analyzer.py --stalking-only --min-persistence 0.8
114 |
115 | # Export results to JSON for further analysis
116 | python3 surveillance_analyzer.py --output-json analysis_results.json
117 |
118 | # Analyze with external GPS data from JSON file
119 | python3 surveillance_analyzer.py --gps-file gps_coordinates.json
120 | ```
121 |
122 | ### GUI Features
123 | The enhanced GUI (`cyt_gui.py`) now includes:
124 | - **🗺️ Surveillance Analysis** button - Runs GPS-correlated persistence detection with advanced KML visualization
125 | - **📈 Analyze Logs** button - Analyzes historical probe request data
126 | - **Real-time GPS integration** - Automatically uses Bluetooth GPS data from Kismet
127 | - **Spectacular KML generation** - Creates professional Google Earth visualizations with threat-level styling
128 |
129 | ### GPS Integration & KML Visualization (ENHANCED!)
130 | The system now automatically extracts GPS coordinates from Kismet databases and creates spectacular visualizations:
131 |
132 | - **Automatic GPS Detection**: No manual GPS file needed - extracts coordinates from Kismet
133 | - **Real-time Correlation**: Links device appearances to GPS locations with precise timing
134 | - **Location Clustering**: Groups nearby GPS points (within 100m) for analysis
135 | - **Professional KML Generation**: Creates spectacular Google Earth visualizations with:
136 | - Color-coded persistence level markers (green/yellow/red)
137 | - Device tracking paths showing movement correlation
138 | - Rich balloon content with detailed device intelligence
139 | - Activity heatmaps and intensity zones
140 | - Temporal analysis with time-based pattern detection
141 | - **Multi-location Tracking**: Detects devices following across different locations with visual tracking paths
142 |
143 | ### Ignore List Management
144 | ```bash
145 | # Create new ignore lists from current Kismet data
146 | python3 legacy/create_ignore_list.py # Moved to legacy folder
147 | ```
148 | **Note**: Ignore lists are now stored as JSON files in `./ignore_lists/`
149 |
150 | ### Project Structure & Key File Locations
151 |
152 | #### Core Files (Main Directory) - CLEANED July 23, 2025
153 | - **Core Python Scripts**: `chasing_your_tail.py`, `surveillance_analyzer.py`, `cyt_gui.py`, `probe_analyzer.py`, `gps_tracker.py`, `surveillance_detector.py`
154 | - **Security Modules**: `secure_*.py` (4 files), `input_validation.py`, `migrate_credentials.py`
155 | - **Configuration**: `config.json`, `requirements.txt`
156 | - **Working Startup Scripts**: `start_kismet_clean.sh` (Kismet), `start_gui.sh` (GUI)
157 | - **Documentation**: `CLAUDE.md`, `README.md`
158 |
159 | #### Output Directories
160 | - **Surveillance Reports**: `./surveillance_reports/surveillance_report_YYYYMMDD_HHMMSS.md` (markdown)
161 | - **HTML Reports**: `./surveillance_reports/surveillance_report_YYYYMMDD_HHMMSS.html` (styled HTML with pandoc)
162 | - **KML Visualizations**: `./kml_files/surveillance_analysis_YYYYMMDD_HHMMSS.kml` (spectacular Google Earth files)
163 | - **CYT Logs**: `./logs/cyt_log_MMDDYY_HHMMSS`
164 | - **Analysis Logs**: `./analysis_logs/surveillance_analysis.log`
165 | - **Probe Reports**: `./reports/probe_analysis_report_YYYYMMDD_HHMMSS.txt`
166 |
167 | #### Configuration & Data
168 | - **Ignore Lists**: `./ignore_lists/mac_list.json` and `./ignore_lists/ssid_list.json`
169 | - **Kismet Database**: Path specified in config.json (typically `/home/matt/kismet_logs/*.kismet`)
170 |
171 | #### Archive Directories - CLEANED July 23, 2025
172 | - **old_scripts/**: All broken startup scripts with hanging pkill commands (temporarily held `start_gui.sh`)
173 | - **docs_archive/**: Session notes, old configs, backup files, duplicate logs
174 | - **legacy/**: Original legacy code archive (pre-security hardening)
175 |
176 | ## Technical Details
177 |
178 | ### Time Window System
179 | The core algorithm maintains four overlapping time windows to detect device persistence:
180 | - Recent: Past 5 minutes
181 | - Medium: 5-10 minutes ago
182 | - Old: 10-15 minutes ago
183 | - Oldest: 15-20 minutes ago
184 |
185 | Every 5 cycles (5 minutes), lists are rotated and updated from fresh database queries.
186 |
187 | ### Database Interaction
188 | System reads from live Kismet SQLite databases using direct SQL queries. Key tables:
189 | - `devices`: Contains MAC addresses, device types, and JSON device details
190 | - Probe request data is embedded in JSON `device` field under `dot11.device.last_probed_ssid_record`
191 |
192 | ### Ignore List Format
- **MAC lists**: JSON array in `./ignore_lists/mac_list.json` (e.g. `["MAC1", "MAC2", ...]`)
- **SSID lists**: JSON array in `./ignore_lists/ssid_list.json` (e.g. `["SSID1", "SSID2", ...]`)
- Lists are loaded safely via `secure_ignore_loader.py` (the legacy `exec()`-based loading was removed during security hardening)
196 |
197 | ### WiGLE Integration
198 | Probe analyzer can query WiGLE API for SSID location data using securely encrypted API credentials.
199 |
200 | ### Surveillance Detection System
201 | Advanced persistence detection algorithms analyze device behavior patterns:
202 | - **Temporal Persistence**: Detects devices appearing consistently over time
203 | - **Location Correlation**: Identifies devices following across multiple locations
204 | - **Probe Pattern Analysis**: Analyzes SSID probe requests for suspicious patterns
205 | - **Timing Analysis**: Detects unusual appearance timing (work hours, off-hours, regular intervals)
206 | - **Persistence Scoring**: Assigns weighted scores (0-1.0) based on combined indicators
207 | - **Multi-location Tracking**: Specialized algorithms for detecting following behavior across locations
208 |
209 | ### GPS Integration & Spectacular KML Export
210 | - **Location Clustering**: Groups nearby GPS coordinates (configurable threshold)
211 | - **Session Management**: Tracks location sessions with timeout handling
212 | - **Device Correlation**: Links device appearances to specific GPS locations
213 | - **Professional KML Generation**: Creates spectacular Google Earth files with:
214 | - Color-coded location markers with persistence-level styling
215 | - Device tracking paths with threat-level visualization
216 | - Rich interactive balloon content with device intelligence
217 | - Activity heatmaps showing surveillance intensity zones
218 | - Temporal analysis overlays for time-based pattern detection
219 | - Professional document metadata and feature descriptions
220 | - **Multi-location Analysis**: Identifies devices seen across multiple locations with visual tracking paths
221 |
222 | ## Security Hardening (NEW!)
223 |
224 | ### Critical Vulnerabilities FIXED
225 | - **SQL Injection**: All database queries now use parameterized statements
226 | - **Remote Code Execution**: Eliminated dangerous `exec()` calls in ignore list loading
227 | - **Credential Exposure**: API keys now encrypted with master password
228 | - **Input Validation**: Comprehensive sanitization of all inputs
229 | - **Error Handling**: Security-focused logging and error boundaries
230 |
231 | ### Security Files
232 | - `secure_ignore_loader.py`: Safe ignore list loading (replaces exec())
233 | - `secure_database.py`: SQL injection prevention
234 | - `secure_credentials.py`: Encrypted credential management
235 | - `secure_main_logic.py`: Secure monitoring logic
236 | - `input_validation.py`: Input sanitization and validation
237 | - `migrate_credentials.py`: Tool to migrate insecure credentials
238 |
239 | ### Security Logs
240 | - `cyt_security.log`: Security events and audit trail
241 | - All credential access is logged
242 | - Failed validation attempts are tracked
243 | - Database errors are monitored
244 |
245 | **⚠️ IMPORTANT: Run `python3 migrate_credentials.py` before first use to secure your API keys!**
--------------------------------------------------------------------------------
/input_validation.py:
--------------------------------------------------------------------------------
1 | """
2 | Input validation and sanitization for CYT
3 | Prevents injection attacks and ensures data integrity
4 | """
5 | import re
6 | import json
7 | import logging
8 | from typing import Any, Optional, Dict, List
9 | from pathlib import Path
10 |
11 | logger = logging.getLogger(__name__)
12 |
13 | class InputValidator:
14 | """Comprehensive input validation for CYT"""
15 |
16 | # Regex patterns for validation
17 | MAC_PATTERN = re.compile(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$')
18 | SSID_PATTERN = re.compile(r'^[\x20-\x7E]{1,32}$') # Printable ASCII, max 32 chars
19 | PATH_PATTERN = re.compile(r'^[a-zA-Z0-9._\-/\\:]+$')
20 | FILENAME_PATTERN = re.compile(r'^[a-zA-Z0-9._\-]+$')
21 |
22 | # Dangerous characters to filter
23 | DANGEROUS_CHARS = ['<', '>', '"', "'", '&', ';', '|', '`', '$', '(', ')', '{', '}', '[', ']']
24 | SQL_KEYWORDS = ['SELECT', 'INSERT', 'UPDATE', 'DELETE', 'DROP', 'UNION', 'EXEC', 'SCRIPT']
25 |
26 | @classmethod
27 | def validate_mac_address(cls, mac: str) -> bool:
28 | """Validate MAC address format"""
29 | if not isinstance(mac, str):
30 | return False
31 | if len(mac) > 17: # Max length for MAC address
32 | return False
33 | return bool(cls.MAC_PATTERN.match(mac))
34 |
35 | @classmethod
36 | def validate_ssid(cls, ssid: str) -> bool:
37 | """Validate SSID format and content"""
38 | if not isinstance(ssid, str):
39 | return False
40 | if len(ssid) == 0 or len(ssid) > 32:
41 | return False
42 | # Check for null bytes and control characters
43 | if '\x00' in ssid or any(ord(c) < 32 and c not in '\t\n\r' for c in ssid):
44 | return False
45 | # Check for dangerous characters
46 | if any(char in ssid for char in cls.DANGEROUS_CHARS):
47 | logger.warning(f"SSID contains dangerous characters: {ssid}")
48 | return False
49 | return True
50 |
51 | @classmethod
52 | def validate_file_path(cls, path: str) -> bool:
53 | """Validate file path is safe"""
54 | if not isinstance(path, str):
55 | return False
56 | if len(path) > 4096: # Max reasonable path length
57 | return False
58 |
59 | # Check for path traversal attempts
60 | if '..' in path or '~' in path:
61 | logger.warning(f"Path traversal attempt detected: {path}")
62 | return False
63 |
64 | # Check for dangerous characters
65 | if any(char in path for char in ['<', '>', '|', '&', ';', '`']):
66 | logger.warning(f"Dangerous characters in path: {path}")
67 | return False
68 |
69 | return True
70 |
71 | @classmethod
72 | def validate_filename(cls, filename: str) -> bool:
73 | """Validate filename is safe"""
74 | if not isinstance(filename, str):
75 | return False
76 | if len(filename) > 255: # Max filename length
77 | return False
78 |
79 | # Check for dangerous patterns
80 | if filename in ['.', '..', ''] or filename.startswith('.'):
81 | return False
82 |
83 | return bool(cls.FILENAME_PATTERN.match(filename))
84 |
85 | @classmethod
86 | def sanitize_string(cls, input_str: str, max_length: int = 1000) -> str:
87 | """Sanitize string input by removing dangerous content"""
88 | if not isinstance(input_str, str):
89 | return ""
90 |
91 | # Truncate if too long
92 | if len(input_str) > max_length:
93 | input_str = input_str[:max_length]
94 | logger.warning(f"Input truncated to {max_length} characters")
95 |
96 | # Remove null bytes and control characters (except whitespace)
97 | sanitized = ''.join(c for c in input_str if ord(c) >= 32 or c in '\t\n\r')
98 |
99 | # Remove dangerous characters
100 | for char in cls.DANGEROUS_CHARS:
101 | sanitized = sanitized.replace(char, '')
102 |
103 | # Check for SQL injection attempts
104 | upper_sanitized = sanitized.upper()
105 | for keyword in cls.SQL_KEYWORDS:
106 | if keyword in upper_sanitized:
107 | logger.warning(f"Potential SQL injection attempt: {sanitized}")
108 | sanitized = sanitized.replace(keyword, '')
109 | sanitized = sanitized.replace(keyword.lower(), '')
110 |
111 | return sanitized.strip()
112 |
113 | @classmethod
114 | def validate_config_structure(cls, config: Dict[str, Any]) -> bool:
115 | """Validate configuration file structure"""
116 | required_keys = ['paths', 'timing']
117 |
118 | if not isinstance(config, dict):
119 | logger.error("Config must be a dictionary")
120 | return False
121 |
122 | for key in required_keys:
123 | if key not in config:
124 | logger.error(f"Missing required config key: {key}")
125 | return False
126 |
127 | # Validate paths section
128 | paths = config['paths']
129 | if not isinstance(paths, dict):
130 | logger.error("Config 'paths' must be a dictionary")
131 | return False
132 |
133 | required_paths = ['log_dir', 'kismet_logs', 'ignore_lists']
134 | for path_key in required_paths:
135 | if path_key not in paths:
136 | logger.error(f"Missing required path: {path_key}")
137 | return False
138 |
139 | path_value = paths[path_key]
140 | if isinstance(path_value, str):
141 | if not cls.validate_file_path(path_value):
142 | logger.error(f"Invalid path format: {path_key}={path_value}")
143 | return False
144 |
145 | # Validate timing section
146 | timing = config['timing']
147 | if not isinstance(timing, dict):
148 | logger.error("Config 'timing' must be a dictionary")
149 | return False
150 |
151 | timing_keys = ['check_interval', 'list_update_interval']
152 | for timing_key in timing_keys:
153 | if timing_key in timing:
154 | value = timing[timing_key]
155 | if not isinstance(value, (int, float)) or value <= 0:
156 | logger.error(f"Invalid timing value: {timing_key}={value}")
157 | return False
158 |
159 | return True
160 |
161 | @classmethod
162 | def validate_ignore_list(cls, ignore_list: List[str], list_type: str) -> List[str]:
163 | """Validate and filter ignore list entries"""
164 | if not isinstance(ignore_list, list):
165 | logger.error(f"Ignore list must be a list, got {type(ignore_list)}")
166 | return []
167 |
168 | validated_list = []
169 | validator_func = cls.validate_mac_address if list_type == 'mac' else cls.validate_ssid
170 |
171 | for item in ignore_list:
172 | if validator_func(item):
173 | validated_list.append(item)
174 | else:
175 | logger.warning(f"Invalid {list_type} entry removed: {item}")
176 |
177 | return validated_list
178 |
179 | @classmethod
180 | def validate_json_input(cls, json_str: str, max_size: int = 1024*1024) -> Optional[Dict]:
181 | """Safely parse and validate JSON input"""
182 | if not isinstance(json_str, str):
183 | logger.error("JSON input must be string")
184 | return None
185 |
186 | if len(json_str) > max_size:
187 | logger.error(f"JSON input too large: {len(json_str)} > {max_size}")
188 | return None
189 |
190 | try:
191 | # Parse JSON
192 | data = json.loads(json_str)
193 |
194 | # Basic structure validation
195 | if isinstance(data, dict):
196 | # Validate keys and values
197 | for key, value in data.items():
198 | if not isinstance(key, str) or len(key) > 100:
199 | logger.warning(f"Invalid JSON key: {key}")
200 | return None
201 |
202 | # Recursively validate nested structures
203 | if isinstance(value, (dict, list)):
204 | continue # Could add deeper validation here
205 | elif isinstance(value, str):
206 | if len(value) > 10000: # Reasonable string limit
207 | logger.warning(f"JSON string value too long: {key}")
208 | return None
209 |
210 | return data
211 |
212 | except json.JSONDecodeError as e:
213 | logger.error(f"Invalid JSON: {e}")
214 | return None
215 |
216 | @classmethod
217 | def validate_database_path(cls, db_path: str) -> bool:
218 | """Validate database path is safe and accessible"""
219 | if not cls.validate_file_path(db_path):
220 | return False
221 |
222 | # Check if path exists (for globbed paths, check pattern)
223 | if '*' in db_path:
224 | # It's a glob pattern - validate the base directory
225 | base_dir = db_path.split('*')[0]
226 | if base_dir and not Path(base_dir).exists():
227 | logger.warning(f"Database base directory does not exist: {base_dir}")
228 | return False
229 | else:
230 | # It's a specific file
231 | if not Path(db_path).exists():
232 | logger.warning(f"Database file does not exist: {db_path}")
233 | return False
234 |
235 | return True
236 |
237 |
238 | class SecureInputHandler:
239 | """Wrapper for handling all input validation in CYT"""
240 |
241 | def __init__(self):
242 | self.validator = InputValidator()
243 |
244 | def safe_load_config(self, config_path: str) -> Optional[Dict[str, Any]]:
245 | """Safely load and validate configuration file"""
246 | try:
247 | if not self.validator.validate_file_path(config_path):
248 | logger.error(f"Invalid config path: {config_path}")
249 | return None
250 |
251 | if not Path(config_path).exists():
252 | logger.error(f"Config file not found: {config_path}")
253 | return None
254 |
255 | with open(config_path, 'r', encoding='utf-8') as f:
256 | content = f.read()
257 |
258 | # Validate JSON structure
259 | config = self.validator.validate_json_input(content)
260 | if not config:
261 | return None
262 |
263 | # Validate configuration structure
264 | if not self.validator.validate_config_structure(config):
265 | return None
266 |
267 | logger.info(f"Configuration loaded and validated: {config_path}")
268 | return config
269 |
270 | except Exception as e:
271 | logger.error(f"Error loading config: {e}")
272 | return None
273 |
274 | def safe_load_ignore_list(self, file_path: Path, list_type: str) -> List[str]:
275 | """Safely load ignore list with validation"""
276 | try:
277 | if not file_path.exists():
278 | logger.info(f"Ignore list not found: {file_path}")
279 | return []
280 |
281 | with open(file_path, 'r', encoding='utf-8') as f:
282 | content = f.read()
283 |
284 | # Try to parse as JSON first
285 | try:
286 | data = json.loads(content)
287 | if isinstance(data, list):
288 | return self.validator.validate_ignore_list(data, list_type)
289 | except json.JSONDecodeError:
290 | pass
291 |
292 | # Fall back to Python variable parsing (but safer)
293 | # This is for legacy compatibility
294 | logger.warning(f"Using legacy ignore list format: {file_path}")
295 | # We'll implement safer parsing if needed
296 |
297 | return []
298 |
299 | except Exception as e:
300 | logger.error(f"Error loading ignore list {file_path}: {e}")
301 | return []
--------------------------------------------------------------------------------
/secure_main_logic.py:
--------------------------------------------------------------------------------
1 | """
2 | Secure main logic for Chasing Your Tail - replaces vulnerable SQL operations
3 | """
4 | import logging
5 | from typing import List, Dict, Set
6 | from secure_database import SecureKismetDB, SecureTimeWindows
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 | class SecureCYTMonitor:
11 | """Secure monitoring logic for CYT"""
12 |
13 | def __init__(self, config: dict, ignore_list: List[str], ssid_ignore_list: List[str], log_file):
14 | self.config = config
15 | self.ignore_list = set(mac.upper() for mac in ignore_list) # Convert to set for O(1) lookup
16 | self.ssid_ignore_list = set(ssid_ignore_list)
17 | self.log_file = log_file
18 | self.time_manager = SecureTimeWindows(config)
19 |
20 | # Initialize tracking lists
21 | self.past_five_mins_macs: Set[str] = set()
22 | self.five_ten_min_ago_macs: Set[str] = set()
23 | self.ten_fifteen_min_ago_macs: Set[str] = set()
24 | self.fifteen_twenty_min_ago_macs: Set[str] = set()
25 |
26 | self.past_five_mins_ssids: Set[str] = set()
27 | self.five_ten_min_ago_ssids: Set[str] = set()
28 | self.ten_fifteen_min_ago_ssids: Set[str] = set()
29 | self.fifteen_twenty_min_ago_ssids: Set[str] = set()
30 |
31 | def initialize_tracking_lists(self, db: SecureKismetDB) -> None:
32 | """Initialize all tracking lists securely"""
33 | try:
34 | boundaries = self.time_manager.get_time_boundaries()
35 |
36 | # Initialize MAC tracking lists
37 | self._initialize_mac_lists(db, boundaries)
38 |
39 | # Initialize SSID tracking lists
40 | self._initialize_ssid_lists(db, boundaries)
41 |
42 | self._log_initialization_stats()
43 |
44 | except Exception as e:
45 | logger.error(f"Failed to initialize tracking lists: {e}")
46 | raise
47 |
48 | def _initialize_mac_lists(self, db: SecureKismetDB, boundaries: Dict[str, float]) -> None:
49 | """Initialize MAC address tracking lists"""
50 | # Past 5 minutes
51 | macs = db.get_mac_addresses_by_time_range(boundaries['recent_time'])
52 | self.past_five_mins_macs = self._filter_macs(macs)
53 |
54 | # 5-10 minutes ago
55 | macs = db.get_mac_addresses_by_time_range(boundaries['medium_time'], boundaries['recent_time'])
56 | self.five_ten_min_ago_macs = self._filter_macs(macs)
57 |
58 | # 10-15 minutes ago
59 | macs = db.get_mac_addresses_by_time_range(boundaries['old_time'], boundaries['medium_time'])
60 | self.ten_fifteen_min_ago_macs = self._filter_macs(macs)
61 |
62 | # 15-20 minutes ago
63 | macs = db.get_mac_addresses_by_time_range(boundaries['oldest_time'], boundaries['old_time'])
64 | self.fifteen_twenty_min_ago_macs = self._filter_macs(macs)
65 |
66 | def _initialize_ssid_lists(self, db: SecureKismetDB, boundaries: Dict[str, float]) -> None:
67 | """Initialize SSID tracking lists"""
68 | # Past 5 minutes
69 | probes = db.get_probe_requests_by_time_range(boundaries['recent_time'])
70 | self.past_five_mins_ssids = self._filter_ssids([p['ssid'] for p in probes])
71 |
72 | # 5-10 minutes ago
73 | probes = db.get_probe_requests_by_time_range(boundaries['medium_time'], boundaries['recent_time'])
74 | self.five_ten_min_ago_ssids = self._filter_ssids([p['ssid'] for p in probes])
75 |
76 | # 10-15 minutes ago
77 | probes = db.get_probe_requests_by_time_range(boundaries['old_time'], boundaries['medium_time'])
78 | self.ten_fifteen_min_ago_ssids = self._filter_ssids([p['ssid'] for p in probes])
79 |
80 | # 15-20 minutes ago
81 | probes = db.get_probe_requests_by_time_range(boundaries['oldest_time'], boundaries['old_time'])
82 | self.fifteen_twenty_min_ago_ssids = self._filter_ssids([p['ssid'] for p in probes])
83 |
84 | def _filter_macs(self, mac_list: List[str]) -> Set[str]:
85 | """Filter MAC addresses against ignore list"""
86 | return {mac.upper() for mac in mac_list if mac.upper() not in self.ignore_list}
87 |
88 | def _filter_ssids(self, ssid_list: List[str]) -> Set[str]:
89 | """Filter SSIDs against ignore list"""
90 | return {ssid for ssid in ssid_list if ssid and ssid not in self.ssid_ignore_list}
91 |
92 | def _log_initialization_stats(self) -> None:
93 | """Log initialization statistics"""
94 | mac_stats = [
95 | ("Past 5 minutes", len(self.past_five_mins_macs)),
96 | ("5-10 minutes ago", len(self.five_ten_min_ago_macs)),
97 | ("10-15 minutes ago", len(self.ten_fifteen_min_ago_macs)),
98 | ("15-20 minutes ago", len(self.fifteen_twenty_min_ago_macs))
99 | ]
100 |
101 | ssid_stats = [
102 | ("Past 5 minutes", len(self.past_five_mins_ssids)),
103 | ("5-10 minutes ago", len(self.five_ten_min_ago_ssids)),
104 | ("10-15 minutes ago", len(self.ten_fifteen_min_ago_ssids)),
105 | ("15-20 minutes ago", len(self.fifteen_twenty_min_ago_ssids))
106 | ]
107 |
108 | for period, count in mac_stats:
109 | message = f"{count} MACs added to the {period} list"
110 | print(message)
111 | self.log_file.write(f"{message}\n")
112 |
113 | for period, count in ssid_stats:
114 | message = f"{count} Probed SSIDs added to the {period} list"
115 | print(message)
116 | self.log_file.write(f"{message}\n")
117 |
118 | def process_current_activity(self, db: SecureKismetDB) -> None:
119 | """Process current activity and detect matches"""
120 | try:
121 | boundaries = self.time_manager.get_time_boundaries()
122 |
123 | # Get current devices and probes
124 | current_devices = db.get_devices_by_time_range(boundaries['current_time'])
125 |
126 | for device in current_devices:
127 | mac = device['mac']
128 | device_data = device.get('device_data', {})
129 |
130 | if not mac:
131 | continue
132 |
133 | # Check for probe requests
134 | self._process_probe_requests(device_data, mac)
135 |
136 | # Check MAC address tracking
137 | self._process_mac_tracking(mac)
138 |
139 | except Exception as e:
140 | logger.error(f"Error processing current activity: {e}")
141 |
142 | def _process_probe_requests(self, device_data: Dict, mac: str) -> None:
143 | """Process probe requests from device data"""
144 | if not device_data:
145 | return
146 |
147 | try:
148 | dot11_device = device_data.get('dot11.device', {})
149 | if not isinstance(dot11_device, dict):
150 | return
151 |
152 | probe_record = dot11_device.get('dot11.device.last_probed_ssid_record', {})
153 | if not isinstance(probe_record, dict):
154 | return
155 |
156 | ssid = probe_record.get('dot11.probedssid.ssid', '')
157 | if not ssid or ssid in self.ssid_ignore_list:
158 | return
159 |
160 | # Log the probe
161 | message = f'Found a probe!: {ssid}'
162 | self.log_file.write(f'{message}\n')
163 | logger.info(f"Probe detected from {mac}: {ssid}")
164 |
165 | # Check against historical lists
166 | self._check_ssid_history(ssid)
167 |
168 | except (KeyError, TypeError, AttributeError) as e:
169 | logger.debug(f"No probe data for device {mac}: {e}")
170 |
171 | def _check_ssid_history(self, ssid: str) -> None:
172 | """Check SSID against historical tracking lists"""
173 | if ssid in self.five_ten_min_ago_ssids:
174 | message = f"Probe for {ssid} in 5 to 10 mins list"
175 | print(message)
176 | self.log_file.write(f"{message}\n")
177 | logger.warning(f"Repeated probe detected: {ssid} (5-10 min window)")
178 |
179 | if ssid in self.ten_fifteen_min_ago_ssids:
180 | message = f"Probe for {ssid} in 10 to 15 mins list"
181 | print(message)
182 | self.log_file.write(f"{message}\n")
183 | logger.warning(f"Repeated probe detected: {ssid} (10-15 min window)")
184 |
185 | if ssid in self.fifteen_twenty_min_ago_ssids:
186 | message = f"Probe for {ssid} in 15 to 20 mins list"
187 | print(message)
188 | self.log_file.write(f"{message}\n")
189 | logger.warning(f"Repeated probe detected: {ssid} (15-20 min window)")
190 |
191 | def _process_mac_tracking(self, mac: str) -> None:
192 | """Process MAC address tracking"""
193 | if mac.upper() in self.ignore_list:
194 | return
195 |
196 | # Check against historical lists
197 | if mac in self.five_ten_min_ago_macs:
198 | message = f"{mac} in 5 to 10 mins list"
199 | print(message)
200 | self.log_file.write(f"{message}\n")
201 | logger.warning(f"Device reappeared: {mac} (5-10 min window)")
202 |
203 | if mac in self.ten_fifteen_min_ago_macs:
204 | message = f"{mac} in 10 to 15 mins list"
205 | print(message)
206 | self.log_file.write(f"{message}\n")
207 | logger.warning(f"Device reappeared: {mac} (10-15 min window)")
208 |
209 | if mac in self.fifteen_twenty_min_ago_macs:
210 | message = f"{mac} in 15 to 20 mins list"
211 | print(message)
212 | self.log_file.write(f"{message}\n")
213 | logger.warning(f"Device reappeared: {mac} (15-20 min window)")
214 |
215 | def rotate_tracking_lists(self, db: SecureKismetDB) -> None:
216 | """Rotate tracking lists and update with fresh data"""
217 | try:
218 | # Rotate MAC lists
219 | self.fifteen_twenty_min_ago_macs = self.ten_fifteen_min_ago_macs
220 | self.ten_fifteen_min_ago_macs = self.five_ten_min_ago_macs
221 | self.five_ten_min_ago_macs = self.past_five_mins_macs
222 |
223 | # Rotate SSID lists
224 | self.fifteen_twenty_min_ago_ssids = self.ten_fifteen_min_ago_ssids
225 | self.ten_fifteen_min_ago_ssids = self.five_ten_min_ago_ssids
226 | self.five_ten_min_ago_ssids = self.past_five_mins_ssids
227 |
228 | # Get fresh data for past 5 minutes
229 | boundaries = self.time_manager.get_time_boundaries()
230 |
231 | # Update past 5 minutes MAC list
232 | macs = db.get_mac_addresses_by_time_range(boundaries['recent_time'])
233 | self.past_five_mins_macs = self._filter_macs(macs)
234 |
235 | # Update past 5 minutes SSID list
236 | probes = db.get_probe_requests_by_time_range(boundaries['recent_time'])
237 | self.past_five_mins_ssids = self._filter_ssids([p['ssid'] for p in probes])
238 |
239 | self._log_rotation_stats()
240 |
241 | except Exception as e:
242 | logger.error(f"Error rotating tracking lists: {e}")
243 |
244 | def _log_rotation_stats(self) -> None:
245 | """Log rotation statistics"""
246 | print("Updated MAC tracking lists:")
247 | print(f"- 15-20 min ago: {len(self.fifteen_twenty_min_ago_macs)}")
248 | print(f"- 10-15 min ago: {len(self.ten_fifteen_min_ago_macs)}")
249 | print(f"- 5-10 min ago: {len(self.five_ten_min_ago_macs)}")
250 | print(f"- Current: {len(self.past_five_mins_macs)}")
251 |
252 | # Log to file
253 | self.log_file.write(f"{len(self.fifteen_twenty_min_ago_macs)} MACs moved to the 15-20 Min list\n")
254 | self.log_file.write(f"{len(self.ten_fifteen_min_ago_macs)} MACs moved to the 10-15 Min list\n")
255 | self.log_file.write(f"{len(self.five_ten_min_ago_macs)} MACs moved to the 5 to 10 mins ago list\n")
256 |
257 | print(f"{len(self.fifteen_twenty_min_ago_ssids)} Probed SSIDs moved to the 15 to 20 mins ago list")
258 | print(f"{len(self.ten_fifteen_min_ago_ssids)} Probed SSIDs moved to the 10 to 15 mins ago list")
259 | print(f"{len(self.five_ten_min_ago_ssids)} Probed SSIDs moved to the 5 to 10 mins ago list")
260 |
261 | self.log_file.write(f"{len(self.fifteen_twenty_min_ago_ssids)} Probed SSIDs moved to the 15 to 20 mins ago list\n")
262 | self.log_file.write(f"{len(self.ten_fifteen_min_ago_ssids)} Probed SSIDs moved to the 10 to 15 mins ago list\n")
263 | self.log_file.write(f"{len(self.five_ten_min_ago_ssids)} Probed SSIDs moved to the 5 to 10 mins ago list\n")
--------------------------------------------------------------------------------
/surveillance_analyzer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Integrated Surveillance Analysis Tool for CYT
4 | Combines GPS tracking, device detection, and KML export for stalking/surveillance detection
5 | """
6 | import argparse
7 | import glob
8 | import json
9 | import logging
10 | import os
11 | import time
12 | from datetime import datetime
13 | from pathlib import Path
14 |
15 | from surveillance_detector import SurveillanceDetector, load_appearances_from_kismet
16 | from gps_tracker import GPSTracker, KMLExporter, simulate_gps_data
17 | from secure_credentials import secure_config_loader
18 |
19 | # Configure logging
20 | logging.basicConfig(
21 | level=logging.INFO,
22 | format='%(asctime)s - %(levelname)s - %(message)s',
23 | handlers=[
24 | logging.FileHandler('surveillance_analysis.log'),
25 | logging.StreamHandler()
26 | ]
27 | )
28 |
29 | logger = logging.getLogger(__name__)
30 |
31 | class SurveillanceAnalyzer:
32 | """Main surveillance analysis orchestrator"""
33 |
34 | def __init__(self, config_path: str = 'config.json'):
35 | # Load secure configuration
36 | os.environ['CYT_TEST_MODE'] = 'true' # For non-interactive mode
37 | self.config, self.credential_manager = secure_config_loader(config_path)
38 |
39 | # Initialize components
40 | self.detector = SurveillanceDetector(self.config)
41 | self.gps_tracker = GPSTracker(self.config)
42 | self.kml_exporter = KMLExporter()
43 |
44 | # Analysis settings
45 | self.analysis_window_hours = 24 # Analyze last 24 hours by default
46 |
47 | def analyze_kismet_data(self, kismet_db_path: str = None,
48 | gps_data: list = None) -> dict:
49 | """Perform complete surveillance analysis on Kismet data"""
50 |
51 | print("🔍 Starting Surveillance Analysis...")
52 | print("=" * 50)
53 |
54 | # Find all Kismet databases from past 24 hours
55 | if not kismet_db_path:
56 | db_pattern = self.config['paths']['kismet_logs']
57 | all_db_files = glob.glob(db_pattern)
58 | if not all_db_files:
59 | raise FileNotFoundError(f"No Kismet database found at: {db_pattern}")
60 |
61 | # Filter to databases modified in the past 24 hours
62 | import sqlite3
63 | current_time = time.time()
64 | hours_24_ago = current_time - (self.analysis_window_hours * 3600)
65 |
66 | recent_db_files = [db for db in all_db_files if os.path.getmtime(db) >= hours_24_ago]
67 | recent_db_files = sorted(recent_db_files, key=os.path.getmtime, reverse=True)
68 |
69 | if not recent_db_files:
70 | print(f"⚠️ No databases found from past {self.analysis_window_hours} hours, using most recent")
71 | kismet_db_path = max(all_db_files, key=os.path.getmtime)
72 | else:
73 | print(f"📊 Found {len(recent_db_files)} databases from past {self.analysis_window_hours} hours:")
74 | total_gps_coords = 0
75 | for db_file in recent_db_files:
76 | try:
77 | conn = sqlite3.connect(db_file)
78 | cursor = conn.cursor()
79 | cursor.execute("SELECT COUNT(*) FROM devices WHERE avg_lat != 0 AND avg_lon != 0")
80 | gps_count = cursor.fetchone()[0]
81 | conn.close()
82 | print(f" 📁 {os.path.basename(db_file)}: {gps_count} GPS locations")
83 | total_gps_coords += gps_count
84 | except:
85 | print(f" ❌ {os.path.basename(db_file)}: Error reading")
86 |
87 | print(f"🛰️ Total GPS coordinates across all databases: {total_gps_coords}")
88 | # We'll process all recent databases, not just one
89 | kismet_db_path = recent_db_files # Pass list instead of single file
90 |
91 | # Handle multiple database files
92 | db_files_to_process = kismet_db_path if isinstance(kismet_db_path, list) else [kismet_db_path]
93 | print(f"📊 Processing {len(db_files_to_process)} Kismet database(s)")
94 |
95 | # Load GPS data (real, simulated, or extract from Kismet)
96 | if gps_data:
97 | print(f"🛰️ Loading {len(gps_data)} GPS coordinates...")
98 | for lat, lon, name in gps_data:
99 | location_id = self.gps_tracker.add_gps_reading(lat, lon, location_name=name)
100 | print(f" 📍 {name}: {lat:.4f}, {lon:.4f} -> {location_id}")
101 | else:
102 | # Extract GPS coordinates from all Kismet databases
103 | print("🛰️ Extracting GPS coordinates from Kismet databases...")
104 | try:
105 | import sqlite3
106 | all_gps_coords = []
107 |
108 | for db_file in db_files_to_process:
109 | try:
110 | conn = sqlite3.connect(db_file)
111 | cursor = conn.cursor()
112 |
113 | # Get GPS locations with timestamps from this database
114 | cursor.execute("""
115 | SELECT DISTINCT avg_lat, avg_lon, first_time
116 | FROM devices
117 | WHERE avg_lat != 0 AND avg_lon != 0
118 | ORDER BY first_time
119 | """)
120 |
121 | db_coords = cursor.fetchall()
122 | conn.close()
123 |
124 | if db_coords:
125 | print(f" 📁 {os.path.basename(db_file)}: {len(db_coords)} GPS locations")
126 | all_gps_coords.extend(db_coords)
127 |
128 | except Exception as e:
129 | print(f" ❌ Error reading {os.path.basename(db_file)}: {e}")
130 | continue
131 |
132 | if all_gps_coords:
133 | # Sort all coordinates by timestamp and deduplicate nearby points
134 | all_gps_coords.sort(key=lambda x: x[2]) # Sort by timestamp
135 |
136 | gps_data = []
137 | prev_lat, prev_lon = None, None
138 | location_counter = 1
139 |
140 | for lat, lon, timestamp in all_gps_coords:
141 | # Skip if too close to previous point (within ~50m)
142 | if prev_lat and prev_lon:
143 | import math
144 | distance = math.sqrt((lat - prev_lat)**2 + (lon - prev_lon)**2) * 111000 # rough meters
145 | if distance < 50:
146 | continue
147 |
148 | location_name = f"Location_{location_counter}"
149 | location_id = self.gps_tracker.add_gps_reading(lat, lon, location_name=location_name)
150 | print(f" 📍 {location_name}: {lat:.6f}, {lon:.6f}")
151 | gps_data.append((lat, lon, location_name))
152 |
153 | prev_lat, prev_lon = lat, lon
154 | location_counter += 1
155 |
156 | print(f"🛰️ Total unique GPS locations: {len(gps_data)}")
157 | else:
158 | print("⚠️ No GPS coordinates found in any Kismet database - using single location mode")
159 | location_id = "unknown_location"
160 |
161 | except Exception as e:
162 | print(f"❌ Error extracting GPS from Kismet: {e}")
163 | print("⚠️ Using single location mode")
164 | location_id = "unknown_location"
165 |
166 | # Load device appearances from Kismet databases
167 | print("📡 Loading device appearances from Kismet databases...")
168 | total_count = 0
169 |
170 | if gps_data:
171 | # Load devices from all databases, associating them with GPS locations
172 | primary_location = "Location_1" # Use the first/primary location
173 | for db_file in db_files_to_process:
174 | db_count = self._load_appearances_with_gps(db_file, primary_location)
175 | print(f" 📁 {os.path.basename(db_file)}: {db_count} device appearances")
176 | total_count += db_count
177 | else:
178 | # Load from all databases without GPS correlation
179 | for db_file in db_files_to_process:
180 | db_count = load_appearances_from_kismet(db_file, self.detector, "unknown_location")
181 | print(f" 📁 {os.path.basename(db_file)}: {db_count} device appearances")
182 | total_count += db_count
183 |
184 | print(f"✅ Total device appearances loaded: {total_count:,}")
185 |
186 | # Perform surveillance detection
187 | print("\\n🚨 Analyzing for surveillance patterns...")
188 | suspicious_devices = self.detector.analyze_surveillance_patterns()
189 |
190 | if suspicious_devices:
191 | print(f"⚠️ Found {len(suspicious_devices)} potentially suspicious devices!")
192 | print("\\nTop suspicious devices:")
193 | for i, device in enumerate(suspicious_devices[:5], 1):
194 | print(f" {i}. {device.mac} (Score: {device.persistence_score:.2f})")
195 | print(f" Appearances: {device.total_appearances}, Locations: {len(device.locations_seen)}")
196 | for reason in device.reasons[:2]: # Show top 2 reasons
197 | print(f" • {reason}")
198 | print()
199 | else:
200 | print("✅ No suspicious surveillance patterns detected")
201 |
202 | # Generate reports
203 | timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
204 |
205 | # Generate surveillance report
206 | report_file = f"surveillance_reports/surveillance_report_{timestamp}.md"
207 | html_file = f"surveillance_reports/surveillance_report_{timestamp}.html"
208 | print(f"\\n📝 Generating surveillance reports:")
209 | print(f" 📄 Markdown: {report_file}")
210 | print(f" 🌐 HTML: {html_file}")
211 | surveillance_report = self.detector.generate_surveillance_report(report_file)
212 |
213 | # Generate KML file if GPS data available
214 | kml_file = None
215 | if gps_data:
216 | kml_file = f"kml_files/surveillance_analysis_{timestamp}.kml"
217 | print(f"🗺️ Generating KML visualization: {kml_file}")
218 | self.kml_exporter.generate_kml(self.gps_tracker, suspicious_devices, kml_file)
219 | print(f" Open in Google Earth to visualize device tracking patterns")
220 |
221 | # Analysis summary
222 | multi_location_devices = self.gps_tracker.get_devices_across_locations()
223 | location_sessions = self.gps_tracker.get_location_history()
224 |
225 | results = {
226 | 'total_devices': total_count,
227 | 'suspicious_devices': len(suspicious_devices),
228 | 'high_persistence_devices': len([d for d in suspicious_devices if d.persistence_score > 0.8]),
229 | 'multi_location_devices': len(multi_location_devices),
230 | 'location_sessions': len(location_sessions),
231 | 'report_file': report_file,
232 | 'kml_file': kml_file,
233 | 'suspicious_device_list': suspicious_devices
234 | }
235 |
236 | return results
237 |
238 | def generate_demo_analysis(self) -> dict:
239 | """Generate analysis using simulated GPS data for demo purposes"""
240 | print("🎯 Generating BlackHat Arsenal Demo Analysis...")
241 | print("Using simulated GPS route with real Kismet data")
242 |
243 | # Use simulated GPS route
244 | gps_route = simulate_gps_data()
245 |
246 | # Perform analysis
247 | results = self.analyze_kismet_data(gps_data=gps_route)
248 |
249 | print("\\n🎪 Demo Analysis Complete!")
250 | print("=" * 50)
251 | print(f"📊 Analysis Results:")
252 | print(f" Total Devices: {results['total_devices']:,}")
253 | print(f" Suspicious Devices: {results['suspicious_devices']}")
254 | print(f" High Threat: {results['high_threat_devices']}")
255 | print(f" Multi-Location Devices: {results['multi_location_devices']}")
256 | print(f" Location Sessions: {results['location_sessions']}")
257 | print(f"\\n📁 Generated Files:")
258 | print(f" 📝 Report: {results['report_file']}")
259 | if results['kml_file']:
260 | print(f" 🗺️ KML: {results['kml_file']}")
261 |
262 | return results
263 |
264 | def analyze_for_stalking(self, min_persistence_score: float = 0.7) -> list:
265 | """Specifically analyze for stalking patterns"""
266 | suspicious_devices = self.detector.analyze_surveillance_patterns()
267 |
268 | # Filter for high-threat stalking indicators
269 | stalking_candidates = []
270 | for device in suspicious_devices:
271 | if device.persistence_score >= min_persistence_score:
272 | # Additional stalking-specific checks
273 | locations = len(device.locations_seen)
274 | appearances = device.total_appearances
275 |
276 | # Stalking indicators:
277 | # - Appears at 3+ different locations
278 | # - High frequency of appearances
279 | # - Spans multiple days
280 | time_span = device.last_seen - device.first_seen
281 | time_span_hours = time_span.total_seconds() / 3600
282 |
283 | stalking_score = 0
284 | stalking_reasons = []
285 |
286 | if locations >= 3:
287 | stalking_score += 0.4
288 | stalking_reasons.append(f"Follows across {locations} locations")
289 |
290 | if appearances >= 10:
291 | stalking_score += 0.3
292 | stalking_reasons.append(f"High frequency ({appearances} appearances)")
293 |
294 | if time_span_hours >= 24:
295 | stalking_score += 0.3
296 | stalking_reasons.append(f"Persistent over {time_span_hours/24:.1f} days")
297 |
298 | if stalking_score >= 0.6:
299 | device.stalking_score = stalking_score
300 | device.stalking_reasons = stalking_reasons
301 | stalking_candidates.append(device)
302 |
303 | return stalking_candidates
304 |
305 | def export_results_json(self, results: dict, output_file: str) -> None:
306 | """Export analysis results to JSON for further processing"""
307 |
308 | # Convert device objects to serializable format
309 | serializable_results = results.copy()
310 | if 'suspicious_device_list' in results:
311 | device_list = []
312 | for device in results['suspicious_device_list']:
313 | device_dict = {
314 | 'mac': device.mac,
315 | 'persistence_score': device.persistence_score,
316 | 'total_appearances': device.total_appearances,
317 | 'locations_seen': device.locations_seen,
318 | 'reasons': device.reasons,
319 | 'first_seen': device.first_seen.isoformat(),
320 | 'last_seen': device.last_seen.isoformat()
321 | }
322 | device_list.append(device_dict)
323 | serializable_results['suspicious_device_list'] = device_list
324 |
325 | with open(output_file, 'w') as f:
326 | json.dump(serializable_results, f, indent=2)
327 |
328 | print(f"📊 Results exported to JSON: {output_file}")
329 |
330 | def _load_appearances_with_gps(self, db_path: str, location_id: str) -> int:
331 | """Load device appearances and register them with GPS tracker"""
332 | import sqlite3
333 | import json
334 |
335 | try:
336 | with sqlite3.connect(db_path) as conn:
337 | cursor = conn.cursor()
338 |
339 | # Get all devices with timestamps
340 | cursor.execute("""
341 | SELECT devmac, last_time, type, device
342 | FROM devices
343 | WHERE last_time > 0
344 | ORDER BY last_time DESC
345 | """)
346 |
347 | rows = cursor.fetchall()
348 | count = 0
349 |
350 | # Set current location in GPS tracker for device correlation
351 | if hasattr(self.gps_tracker, 'location_sessions') and self.gps_tracker.location_sessions:
352 | # Find the location session that matches our location_id
353 | for session in self.gps_tracker.location_sessions:
354 | if session.session_id == location_id:
355 | self.gps_tracker.current_location = session
356 | break
357 |
358 | for row in rows:
359 | mac, timestamp, device_type, device_json = row
360 |
361 | # Extract SSIDs from device JSON
362 | ssids_probed = []
363 | try:
364 | device_data = json.loads(device_json)
365 | dot11_device = device_data.get('dot11.device', {})
366 | if dot11_device:
367 | probe_record = dot11_device.get('dot11.device.last_probed_ssid_record', {})
368 | ssid = probe_record.get('dot11.probedssid.ssid')
369 | if ssid:
370 | ssids_probed = [ssid]
371 | except (json.JSONDecodeError, KeyError):
372 | pass
373 |
374 | # Add to surveillance detector
375 | self.detector.add_device_appearance(
376 | mac=mac,
377 | timestamp=timestamp,
378 | location_id=location_id,
379 | ssids_probed=ssids_probed,
380 | device_type=device_type
381 | )
382 |
383 | # Also add to GPS tracker if current location is set
384 | if self.gps_tracker.current_location:
385 | self.gps_tracker.add_device_at_current_location(mac)
386 |
387 | count += 1
388 |
389 | logger.info(f"Loaded {count} device appearances from {db_path}")
390 | return count
391 |
392 | except Exception as e:
393 | logger.error(f"Error loading from Kismet database: {e}")
394 | return 0
395 |
def main():
    """Command-line entry point for the surveillance analysis tool.

    Parses CLI arguments, runs either a demo analysis or a real analysis
    against a Kismet database (optionally correlated with GPS data from a
    JSON file), then optionally performs stalking-focused reporting and
    JSON export.

    Returns:
        int: 0 on success, 1 on failure (suitable for sys.exit / exit).
    """
    parser = argparse.ArgumentParser(description='CYT Surveillance Analysis Tool')
    parser.add_argument('--demo', action='store_true',
                        help='Run demo analysis with simulated GPS data')
    parser.add_argument('--kismet-db', type=str,
                        help='Path to specific Kismet database file')
    parser.add_argument('--gps-file', type=str,
                        help='JSON file with GPS coordinates')
    parser.add_argument('--stalking-only', action='store_true',
                        help='Focus analysis on stalking detection')
    parser.add_argument('--output-json', type=str,
                        help='Export results to JSON file')
    parser.add_argument('--min-threat', type=float, default=0.5,
                        help='Minimum threat score for reporting (default: 0.5)')

    args = parser.parse_args()

    try:
        analyzer = SurveillanceAnalyzer()

        if args.demo:
            results = analyzer.generate_demo_analysis()
        else:
            # Load optional GPS data for location correlation.
            gps_data = None
            if args.gps_file:
                with open(args.gps_file, 'r') as f:
                    gps_data = json.load(f)

            results = analyzer.analyze_kismet_data(
                kismet_db_path=args.kismet_db,
                gps_data=gps_data
            )

        # Stalking-specific analysis (applies to demo and real runs alike).
        if args.stalking_only:
            stalking_devices = analyzer.analyze_for_stalking(args.min_threat)
            if stalking_devices:
                # Fixed: use a real newline escape, not a literal "\n" in output.
                print(f"\n🚨 STALKING ALERT: {len(stalking_devices)} devices with stalking patterns!")
                for device in stalking_devices:
                    print(f"   ⚠️  {device.mac} (Stalking Score: {device.stalking_score:.2f})")
                    for reason in device.stalking_reasons:
                        print(f"      • {reason}")
            else:
                print("\n✅ No stalking patterns detected")

        # Export JSON if requested
        if args.output_json:
            analyzer.export_results_json(results, args.output_json)

        print("\n🔒 Analysis complete! Stay safe out there.")

    except Exception as e:
        logger.error(f"Analysis failed: {e}")
        import traceback
        traceback.print_exc()
        return 1

    return 0
456 |
# Script entry point: propagate main()'s status code (0 ok, 1 error) to the shell.
# NOTE(review): the builtin exit() comes from the site module; sys.exit() is the
# conventional choice for non-interactive scripts — confirm before changing.
if __name__ == '__main__':
    exit(main())
--------------------------------------------------------------------------------
/cyt_gui.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Enhanced CYT GUI - BlackHat Arsenal Ready
4 | Maintains Fisher Price usability for small screens while looking professional
5 | """
6 | import tkinter as tk
7 | from tkinter import ttk, messagebox, scrolledtext
8 | import subprocess
9 | import os
10 | import pathlib
11 | import sqlite3
12 | import glob
13 | import json
14 | import time
15 | import threading
16 | from datetime import datetime
17 |
# Set test mode for GUI before any project modules are pulled in, so that
# credential access stays non-interactive while the GUI is running.
import os  # NOTE(review): 'os' is already imported above — redundant but harmless.
os.environ['CYT_TEST_MODE'] = 'true'  # Enable test mode for GUI subprocesses
21 |
22 | class CYTGui:
23 | def __init__(self):
24 | self.root = tk.Tk()
25 |
26 | # Load config later when needed
27 | self.config = None
28 | self.credential_manager = None
29 |
30 | self.setup_ui()
31 | self.running_processes = {}
32 | self.update_status()
33 |
34 | def setup_ui(self):
35 | """Setup the enhanced UI"""
36 | self.root.title('🔒 Chasing Your Tail - BlackHat Arsenal Edition')
37 | self.root.configure(bg='#1a1a1a') # Dark theme
38 | self.root.geometry('800x480') # Optimized for 7-inch screens
39 |
40 | # Create main container
41 | main_frame = tk.Frame(self.root, bg='#1a1a1a', padx=20, pady=20)
42 | main_frame.pack(fill=tk.BOTH, expand=True)
43 |
44 | # Title and status section
45 | self.create_header(main_frame)
46 |
47 | # Status indicators
48 | self.create_status_section(main_frame)
49 |
50 | # Main control buttons (keeping Fisher Price chunky style)
51 | self.create_control_buttons(main_frame)
52 |
53 | # Log output area
54 | self.create_log_section(main_frame)
55 |
56 | def create_header(self, parent):
57 | """Create header with title and security badge"""
58 | header_frame = tk.Frame(parent, bg='#1a1a1a')
59 | header_frame.pack(fill=tk.X, pady=(0, 20))
60 |
61 | # Main title
62 | title_label = tk.Label(
63 | header_frame,
64 | text="🔒 Chasing Your Tail",
65 | font=('Arial', 18, 'bold'),
66 | fg='#00ff41', # Matrix green
67 | bg='#1a1a1a'
68 | )
69 | title_label.pack(side=tk.LEFT)
70 |
71 | # Security badge
72 | security_badge = tk.Label(
73 | header_frame,
74 | text="🛡️ SECURED",
75 | font=('Arial', 10, 'bold'),
76 | fg='#ffffff',
77 | bg='#ff6b35', # Orange badge
78 | padx=10,
79 | pady=5
80 | )
81 | security_badge.pack(side=tk.RIGHT)
82 |
83 | # Subtitle
84 | subtitle_label = tk.Label(
85 | parent,
86 | text="Wi-Fi Probe Request Analyzer - BlackHat Arsenal Ready",
87 | font=('Arial', 10),
88 | fg='#cccccc',
89 | bg='#1a1a1a'
90 | )
91 | subtitle_label.pack(pady=(0, 10))
92 |
93 | def create_status_section(self, parent):
94 | """Create status indicators section"""
95 | status_frame = tk.LabelFrame(
96 | parent,
97 | text="System Status",
98 | font=('Arial', 10, 'bold'),
99 | fg='#ffffff',
100 | bg='#2a2a2a',
101 | padx=10,
102 | pady=10
103 | )
104 | status_frame.pack(fill=tk.X, pady=(0, 20))
105 |
106 | # Status indicators row
107 | indicators_frame = tk.Frame(status_frame, bg='#2a2a2a')
108 | indicators_frame.pack(fill=tk.X)
109 |
110 | # Kismet status
111 | self.kismet_status = tk.Label(
112 | indicators_frame,
113 | text="⏳ Kismet: Checking...",
114 | font=('Arial', 10),
115 | fg='#ffaa00',
116 | bg='#2a2a2a'
117 | )
118 | self.kismet_status.pack(side=tk.LEFT, padx=(0, 20))
119 |
120 | # Database status
121 | self.db_status = tk.Label(
122 | indicators_frame,
123 | text="⏳ Database: Checking...",
124 | font=('Arial', 10),
125 | fg='#ffaa00',
126 | bg='#2a2a2a'
127 | )
128 | self.db_status.pack(side=tk.LEFT, padx=(0, 20))
129 |
130 | # Credentials status
131 | self.creds_status = tk.Label(
132 | indicators_frame,
133 | text="⏳ Credentials: Checking...",
134 | font=('Arial', 10),
135 | fg='#ffaa00',
136 | bg='#2a2a2a'
137 | )
138 | self.creds_status.pack(side=tk.LEFT)
139 |
140 | def create_control_buttons(self, parent):
141 | """Create the main control buttons (Fisher Price style but professional)"""
142 | controls_frame = tk.LabelFrame(
143 | parent,
144 | text="Controls",
145 | font=('Arial', 12, 'bold'),
146 | fg='#ffffff',
147 | bg='#2a2a2a',
148 | padx=10,
149 | pady=10
150 | )
151 | controls_frame.pack(fill=tk.X, pady=(0, 20))
152 |
153 | # Top row buttons
154 | top_row = tk.Frame(controls_frame, bg='#2a2a2a')
155 | top_row.pack(fill=tk.X, pady=(0, 10))
156 |
157 | # System status button
158 | self.status_btn = tk.Button(
159 | top_row,
160 | text="📊 Check\nSystem Status",
161 | font=('Arial', 9, 'bold'),
162 | width=12,
163 | height=2,
164 | fg='#ffffff',
165 | bg='#007acc',
166 | activebackground='#005999',
167 | relief='raised',
168 | bd=3,
169 | command=self.check_status_threaded
170 | )
171 | self.status_btn.pack(side=tk.LEFT, padx=(0, 10))
172 |
173 | # Create ignore lists button
174 | self.create_ignore_btn = tk.Button(
175 | top_row,
176 | text="📝 Create\nIgnore Lists",
177 | font=('Arial', 9, 'bold'),
178 | width=12,
179 | height=2,
180 | fg='#ffffff',
181 | bg='#28a745',
182 | activebackground='#1e7e34',
183 | relief='raised',
184 | bd=3,
185 | command=self.create_ignore_lists_threaded
186 | )
187 | self.create_ignore_btn.pack(side=tk.LEFT, padx=(0, 10))
188 |
189 | # Delete ignore lists button
190 | self.delete_ignore_btn = tk.Button(
191 | top_row,
192 | text="🗑️ Delete\nIgnore Lists",
193 | font=('Arial', 9, 'bold'),
194 | width=12,
195 | height=2,
196 | fg='#ffffff',
197 | bg='#dc3545',
198 | activebackground='#c82333',
199 | relief='raised',
200 | bd=3,
201 | command=self.delete_ignore_lists
202 | )
203 | self.delete_ignore_btn.pack(side=tk.LEFT)
204 |
205 | # Bottom row buttons
206 | bottom_row = tk.Frame(controls_frame, bg='#2a2a2a')
207 | bottom_row.pack(fill=tk.X)
208 |
209 | # Run CYT button (main action)
210 | self.run_cyt_btn = tk.Button(
211 | bottom_row,
212 | text="🚀 START\nCHASING YOUR TAIL",
213 | font=('Arial', 11, 'bold'),
214 | width=18,
215 | height=2,
216 | fg='#ffffff',
217 | bg='#ff6b35', # Distinctive orange
218 | activebackground='#e55a2b',
219 | relief='raised',
220 | bd=4,
221 | command=self.run_cyt_threaded
222 | )
223 | self.run_cyt_btn.pack(side=tk.LEFT, padx=(0, 10))
224 |
225 | # Analyze logs button
226 | self.analyze_btn = tk.Button(
227 | bottom_row,
228 | text="📈 Analyze\nLogs",
229 | font=('Arial', 9, 'bold'),
230 | width=12,
231 | height=2,
232 | fg='#ffffff',
233 | bg='#6f42c1',
234 | activebackground='#5a359c',
235 | relief='raised',
236 | bd=3,
237 | command=self.analyze_logs_threaded
238 | )
239 | self.analyze_btn.pack(side=tk.LEFT, padx=(0, 10))
240 |
241 | # Surveillance analysis button
242 | self.surveillance_btn = tk.Button(
243 | bottom_row,
244 | text="🗺️ Surveillance\nAnalysis",
245 | font=('Arial', 9, 'bold'),
246 | width=12,
247 | height=2,
248 | fg='#ffffff',
249 | bg='#28a745',
250 | activebackground='#218838',
251 | relief='raised',
252 | bd=3,
253 | command=self.surveillance_analysis_threaded
254 | )
255 | self.surveillance_btn.pack(side=tk.LEFT, padx=(0, 10))
256 |
257 | # Quit button
258 | self.quit_btn = tk.Button(
259 | bottom_row,
260 | text="❌ QUIT",
261 | font=('Arial', 9, 'bold'),
262 | width=12,
263 | height=2,
264 | fg='#ffffff',
265 | bg='#6c757d',
266 | activebackground='#545b62',
267 | relief='raised',
268 | bd=3,
269 | command=self.quit_application
270 | )
271 | self.quit_btn.pack(side=tk.RIGHT)
272 |
273 | def create_log_section(self, parent):
274 | """Create log output section"""
275 | log_frame = tk.LabelFrame(
276 | parent,
277 | text="Output Log",
278 | font=('Arial', 10, 'bold'),
279 | fg='#ffffff',
280 | bg='#2a2a2a',
281 | padx=10,
282 | pady=10
283 | )
284 | log_frame.pack(fill=tk.BOTH, expand=True)
285 |
286 | # Log text area with dark theme
287 | self.log_text = scrolledtext.ScrolledText(
288 | log_frame,
289 | height=10,
290 | font=('Courier', 14),
291 | bg='#000000',
292 | fg='#00ff41', # Matrix green text
293 | insertbackground='#00ff41',
294 | selectbackground='#333333'
295 | )
296 | self.log_text.pack(fill=tk.BOTH, expand=True)
297 |
298 | # Initial welcome message
299 | self.log_message("🔒 CYT Enhanced GUI - Security Hardened Edition")
300 | self.log_message("=" * 50)
301 | self.log_message("All SQL injection vulnerabilities eliminated ✅")
302 | self.log_message("Credential encryption active ✅")
303 | self.log_message("Input validation enabled ✅")
304 | self.log_message("Ready for BlackHat Arsenal demo! 🎯")
305 | self.log_message("")
306 |
307 | def log_message(self, message):
308 | """Add message to log with timestamp"""
309 | timestamp = datetime.now().strftime("[%H:%M:%S]")
310 | full_message = f"{timestamp} {message}\n"
311 | self.log_text.insert(tk.END, full_message)
312 | self.log_text.see(tk.END)
313 | self.root.update_idletasks()
314 |
315 | def update_status(self):
316 | """Update status indicators"""
317 | threading.Thread(target=self._update_status_background, daemon=True).start()
318 |
319 | def _update_status_background(self):
320 | """Background status update"""
321 | # Check Kismet
322 | kismet_running = self.check_kismet_running()
323 | if kismet_running:
324 | self.kismet_status.config(text="✅ Kismet: Running", fg='#28a745')
325 | else:
326 | self.kismet_status.config(text="❌ Kismet: Not Running", fg='#dc3545')
327 |
328 | # Check database
329 | db_file, db_error = self.check_kismet_db()
330 | if db_error:
331 | self.db_status.config(text="❌ Database: Error", fg='#dc3545')
332 | else:
333 | # Get device count
334 | try:
335 | with sqlite3.connect(db_file) as con:
336 | cursor = con.cursor()
337 | cursor.execute("SELECT COUNT(*) FROM devices")
338 | count = cursor.fetchone()[0]
339 | self.db_status.config(text=f"✅ Database: {count:,} devices", fg='#28a745')
340 | except:
341 | self.db_status.config(text="⚠️ Database: Connected", fg='#ffaa00')
342 |
343 | # Check credentials
344 | if self.credential_manager:
345 | try:
346 | token = self.credential_manager.get_wigle_token()
347 | if token:
348 | self.creds_status.config(text="✅ Credentials: Encrypted", fg='#28a745')
349 | else:
350 | self.creds_status.config(text="⚠️ Credentials: Missing", fg='#ffaa00')
351 | except:
352 | self.creds_status.config(text="❌ Credentials: Error", fg='#dc3545')
353 | else:
354 | self.creds_status.config(text="⚠️ Credentials: Optional", fg='#ffaa00')
355 |
356 | def check_kismet_running(self):
357 | """Check if Kismet is running"""
358 | try:
359 | result = subprocess.run(['pgrep', 'kismet'], capture_output=True)
360 | return result.returncode == 0
361 | except:
362 | return False
363 |
364 | def check_kismet_db(self):
365 | """Check if Kismet database exists and is accessible"""
366 | if not self.config:
367 | try:
368 | with open('config.json', 'r') as f:
369 | self.config = json.load(f)
370 | except:
371 | self.config = {}
372 |
373 | db_path = self.config.get('paths', {}).get('kismet_logs', '/tmp/kismet*.kismet')
374 | list_of_files = glob.glob(db_path)
375 | if not list_of_files:
376 | return None, "No Kismet database files found"
377 | try:
378 | latest_file = max(list_of_files, key=os.path.getctime)
379 | with sqlite3.connect(latest_file) as con:
380 | cursor = con.cursor()
381 | cursor.execute("SELECT COUNT(*) FROM devices")
382 | return latest_file, None
383 | except Exception as e:
384 | return None, str(e)
385 |
386 | def check_status_threaded(self):
387 | """Check system status in background"""
388 | self.log_message("🔍 Checking system status...")
389 | threading.Thread(target=self._check_status_background, daemon=True).start()
390 |
391 | def _check_status_background(self):
392 | """Background status check"""
393 | try:
394 | # Check Kismet status
395 | kismet_processes = subprocess.run(['pgrep', '-c', 'kismet'], capture_output=True, text=True)
396 | kismet_count = int(kismet_processes.stdout.strip()) if kismet_processes.returncode == 0 else 0
397 |
398 | if kismet_count > 0:
399 | self.log_message("✅ Kismet is running")
400 | else:
401 | self.log_message("❌ Kismet is not running")
402 |
403 | # Check monitor mode
404 | try:
405 | iwconfig_result = subprocess.run(['iwconfig'], capture_output=True, text=True, timeout=5)
406 | if "Mode:Monitor" in iwconfig_result.stdout:
407 | self.log_message("✅ Monitor mode detected")
408 | else:
409 | self.log_message("❌ Monitor mode not detected")
410 | except Exception as e:
411 | self.log_message(f"⚠️ Could not check monitor mode: {e}")
412 |
413 | self.update_status()
414 | except Exception as e:
415 | self.log_message(f"❌ Error running status check: {e}")
416 |
417 | def create_ignore_lists_threaded(self):
418 | """Create ignore lists in background"""
419 | self.log_message("📝 Creating ignore lists from Kismet database...")
420 | self.create_ignore_btn.config(state='disabled', text='Creating...')
421 | threading.Thread(target=self._create_ignore_lists_background, daemon=True).start()
422 |
423 | def _create_ignore_lists_background(self):
424 | """Background ignore list creation"""
425 | try:
426 | # Check database first
427 | db_file, error = self.check_kismet_db()
428 | if error:
429 | self.log_message(f"❌ Database error: {error}")
430 | return
431 |
432 | self.log_message(f"📊 Using database: {os.path.basename(db_file)}")
433 |
434 | # Create ignore_lists directory
435 | ignore_dir = pathlib.Path('./ignore_lists')
436 | ignore_dir.mkdir(parents=True, exist_ok=True)
437 |
438 | # Process database
439 | with sqlite3.connect(db_file) as con:
440 | # Get MAC addresses
441 | cursor = con.cursor()
442 | cursor.execute("SELECT DISTINCT devmac FROM devices")
443 | mac_rows = cursor.fetchall()
444 |
445 | mac_list = []
446 | for row in mac_rows:
447 | mac = row[0]
448 | if mac and mac not in mac_list:
449 | mac_list.append(mac)
450 |
451 | self.log_message(f"✅ Found {len(mac_list)} unique MAC addresses")
452 |
453 | # Get SSIDs from probe requests
454 | cursor.execute("SELECT device FROM devices WHERE device LIKE '%dot11.probedssid.ssid%'")
455 | device_rows = cursor.fetchall()
456 |
457 | ssid_list = []
458 | for row in device_rows:
459 | try:
460 | device_json = json.loads(row[0])
461 | dot11_device = device_json.get('dot11.device', {})
462 | if dot11_device:
463 | last_probe = dot11_device.get('dot11.device.last_probed_ssid_record', {})
464 | ssid = last_probe.get('dot11.probedssid.ssid')
465 | if ssid and ssid not in ssid_list:
466 | ssid_list.append(ssid)
467 | except (json.JSONDecodeError, KeyError):
468 | continue
469 |
470 | self.log_message(f"✅ Found {len(ssid_list)} unique SSIDs")
471 |
472 | # Write files using secure format (JSON instead of Python exec)
473 | import json as json_module
474 |
475 | mac_file = ignore_dir / 'mac_list.json'
476 | with open(mac_file, 'w') as f:
477 | json_module.dump(mac_list, f, indent=2)
478 |
479 | ssid_file = ignore_dir / 'ssid_list.json'
480 | with open(ssid_file, 'w') as f:
481 | json_module.dump(ssid_list, f, indent=2)
482 |
483 | self.log_message(f"💾 Saved MAC list to: {mac_file}")
484 | self.log_message(f"💾 Saved SSID list to: {ssid_file}")
485 | self.log_message("✅ Ignore lists created successfully!")
486 |
487 | except Exception as e:
488 | self.log_message(f"❌ Error creating ignore lists: {e}")
489 | finally:
490 | self.create_ignore_btn.config(state='normal', text='📝 Create\nIgnore Lists')
491 |
492 | def delete_ignore_lists(self):
493 | """Delete ignore lists with confirmation"""
494 | if messagebox.askyesno("Confirm Delete", "Are you sure you want to delete all ignore lists?"):
495 | try:
496 | ignore_dir = pathlib.Path('./ignore_lists')
497 | deleted_count = 0
498 |
499 | for file_path in ignore_dir.glob('*'):
500 | if file_path.is_file():
501 | os.remove(file_path)
502 | deleted_count += 1
503 | self.log_message(f"🗑️ Deleted: {file_path.name}")
504 |
505 | self.log_message(f"✅ Deleted {deleted_count} ignore list files")
506 |
507 | except Exception as e:
508 | self.log_message(f"❌ Error deleting ignore lists: {e}")
509 |
510 | def run_cyt_threaded(self):
511 | """Run CYT in background"""
512 | if 'cyt' in self.running_processes:
513 | self.log_message("⚠️ CYT is already running!")
514 | return
515 |
516 | self.log_message("🚀 Starting Chasing Your Tail...")
517 | self.run_cyt_btn.config(state='disabled', text='🔄 RUNNING...', bg='#ffaa00')
518 | threading.Thread(target=self._run_cyt_background, daemon=True).start()
519 |
520 | def _run_cyt_background(self):
521 | """Background CYT execution"""
522 | try:
523 | # Set test mode for non-interactive credential access
524 | env = os.environ.copy()
525 | env['CYT_TEST_MODE'] = 'true'
526 |
527 | process = subprocess.Popen(
528 | ['python3', './chasing_your_tail.py'],
529 | stdout=subprocess.PIPE,
530 | stderr=subprocess.STDOUT,
531 | text=True,
532 | bufsize=1,
533 | universal_newlines=True,
534 | env=env
535 | )
536 |
537 | self.running_processes['cyt'] = process
538 | self.log_message("✅ CYT process started successfully")
539 |
540 | # Read output in real-time
541 | for line in process.stdout:
542 | if line.strip():
543 | self.log_message(f"CYT: {line.strip()}")
544 |
545 | except Exception as e:
546 | self.log_message(f"❌ Error running CYT: {e}")
547 | finally:
548 | if 'cyt' in self.running_processes:
549 | del self.running_processes['cyt']
550 | self.run_cyt_btn.config(state='normal', text='🚀 START\nCHASING YOUR TAIL', bg='#ff6b35')
551 |
552 | def analyze_logs_threaded(self):
553 | """Analyze logs in background"""
554 | self.log_message("📈 Starting log analysis...")
555 | self.analyze_btn.config(state='disabled', text='Analyzing...')
556 | threading.Thread(target=self._analyze_logs_background, daemon=True).start()
557 |
558 | def _analyze_logs_background(self):
559 | """Background log analysis"""
560 | try:
561 | env = os.environ.copy()
562 | env['CYT_TEST_MODE'] = 'true'
563 |
564 | self.log_message("🔄 Running probe analyzer (this may take several minutes for large datasets)...")
565 |
566 | result = subprocess.run(
567 | ['python3', './probe_analyzer.py', '--local'],
568 | capture_output=True,
569 | text=True,
570 | timeout=300, # Increased to 5 minutes
571 | env=env
572 | )
573 |
574 | # Save full output to timestamped report file
575 | from datetime import datetime
576 | import pathlib
577 |
578 | # Create reports directory if it doesn't exist
579 | reports_dir = pathlib.Path('./reports')
580 | reports_dir.mkdir(exist_ok=True)
581 |
582 | timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
583 | report_file = reports_dir / f"probe_analysis_report_{timestamp}.txt"
584 |
585 | with open(report_file, 'w') as f:
586 | f.write(f"CYT Probe Analysis Report\n")
587 | f.write(f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
588 | f.write("=" * 50 + "\n\n")
589 |
590 | if result.stdout:
591 | f.write("ANALYSIS OUTPUT:\n")
592 | f.write("-" * 30 + "\n")
593 | f.write(result.stdout)
594 | f.write("\n\n")
595 |
596 | if result.stderr and result.stderr.strip():
597 | f.write("WARNINGS/ERRORS:\n")
598 | f.write("-" * 30 + "\n")
599 | f.write(result.stderr)
600 | f.write("\n\n")
601 |
602 | f.write("End of Report\n")
603 |
604 | self.log_message(f"📄 Full analysis saved to: {report_file}")
605 |
606 | # Show summary in GUI
607 | if result.stdout:
608 | lines = result.stdout.split('\n')
609 | summary_lines = []
610 |
611 | # Extract key findings for GUI display
612 | for line in lines:
613 | if any(keyword in line.lower() for keyword in ['found', 'ssid:', 'times seen:', 'unique ssids']):
614 | summary_lines.append(line)
615 |
616 | if summary_lines:
617 | self.log_message("📊 Analysis Summary:")
618 | for line in summary_lines[:15]: # Show top findings
619 | if line.strip():
620 | self.log_message(f" {line}")
621 |
622 | if len(summary_lines) > 15:
623 | self.log_message(f" ... and {len(summary_lines)-15} more findings")
624 | else:
625 | # Fallback to first 10 lines if no key findings
626 | for line in lines[:10]:
627 | if line.strip():
628 | self.log_message(f"Analysis: {line}")
629 |
630 | if result.stderr and result.stderr.strip():
631 | self.log_message(f"⚠️ Analysis warnings saved to report file")
632 |
633 | self.log_message("✅ Log analysis complete - see report file for full details")
634 |
635 | except subprocess.TimeoutExpired:
636 | self.log_message("⚠️ Analysis timed out after 5 minutes (very large dataset)")
637 | self.log_message("💡 Try running 'python3 probe_analyzer.py --local' manually for large datasets")
638 | except Exception as e:
639 | self.log_message(f"❌ Error analyzing logs: {e}")
640 | finally:
641 | self.analyze_btn.config(state='normal', text='📈 Analyze\nLogs')
642 |
643 | def surveillance_analysis_threaded(self):
644 | """Run surveillance analysis in background"""
645 | self.log_message("🗺️ Starting surveillance analysis with GPS correlation...")
646 | self.surveillance_btn.config(state='disabled', text='Analyzing...')
647 | threading.Thread(target=self._surveillance_analysis_background, daemon=True).start()
648 |
649 | def _surveillance_analysis_background(self):
650 | """Background surveillance analysis"""
651 | try:
652 | env = os.environ.copy()
653 | env['CYT_TEST_MODE'] = 'true'
654 |
655 | self.log_message("🔄 Running surveillance analyzer (generating KML for Google Earth)...")
656 |
657 | result = subprocess.run(
658 | ['python3', './surveillance_analyzer.py'],
659 | capture_output=True,
660 | text=True,
661 | timeout=300,
662 | env=env
663 | )
664 |
665 | if result.returncode == 0:
666 | # Look for generated files
667 | import glob
668 | kml_files = glob.glob("kml_files/surveillance_analysis_*.kml")
669 | report_files = glob.glob("surveillance_reports/surveillance_report_*.md")
670 |
671 | if kml_files:
672 | latest_kml = max(kml_files, key=os.path.getctime)
673 | self.log_message(f"✅ KML file generated: {latest_kml}")
674 | self.log_message("🌍 Open this file in Google Earth to see GPS tracking!")
675 |
676 | if report_files:
677 | latest_report = max(report_files, key=os.path.getctime)
678 | self.log_message(f"📝 Analysis report: {latest_report}")
679 |
680 | self.log_message("✅ Surveillance analysis complete!")
681 |
682 | # Show some output
683 | if result.stdout:
684 | lines = result.stdout.split('\n')[:10] # Show first 10 lines
685 | for line in lines:
686 | if line.strip():
687 | self.log_message(f"📊 {line.strip()}")
688 | else:
689 | self.log_message(f"❌ Surveillance analysis failed")
690 | if result.stderr:
691 | self.log_message(f"Error: {result.stderr}")
692 |
693 | except subprocess.TimeoutExpired:
694 | self.log_message("⚠️ Surveillance analysis timed out")
695 | except Exception as e:
696 | self.log_message(f"❌ Error running surveillance analysis: {e}")
697 | finally:
698 | self.surveillance_btn.config(state='normal', text='🗺️ Surveillance\nAnalysis')
699 |
700 | def quit_application(self):
701 | """Quit application with cleanup"""
702 | if messagebox.askyesno("Quit", "Are you sure you want to quit CYT?"):
703 | # Clean up any running processes
704 | for name, process in list(self.running_processes.items()):
705 | try:
706 | process.terminate()
707 | self.log_message(f"🛑 Stopped {name} process")
708 | except:
709 | pass
710 |
711 | self.log_message("👋 Goodbye!")
712 | self.root.quit()
713 |
    def run(self):
        """Start the GUI; blocks in the Tk main loop until the window closes."""
        self.root.mainloop()
717 |
# Script entry point: build the GUI and enter the Tk main loop.
if __name__ == '__main__':
    try:
        app = CYTGui()
        app.run()
    except Exception as e:
        # Surface startup failures (e.g. no display available) on the
        # console, since the GUI log pane never came up.
        print(f"Error starting CYT GUI: {e}")
        import traceback
        traceback.print_exc()
--------------------------------------------------------------------------------
/gps_tracker.py:
--------------------------------------------------------------------------------
1 | """
2 | GPS Integration for CYT
3 | Correlates device appearances with GPS locations for surveillance detection
4 | """
5 | import json
6 | import time
7 | import logging
8 | from datetime import datetime
9 | from typing import Dict, List, Tuple, Optional, NamedTuple
10 | from dataclasses import dataclass
11 | import math
12 |
13 | logger = logging.getLogger(__name__)
14 |
@dataclass
class GPSLocation:
    """A single GPS coordinate fix with optional metadata."""
    latitude: float                        # decimal degrees
    longitude: float                       # decimal degrees
    altitude: Optional[float] = None       # meters, if the fix provides it
    # Fixed annotation: was `float = None`, which contradicted the None default.
    timestamp: Optional[float] = None      # epoch seconds when recorded
    accuracy: Optional[float] = None       # horizontal accuracy, if known
    location_name: Optional[str] = None    # human-friendly label, if any
24 |
@dataclass
class LocationSession:
    """A session at a specific location"""
    location: GPSLocation     # representative GPS fix for this location cluster
    start_time: float         # epoch seconds when the session began
    end_time: float           # epoch seconds of the most recent reading
    devices_seen: List[str]   # MAC addresses observed during the session
    session_id: str           # human-readable cluster/location identifier
33 |
class GPSTracker:
    """Track GPS locations and correlate device appearances with them.

    Readings are clustered into named "location sessions" (fixes within
    ``location_threshold`` meters share a session ID); device MACs seen at
    the current session are recorded so that devices following the user
    across locations can be identified later.
    """

    def __init__(self, config: Dict):
        """
        Args:
            config: application configuration. Clustering is tunable via the
                optional keys config['gps']['location_threshold_meters']
                (default 100) and config['gps']['session_timeout_seconds']
                (default 600); defaults preserve the previous hard-coded
                behavior for existing callers.
        """
        self.config = config
        self.locations: List[GPSLocation] = []            # raw reading history
        self.location_sessions: List[LocationSession] = []
        self.current_location: Optional[LocationSession] = None

        # Location clustering settings (config-driven, same defaults).
        gps_cfg = (config or {}).get('gps', {})
        # meters - same location if within this distance
        self.location_threshold = gps_cfg.get('location_threshold_meters', 100)
        # seconds - new session if gap longer than this
        self.session_timeout = gps_cfg.get('session_timeout_seconds', 600)

    def add_gps_reading(self, latitude: float, longitude: float,
                        altitude: float = None, accuracy: float = None,
                        location_name: str = None) -> str:
        """Record a GPS fix and return its (cluster) location ID."""
        timestamp = time.time()

        location = GPSLocation(
            latitude=latitude,
            longitude=longitude,
            altitude=altitude,
            timestamp=timestamp,
            accuracy=accuracy,
            location_name=location_name
        )

        self.locations.append(location)

        # Cluster against existing sessions (or mint a fresh unique ID).
        location_id = self._get_location_cluster_id(location)

        # Update current-session bookkeeping.
        self._update_current_session(location, location_id)

        logger.info(f"GPS reading added: {latitude:.6f}, {longitude:.6f} -> {location_id}")
        return location_id

    def _get_location_cluster_id(self, location: GPSLocation) -> str:
        """Return a nearby existing session's ID, or a new unique cluster ID."""
        # Reuse the first session within the clustering radius.
        for session in self.location_sessions:
            if self._calculate_distance(location, session.location) <= self.location_threshold:
                return session.session_id

        # No nearby cluster: derive a base name from the label or coordinates.
        if location.location_name:
            base_name = location.location_name.replace(' ', '_')
        else:
            base_name = f"loc_{location.latitude:.4f}_{location.longitude:.4f}"

        # Disambiguate against existing IDs with a numeric suffix.
        existing_ids = {s.session_id for s in self.location_sessions}
        location_id = base_name
        counter = 1
        while location_id in existing_ids:
            location_id = f"{base_name}_{counter}"
            counter += 1

        return location_id

    def _update_current_session(self, location: GPSLocation, location_id: str) -> None:
        """Extend a recent session with this ID, or open a new one; mark it current."""
        now = time.time()

        current_session = None
        for session in self.location_sessions:
            if session.session_id == location_id:
                # Only continue this session if the gap is within the timeout;
                # otherwise keep scanning — a newer session may share this ID.
                if now - session.end_time <= self.session_timeout:
                    session.end_time = now
                    current_session = session
                    break

        if not current_session:
            # Start a fresh session at this location.
            current_session = LocationSession(
                location=location,
                start_time=now,
                end_time=now,
                devices_seen=[],
                session_id=location_id
            )
            self.location_sessions.append(current_session)

        self.current_location = current_session
        logger.debug(f"Updated session: {location_id}")

    def _calculate_distance(self, loc1: GPSLocation, loc2: GPSLocation) -> float:
        """Great-circle distance between two fixes in meters (haversine formula)."""
        R = 6371000  # mean Earth radius in meters

        lat1_rad = math.radians(loc1.latitude)
        lat2_rad = math.radians(loc2.latitude)
        delta_lat = math.radians(loc2.latitude - loc1.latitude)
        delta_lon = math.radians(loc2.longitude - loc1.longitude)

        a = (math.sin(delta_lat/2) * math.sin(delta_lat/2) +
             math.cos(lat1_rad) * math.cos(lat2_rad) *
             math.sin(delta_lon/2) * math.sin(delta_lon/2))

        c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
        return R * c

    def add_device_at_current_location(self, mac: str) -> Optional[str]:
        """Record that a device was seen at the current location.

        Returns:
            The current session ID, or None when no GPS fix has been
            recorded yet.
        """
        if not self.current_location:
            logger.warning("No current location - cannot record device")
            return None

        if mac not in self.current_location.devices_seen:
            self.current_location.devices_seen.append(mac)
            logger.debug(f"Device {mac} seen at {self.current_location.session_id}")

        return self.current_location.session_id

    def get_current_location_id(self) -> Optional[str]:
        """Return the active session ID, or None before the first reading."""
        return self.current_location.session_id if self.current_location else None

    def get_location_history(self) -> List[LocationSession]:
        """Return all sessions ordered chronologically by start time."""
        return sorted(self.location_sessions, key=lambda s: s.start_time)

    def get_devices_across_locations(self) -> Dict[str, List[str]]:
        """Map MAC -> session IDs, restricted to devices seen at 2+ locations."""
        device_locations: Dict[str, List[str]] = {}

        for session in self.location_sessions:
            for mac in session.devices_seen:
                locations = device_locations.setdefault(mac, [])
                if session.session_id not in locations:
                    locations.append(session.session_id)

        # Devices observed at more than one distinct location are the
        # interesting (potentially following) ones.
        return {
            mac: locations
            for mac, locations in device_locations.items()
            if len(locations) > 1
        }
185 |
186 | class KMLExporter:
187 | """Export GPS and device data to KML format for Google Earth"""
188 |
189 | def __init__(self):
190 | self.kml_template = '''
191 | 📊 Generated: {timestamp} 🎯 Analysis: Advanced wireless device tracking and persistence analysis 🛰️ GPS Data: Real-time location correlation with device appearances This visualization shows device tracking patterns and location-based surveillance analysis. ✅ No persistent devices detected at this location
200 |
409 |
418 |
410 | Location ID {session.session_id}
411 | Persistence Score {location_persistence_score:.3f}/1.000
412 | Monitoring Start {start_time.strftime('%Y-%m-%d %H:%M:%S')}
413 | Monitoring End {end_time.strftime('%Y-%m-%d %H:%M:%S')}
414 | Duration {duration:.1f} minutes
415 | Total Devices {len(session.devices_seen)}
416 | Suspicious Devices {len(suspicious_devices_here)}
417 | Coordinates {session.location.latitude:.6f}, {session.location.longitude:.6f}
419 | 📱 Device Intelligence Summary:
420 | {self._format_enhanced_device_list(session.devices_seen, suspicious_devices_here)}
421 |
422 | {self._format_location_persistence_analysis(suspicious_devices_here) if suspicious_devices_here else '
".join(f"• {mac}" for mac in displayed_devices)
509 |
510 | if len(devices) > 10:
511 | formatted += f"
... and {len(devices) - 10} more"
512 |
513 | return formatted
514 |
515 | def _format_threat_reasons(self, reasons: List[str]) -> str:
516 | """Format threat reasons for KML description"""
517 | if not reasons:
518 | return "No specific threats identified"
519 |
520 | return "
".join(f"• {reason}" for reason in reasons)
521 |
522 | def _format_enhanced_device_list(self, all_devices: List[str], suspicious_devices: List) -> str:
523 | """Format enhanced device list with threat intelligence"""
524 | if not all_devices:
525 | return "
No devices detected
" 526 | 527 | html = "| MAC Address | {device.mac} |
| Persistence Classification | {threat_level} PERSISTENCE |
| Persistence Score | {device.persistence_score:.3f}/1.000 |
| Surveillance Duration | {duration.total_seconds()/3600:.1f} hours |
| Locations Tracked | {len(locations)} |
| Total Appearances | {device.total_appearances} |
| First Seen | {device.first_seen.strftime('%Y-%m-%d %H:%M:%S')} |
| Last Seen | {device.last_seen.strftime('%Y-%m-%d %H:%M:%S')} |
| Device MAC | {device.mac} |
| Location | {session.session_id} |
| Persistence Level | {threat_level} |
| Detection Time | {datetime.fromtimestamp(session.start_time).strftime('%Y-%m-%d %H:%M:%S')} |
| Duration at Location | {(session.end_time - session.start_time)/60:.1f} minutes |
| Appearances Here | {len(appearances_here)} |
| Persistence Score | {device.persistence_score:.3f} |
No probe requests captured
'} 658 | ]]> 659 || Location | {location} |
| Suspicious Devices | {device_count} |
| Average Persistence Score | {avg_persistence:.3f} |
| Maximum Persistence Score | {max_persistence:.3f} |
| Intensity Level | {'🔴 VERY HIGH' if max_persistence > 0.8 else '🟡 ELEVATED' if max_persistence > 0.6 else '🟢 MODERATE'} |
{len(work_hour_devices)} devices show activity primarily during work hours (9 AM - 5 PM)
764 |Implications: Possible workplace surveillance or professional monitoring
765 |{len(off_hour_devices)} devices show activity primarily during off hours (10 PM - 6 AM)
786 |Implications: Possible stalking or personal surveillance
787 |Generated: {timestamp}
825 |Analysis: Wireless device tracking and persistence analysis
826 |GPS Locations: {total_locations} monitoring locations
827 |Persistent Devices: {total_devices} devices detected
828 |KML visualization for Google Earth analysis.
838 | ]]>Generated: {timestamp}
979 |No GPS coordinates were available for visualization.
980 |Ensure GPS tracking is enabled and locations are being recorded.
981 | ]]> 982 |