├── requirements.txt ├── sample reports ├── sample_report1.pdf └── sample_report2.pdf ├── LICENSE ├── pcapninja.py ├── README.md ├── insights.py ├── report.py └── analysis.py /requirements.txt: -------------------------------------------------------------------------------- 1 | matplotlib 2 | reportlab 3 | scapy 4 | networkx 5 | scipy -------------------------------------------------------------------------------- /sample reports/sample_report1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/securezeron/PcapNinja/HEAD/sample reports/sample_report1.pdf -------------------------------------------------------------------------------- /sample reports/sample_report2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/securezeron/PcapNinja/HEAD/sample reports/sample_report2.pdf -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Zeron 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /pcapninja.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | from analysis import perform_in_depth_analysis,identify_security_concerns,analyze_pcap,generate_enhanced_executive_summary 4 | from insights import generate_recommendation 5 | from report import generate_pdf_report 6 | 7 | def banner(): 8 | font=r""" 9 | 10 | _______ _______ _______ _______ _ _________ _ _________ _______ 11 | ( ____ )( ____ \( ___ )( ____ )( ( /|\__ __/( ( /|\__ _/( ___ ) 12 | | ( )|| ( \/| ( ) || ( )|| \ ( | ) ( | \ ( | ) ( | ( ) | 13 | | (____)|| | | (___) || (____)|| \ | | | | | \ | | | | | (___) | 14 | | _____)| | | ___ || _____)| (\ \) | | | | (\ \) | | | | ___ | 15 | | ( | | | ( ) || ( | | \ | | | | | \ | | | | ( ) | 16 | | ) | (____/\| ) ( || ) | ) \ |___) (___| ) \ ||\_) ) | ) ( | 17 | |/ (_______/|/ \||/ |/ )_)\_______/|/ )_)(____/ |/ \| 18 | 19 | """ 20 | print(font) 21 | 22 | def main(pcap_file): 23 | print(f"Analyzing {pcap_file}...") 24 | 25 | # Check if the "result" directory exists 26 | if not os.path.exists("result"): 27 | os.makedirs("result") 28 | 29 | data = analyze_pcap(pcap_file) 30 | if data is None: 31 | return 32 | 33 | analysis = perform_in_depth_analysis(data) 34 | concerns = identify_security_concerns(data) 35 | recommendations = generate_recommendation(data, concerns) 36 | executive_summary = generate_enhanced_executive_summary(data, concerns, recommendations) 37 | 38 | print("Executive Summary:") 39 | print(executive_summary) 40 | 41 | print("\nGenerating PDF report...") 42 | generate_pdf_report(data, analysis, executive_summary, 
concerns, recommendations) 43 | 44 | if __name__ == "__main__": 45 | banner() 46 | parser = argparse.ArgumentParser(description='Analyze a PCAP file.') 47 | parser.add_argument('--pcap_file', type=str, help='Path to the PCAP file') 48 | args = parser.parse_args() 49 | main(args.pcap_file) 50 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ``` 2 | _______ _______ _______ _______ _ _________ _ _________ _______ 3 | ( ____ )( ____ \( ___ )( ____ )( ( /|\__ __/( ( /|\__ _/( ___ ) 4 | | ( )|| ( \/| ( ) || ( )|| \ ( | ) ( | \ ( | ) ( | ( ) | 5 | | (____)|| | | (___) || (____)|| \ | | | | | \ | | | | | (___) | 6 | | _____)| | | ___ || _____)| (\ \) | | | | (\ \) | | | | ___ | 7 | | ( | | | ( ) || ( | | \ | | | | | \ | | | | ( ) | 8 | | ) | (____/\| ) ( || ) | ) \ |___) (___| ) \ ||\_) ) | ) ( | 9 | |/ (_______/|/ \||/ |/ )_)\_______/|/ )_)(____/ |/ \| 10 | 11 | ``` 12 | 13 | 14 | 15 | # PCAP Ninja 16 | 17 | A lightweight Python tool to analyze PCAP files and generate network traffic reports. It detects traffic patterns, security concerns, and provides insights based on the captured data. 18 | 19 | ## Features 20 | 21 | - **Traffic Summary**: Analyze protocols, IPs, ports, packet sizes, and TCP flags. 22 | - **Time Distribution**: View packet distribution by hour. 23 | - **Top Conversations**: Identify top communication pairs. 24 | - **Protocol Breakdown**: Statistics for HTTP, DNS, SSL, TCP, UDP, and ICMP. 25 | - **Security Insights**: Detect potential DDoS attacks, port scanning, DNS tunneling, and deprecated SSL/TLS versions. 26 | - **Recommendations**: Suggestions based on detected security concerns. 27 | 28 | ## Installation 29 | 1. Clone the repository: 30 | 31 | ```bash 32 | git clone https://github.com/securezeron/PcapNinja.git 33 | cd PcapNinja 34 | ``` 35 | 36 | 2. 
Install dependencies: 37 | ```bash 38 | pip install -r requirements.txt 39 | ``` 40 | 41 | 3. Run the script 42 | ```bash 43 | python3 pcapninja.py --pcap_file example.pcap 44 | ``` 45 | 46 | ## Future Scope 47 | - Enhanced protocol support for newer protocols such as QUIC. 48 | - Real-time analysis with live network capture and dynamic report generation. 49 | - Machine learning integration to detect anomalous traffic patterns and potential threats. 50 | - Web interface with real-time graphs and visual insights for easier data interpretation. 51 | - Customizable reporting options allowing users to define specific metrics or data points of interest. 52 | - API integration for interoperability with other monitoring and security tools. 53 | 54 | ## Contributions 55 | Contributions are welcome! Please fork the repository and submit a pull request with your improvements. -------------------------------------------------------------------------------- /insights.py: -------------------------------------------------------------------------------- 1 | def generate_recommendation(data, concerns): 2 | recommendations = [] 3 | 4 | if "Possible port scanning detected" in concerns: 5 | recommendations.append("Implement stricter firewall rules and investigate potential port scanning activities.") 6 | 7 | if any("potential DDoS" in concern for concern in concerns): 8 | recommendations.append("Activate DDoS mitigation measures and investigate traffic to the most targeted IP.") 9 | 10 | if any("unusual ports" in concern for concern in concerns): 11 | recommendations.append("Review and potentially restrict traffic on identified unusual ports.") 12 | 13 | if data['packet_rate'] > 1000: 14 | recommendations.append("Consider network capacity upgrades to handle high traffic volume.") 15 | 16 | if any("High TCP retransmission rate" in concern for concern in concerns): 17 | recommendations.append("Investigate network congestion or potential packet loss issues.") 18 | 19 | if 
any("Deprecated SSL/TLS versions detected" in concern for concern in concerns): 20 | recommendations.append("Upgrade SSL/TLS configurations to use only secure versions (TLS 1.2 or higher).") 21 | 22 | if any("DNS tunneling" in concern for concern in concerns): 23 | recommendations.append("Implement DNS query monitoring and filtering to detect and prevent DNS tunneling attempts.") 24 | 25 | if not recommendations: 26 | recommendations.append("Continue monitoring for any significant changes in traffic patterns.") 27 | 28 | return recommendations 29 | 30 | def generate_enhanced_executive_summary(data, concerns, recommendations): 31 | total_duration = data['end_time'] - data['start_time'] 32 | top_src_ip = data['src_ips'].most_common(1)[0] 33 | top_dst_ip = data['dst_ips'].most_common(1)[0] 34 | top_port = data['ports'].most_common(1)[0] 35 | 36 | summary = f""" 37 | Executive Summary of Network Traffic Analysis 38 | 39 | 1. Overview: 40 | - Total Packets: {data['total_packets']} 41 | - Duration: {total_duration:.2f} seconds 42 | - Average Packet Rate: {data['packet_rate']:.2f} packets/second 43 | 44 | 2. Traffic Distribution: 45 | - Top Source IP: {top_src_ip[0]} ({top_src_ip[1]} packets, {(top_src_ip[1]/data['total_packets']*100):.2f}% of total) 46 | - Top Destination IP: {top_dst_ip[0]} ({top_dst_ip[1]} packets, {(top_dst_ip[1]/data['total_packets']*100):.2f}% of total) 47 | - Most Active Port: {top_port[0]} ({top_port[1]} occurrences) 48 | 49 | 3. Protocol Analysis: 50 | - Primary Protocol: {data['protocols'].most_common(1)[0][0]} 51 | - Protocol Distribution: {', '.join([f"{k}:{v}" for k, v in data['protocols'].most_common(3)])} 52 | 53 | 4. Packet Size Statistics: 54 | - Average: {statistics.mean(data['packet_sizes']):.2f} bytes 55 | - Median: {statistics.median(data['packet_sizes']):.2f} bytes 56 | - Std Dev: {statistics.stdev(data['packet_sizes']):.2f} bytes 57 | 58 | 5. Security Concerns: 59 | {' '.join(concerns)} 60 | 61 | 6. 
Key Recommendations: 62 | {' '.join(recommendations)} 63 | 64 | This summary provides a high-level overview of the network traffic captured in the PCAP file. For detailed analysis and visualizations, please refer to the full report. 65 | """ 66 | return summary -------------------------------------------------------------------------------- /report.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | from reportlab.lib.pagesizes import letter 3 | from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image, Table, TableStyle, PageBreak 4 | from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle 5 | from reportlab.lib.units import inch 6 | from io import BytesIO 7 | import networkx as nx 8 | 9 | def create_pie_chart(data, title, filename): 10 | plt.figure(figsize=(10, 6)) 11 | plt.pie(data.values(), labels=data.keys(), autopct='%1.1f%%', startangle=90) 12 | plt.title(title) 13 | plt.axis('equal') 14 | plt.savefig(filename) 15 | plt.close() 16 | 17 | def create_bar_chart(data, title, xlabel, ylabel, filename): 18 | plt.figure(figsize=(12, 6)) 19 | plt.bar(data.keys(), data.values()) 20 | plt.title(title) 21 | plt.xlabel(xlabel) 22 | plt.ylabel(ylabel) 23 | plt.xticks(rotation=45, ha='right') 24 | plt.tight_layout() 25 | plt.savefig(filename) 26 | plt.close() 27 | 28 | def create_time_series_chart(data, title, filename): 29 | plt.figure(figsize=(12, 6)) 30 | plt.plot(data.keys(), data.values()) 31 | plt.title(title) 32 | plt.xlabel('Hour of Day') 33 | plt.ylabel('Number of Packets') 34 | plt.grid(True) 35 | plt.tight_layout() 36 | plt.savefig(filename) 37 | plt.close() 38 | 39 | def create_network_graph(data, filename): 40 | G = nx.Graph() 41 | for (src, dst), weight in data['conversations'].items(): 42 | G.add_edge(src, dst, weight=weight) 43 | 44 | plt.figure(figsize=(12, 8)) 45 | pos = nx.spring_layout(G) 46 | nx.draw(G, pos, with_labels=True, node_color='lightblue', 
47 | node_size=500, font_size=8, font_weight='bold') 48 | edge_weights = nx.get_edge_attributes(G, 'weight') 49 | nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_weights) 50 | plt.title("Network Communication Graph") 51 | plt.axis('off') 52 | plt.tight_layout() 53 | plt.savefig(filename) 54 | plt.close() 55 | 56 | def generate_pdf_report(data, analysis, executive_summary, concerns, recommendations): 57 | buffer = BytesIO() 58 | doc = SimpleDocTemplate(buffer, pagesize=letter) 59 | styles = getSampleStyleSheet() 60 | story = [] 61 | 62 | # Update existing styles 63 | styles['Heading2'].spaceAfter = 6 64 | styles['BodyText'].spaceBefore = 6 65 | styles['BodyText'].spaceAfter = 6 66 | 67 | # Title 68 | story.append(Paragraph("Network Traffic Analysis Report", styles['Title'])) 69 | story.append(Spacer(1, 12)) 70 | 71 | # Executive Summary 72 | story.append(Paragraph("Executive Summary", styles['Heading1'])) 73 | story.append(Paragraph(executive_summary, styles['BodyText'])) 74 | story.append(PageBreak()) 75 | 76 | # Detailed Analysis 77 | story.append(Paragraph("Detailed Analysis", styles['Heading1'])) 78 | for finding in analysis: 79 | if finding.startswith('\n'): 80 | story.append(Paragraph(finding[1:], styles['Heading2'])) 81 | else: 82 | story.append(Paragraph(finding, styles['BodyText'])) 83 | 84 | story.append(PageBreak()) 85 | 86 | # Charts 87 | story.append(Paragraph("Visual Analysis", styles['Heading1'])) 88 | 89 | # Protocol Distribution 90 | create_pie_chart(data['protocols'], 'Protocol Distribution', 'result/protocol_dist.png') 91 | story.append(Paragraph("Protocol Distribution", styles['Heading2'])) 92 | story.append(Image('result/protocol_dist.png', 5*inch, 3*inch)) 93 | story.append(Spacer(1, 12)) 94 | 95 | # Top Source IPs 96 | create_bar_chart(dict(data['src_ips'].most_common(10)), 'Top 10 Source IPs', 'IP Address', 'Packet Count', 'result/top_src_ips.png') 97 | story.append(Paragraph("Top 10 Source IPs", styles['Heading2'])) 98 | 
story.append(Image('result/top_src_ips.png', 6*inch, 3*inch)) 99 | story.append(Spacer(1, 12)) 100 | 101 | # Time Distribution 102 | create_time_series_chart(data['time_distribution'], 'Traffic Distribution by Hour', 'result/time_distribution.png') 103 | story.append(Paragraph("Traffic Distribution by Hour", styles['Heading2'])) 104 | story.append(Image('result/time_distribution.png', 6*inch, 3*inch)) 105 | story.append(Spacer(1, 12)) 106 | 107 | # Network Graph 108 | create_network_graph(data, 'result/network_graph.png') 109 | story.append(Paragraph("Network Communication Graph", styles['Heading2'])) 110 | story.append(Image('result/network_graph.png', 6*inch, 4*inch)) 111 | story.append(Spacer(1, 12)) 112 | 113 | story.append(PageBreak()) 114 | 115 | # Security Concerns 116 | story.append(Paragraph("Security Concerns", styles['Heading1'])) 117 | for concern in concerns: 118 | story.append(Paragraph(f"• {concern}", styles['BodyText'])) 119 | 120 | # Recommendations 121 | story.append(Paragraph("Recommendations", styles['Heading1'])) 122 | for recommendation in recommendations: 123 | story.append(Paragraph(f"• {recommendation}", styles['BodyText'])) 124 | 125 | doc.build(story) 126 | buffer.seek(0) 127 | 128 | with open("result/network_traffic_analysis_report.pdf", "wb") as f: 129 | f.write(buffer.getvalue()) 130 | 131 | print("PDF report generated: network_traffic_analysis_report.pdf") -------------------------------------------------------------------------------- /analysis.py: -------------------------------------------------------------------------------- 1 | from scapy.all import rdpcap, IP, TCP, UDP, DNS, Raw 2 | from scapy.layers.http import HTTP 3 | from collections import Counter, defaultdict 4 | import datetime 5 | import statistics 6 | 7 | def safe_decode(data, encoding='utf-8', errors='ignore'): 8 | if isinstance(data, bytes): 9 | return data.decode(encoding, errors=errors) 10 | return str(data) 11 | 12 | def analyze_pcap(file_path): 13 | try: 14 | 
packets = rdpcap(file_path) 15 | except FileNotFoundError: 16 | print(f"Error: The file {file_path} was not found.") 17 | return None 18 | except Exception as e: 19 | print(f"An error occurred while reading the PCAP file: {str(e)}") 20 | return None 21 | 22 | data = { 23 | 'total_packets': len(packets), 24 | 'protocols': Counter(), 25 | 'src_ips': Counter(), 26 | 'dst_ips': Counter(), 27 | 'ports': Counter(), 28 | 'packet_sizes': [], 29 | 'tcp_flags': Counter(), 30 | 'time_distribution': defaultdict(int), 31 | 'conversations': defaultdict(int), 32 | 'ttl_values': [], 33 | 'dns_queries': Counter(), 34 | 'http_methods': Counter(), 35 | 'packet_rate': 0, 36 | 'start_time': None, 37 | 'end_time': None, 38 | 'ip_geolocation': defaultdict(Counter), 39 | 'payload_data': [], 40 | 'tcp_window_sizes': [], 41 | 'tcp_retransmissions': Counter(), 42 | 'udp_lengths': [], 43 | 'icmp_types': Counter(), 44 | 'ssl_versions': Counter(), 45 | 'application_data': defaultdict(Counter) 46 | } 47 | 48 | for packet in packets: 49 | if data['start_time'] is None: 50 | data['start_time'] = packet.time 51 | data['end_time'] = packet.time 52 | 53 | if IP in packet: 54 | ip_layer = packet[IP] 55 | data['src_ips'][ip_layer.src] += 1 56 | data['dst_ips'][ip_layer.dst] += 1 57 | data['protocols'][ip_layer.proto] += 1 58 | data['packet_sizes'].append(len(packet)) 59 | data['ttl_values'].append(ip_layer.ttl) 60 | data['conversations'][(ip_layer.src, ip_layer.dst)] += 1 61 | 62 | timestamp = datetime.datetime.fromtimestamp(float(packet.time)) 63 | data['time_distribution'][timestamp.hour] += 1 64 | 65 | # Simulated geolocation (replace with actual geolocation service in production) 66 | data['ip_geolocation'][ip_layer.src]['country'] += 1 67 | data['ip_geolocation'][ip_layer.dst]['country'] += 1 68 | 69 | if TCP in packet: 70 | tcp_layer = packet[TCP] 71 | data['ports'][tcp_layer.sport] += 1 72 | data['ports'][tcp_layer.dport] += 1 73 | data['tcp_flags'][tcp_layer.flags] += 1 74 | 
data['tcp_window_sizes'].append(tcp_layer.window) 75 | 76 | if tcp_layer.flags & 0x04: # RST flag 77 | data['tcp_retransmissions'][(ip_layer.src, ip_layer.dst)] += 1 78 | 79 | if tcp_layer.dport == 80 or tcp_layer.sport == 80: 80 | if HTTP in packet: 81 | http_layer = packet[HTTP] 82 | if hasattr(http_layer, 'Method'): 83 | method = safe_decode(http_layer.Method) 84 | data['http_methods'][method] += 1 85 | elif tcp_layer.dport == 443 or tcp_layer.sport == 443: 86 | data['ssl_versions']['TLSv1.2'] += 1 # Simulated SSL version detection 87 | 88 | elif UDP in packet: 89 | udp_layer = packet[UDP] 90 | data['ports'][udp_layer.sport] += 1 91 | data['ports'][udp_layer.dport] += 1 92 | data['udp_lengths'].append(udp_layer.len) 93 | 94 | if DNS in packet: 95 | dns_layer = packet[DNS] 96 | if dns_layer.qr == 0 and dns_layer.qd: # It's a query 97 | query = safe_decode(dns_layer.qd.qname) 98 | data['dns_queries'][query] += 1 99 | 100 | elif packet.haslayer('ICMP'): 101 | icmp_layer = packet['ICMP'] 102 | data['icmp_types'][icmp_layer.type] += 1 103 | 104 | # Simulated application-layer protocol detection 105 | if Raw in packet: 106 | payload = packet[Raw].load 107 | data['payload_data'].append(len(payload)) 108 | if b'HTTP' in payload: 109 | data['application_data']['HTTP'][ip_layer.src] += 1 110 | elif b'SSH' in payload: 111 | data['application_data']['SSH'][ip_layer.src] += 1 112 | 113 | if data['start_time'] and data['end_time']: 114 | duration = data['end_time'] - data['start_time'] 115 | data['packet_rate'] = data['total_packets'] / duration if duration > 0 else 0 116 | 117 | return data 118 | 119 | def perform_in_depth_analysis(data): 120 | analysis = [] 121 | 122 | # Protocol Analysis 123 | top_protocols = data['protocols'].most_common(5) 124 | analysis.append("Protocol Distribution:") 125 | for proto, count in top_protocols: 126 | percentage = (count / data['total_packets']) * 100 127 | analysis.append(f" - {proto}: {count} packets ({percentage:.2f}%)") 128 | 129 | # 
Traffic Pattern Analysis 130 | top_src_ips = data['src_ips'].most_common(5) 131 | top_dst_ips = data['dst_ips'].most_common(5) 132 | analysis.append("\nTop 5 Source IP Addresses:") 133 | for ip, count in top_src_ips: 134 | percentage = (count / data['total_packets']) * 100 135 | analysis.append(f" - {ip}: {count} packets ({percentage:.2f}%)") 136 | analysis.append("\nTop 5 Destination IP Addresses:") 137 | for ip, count in top_dst_ips: 138 | percentage = (count / data['total_packets']) * 100 139 | analysis.append(f" - {ip}: {count} packets ({percentage:.2f}%)") 140 | 141 | # Port Analysis 142 | top_ports = data['ports'].most_common(10) 143 | analysis.append("\nTop 10 Ports:") 144 | for port, count in top_ports: 145 | percentage = (count / data['total_packets']) * 100 146 | analysis.append(f" - Port {port}: {count} packets ({percentage:.2f}%)") 147 | 148 | # Packet Size Analysis 149 | avg_size = statistics.mean(data['packet_sizes']) 150 | median_size = statistics.median(data['packet_sizes']) 151 | std_dev = statistics.stdev(data['packet_sizes']) 152 | analysis.append(f"\nPacket Size Statistics:") 153 | analysis.append(f" - Average: {avg_size:.2f} bytes") 154 | analysis.append(f" - Median: {median_size:.2f} bytes") 155 | analysis.append(f" - Standard Deviation: {std_dev:.2f} bytes") 156 | 157 | # TCP Flags Analysis 158 | if data['tcp_flags']: 159 | analysis.append("\nTCP Flags Distribution:") 160 | for flags, count in data['tcp_flags'].most_common(): 161 | percentage = (count / sum(data['tcp_flags'].values())) * 100 162 | analysis.append(f" - {flags}: {count} ({percentage:.2f}%)") 163 | 164 | # Time Distribution Analysis 165 | analysis.append("\nTraffic Distribution by Hour:") 166 | for hour, count in sorted(data['time_distribution'].items()): 167 | percentage = (count / data['total_packets']) * 100 168 | analysis.append(f" - Hour {hour}: {count} packets ({percentage:.2f}%)") 169 | 170 | # Conversation Analysis 171 | top_conversations = 
sorted(data['conversations'].items(), key=lambda x: x[1], reverse=True)[:5] 172 | analysis.append("\nTop 5 Conversations:") 173 | for (src, dst), count in top_conversations: 174 | percentage = (count / data['total_packets']) * 100 175 | analysis.append(f" - {src} <-> {dst}: {count} packets ({percentage:.2f}%)") 176 | 177 | # TTL Analysis 178 | avg_ttl = statistics.mean(data['ttl_values']) 179 | analysis.append(f"\nAverage TTL: {avg_ttl:.2f}") 180 | 181 | # DNS Query Analysis 182 | if data['dns_queries']: 183 | analysis.append("\nTop 5 DNS Queries:") 184 | for query, count in data['dns_queries'].most_common(5): 185 | analysis.append(f" - {query}: {count} times") 186 | 187 | # HTTP Method Analysis 188 | if data['http_methods']: 189 | analysis.append("\nHTTP Method Distribution:") 190 | for method, count in data['http_methods'].items(): 191 | percentage = (count / sum(data['http_methods'].values())) * 100 192 | analysis.append(f" - {method}: {count} ({percentage:.2f}%)") 193 | 194 | # TCP Window Size Analysis 195 | if data['tcp_window_sizes']: 196 | avg_window = statistics.mean(data['tcp_window_sizes']) 197 | analysis.append(f"\nAverage TCP Window Size: {avg_window:.2f} bytes") 198 | 199 | # UDP Length Analysis 200 | if data['udp_lengths']: 201 | avg_udp_length = statistics.mean(data['udp_lengths']) 202 | analysis.append(f"\nAverage UDP Datagram Length: {avg_udp_length:.2f} bytes") 203 | 204 | # ICMP Type Analysis 205 | if data['icmp_types']: 206 | analysis.append("\nICMP Type Distribution:") 207 | for icmp_type, count in data['icmp_types'].most_common(): 208 | percentage = (count / sum(data['icmp_types'].values())) * 100 209 | analysis.append(f" - Type {icmp_type}: {count} ({percentage:.2f}%)") 210 | 211 | # SSL/TLS Version Analysis 212 | if data['ssl_versions']: 213 | analysis.append("\nSSL/TLS Version Distribution:") 214 | for version, count in data['ssl_versions'].items(): 215 | percentage = (count / sum(data['ssl_versions'].values())) * 100 216 | 
analysis.append(f" - {version}: {count} ({percentage:.2f}%)") 217 | 218 | # Application-Layer Protocol Analysis 219 | if data['application_data']: 220 | analysis.append("\nDetected Application-Layer Protocols:") 221 | for protocol, ips in data['application_data'].items(): 222 | analysis.append(f" - {protocol}: {sum(ips.values())} occurrences") 223 | 224 | return analysis 225 | 226 | 227 | def identify_security_concerns(data): 228 | concerns = [] 229 | 230 | # Check for potential port scanning 231 | if len(data['dst_ips']) / len(data['src_ips']) > 10: 232 | concerns.append("Possible port scanning detected") 233 | 234 | # Check for potential DDoS 235 | top_dst_ip = data['dst_ips'].most_common(1)[0] 236 | if (top_dst_ip[1] / data['total_packets']) > 0.5: 237 | concerns.append(f"High traffic concentration to {top_dst_ip[0]}, potential DDoS") 238 | 239 | # Check for unusual ports 240 | unusual_ports = [port for port, count in data['ports'].items() if port not in [80, 443, 22, 53] and count > 100] 241 | if unusual_ports: 242 | concerns.append(f"High traffic on unusual ports: {', '.join(map(str, unusual_ports))}") 243 | 244 | # Check for high rate of TCP retransmissions 245 | if data['tcp_retransmissions']: 246 | total_tcp = sum(data['tcp_flags'].values()) 247 | retrans_rate = sum(data['tcp_retransmissions'].values()) / total_tcp 248 | if retrans_rate > 0.05: 249 | concerns.append(f"High TCP retransmission rate: {retrans_rate:.2%}") 250 | 251 | # Check for potential SSL/TLS vulnerabilities 252 | if 'SSLv3' in data['ssl_versions'] or 'TLSv1.0' in data['ssl_versions']: 253 | concerns.append("Deprecated SSL/TLS versions detected") 254 | 255 | # Check for potential DNS tunneling 256 | if data['dns_queries']: 257 | avg_query_length = statistics.mean(len(q) for q in data['dns_queries']) 258 | if avg_query_length > 50: 259 | concerns.append("Unusually long DNS queries detected, potential DNS tunneling") 260 | 261 | return concerns 262 | 263 | def 
generate_enhanced_executive_summary(data, concerns, recommendations): 264 | total_duration = data['end_time'] - data['start_time'] 265 | top_src_ip = data['src_ips'].most_common(1)[0] 266 | top_dst_ip = data['dst_ips'].most_common(1)[0] 267 | top_port = data['ports'].most_common(1)[0] 268 | 269 | summary = f""" 270 | Executive Summary of Network Traffic Analysis 271 | 272 | 1. Overview: 273 | - Total Packets: {data['total_packets']} 274 | - Duration: {total_duration:.2f} seconds 275 | - Average Packet Rate: {data['packet_rate']:.2f} packets/second 276 | 277 | 2. Traffic Distribution: 278 | - Top Source IP: {top_src_ip[0]} ({top_src_ip[1]} packets, {(top_src_ip[1]/data['total_packets']*100):.2f}% of total) 279 | - Top Destination IP: {top_dst_ip[0]} ({top_dst_ip[1]} packets, {(top_dst_ip[1]/data['total_packets']*100):.2f}% of total) 280 | - Most Active Port: {top_port[0]} ({top_port[1]} occurrences) 281 | 282 | 3. Protocol Analysis: 283 | - Primary Protocol: {data['protocols'].most_common(1)[0][0]} 284 | - Protocol Distribution: {', '.join([f"{k}:{v}" for k, v in data['protocols'].most_common(3)])} 285 | 286 | 4. Packet Size Statistics: 287 | - Average: {statistics.mean(data['packet_sizes']):.2f} bytes 288 | - Median: {statistics.median(data['packet_sizes']):.2f} bytes 289 | - Std Dev: {statistics.stdev(data['packet_sizes']):.2f} bytes 290 | 291 | 5. Security Concerns: 292 | {' '.join(concerns)} 293 | 294 | 6. Key Recommendations: 295 | {' '.join(recommendations)} 296 | 297 | This summary provides a high-level overview of the network traffic captured in the PCAP file. For detailed analysis and visualizations, please refer to the full report. 298 | """ 299 | return summary --------------------------------------------------------------------------------