├── README.md ├── controller ├── DT_controller.py ├── KNN_controller.py ├── RF_controller.py ├── collect_benign_trafic.py ├── collect_benign_trafic1.py ├── collect_ddos_trafic.py └── switch.py ├── mininet ├── generate_benign_trafic.py ├── generate_ddos_trafic.py ├── generate_ddos_trafic1.py └── topology.py └── ml ├── DT.py ├── KNN.py ├── LR.py ├── ML.py ├── NB.py ├── RF.py └── SVM.py /README.md: -------------------------------------------------------------------------------- 1 | # sdn-network-ddos-detection-using-machine-learning 2 | 3 | - Find dataset here: https://drive.google.com/file/d/1N2QLDPb90XOdxcuQ_Fb7ZSVOG4J3w_zY/view?usp=sharing 4 | 5 | - Find ryu controller vm here: https://drive.google.com/file/d/1_5PQWBsQcVnxtzwhUMzP-w2mR9MZrG6S/view?usp=sharing 6 | 7 | - Find mininet vm here: https://drive.google.com/file/d/1H7Hs-yruNQKMDmcdgHJGHIDtopPNFAvH/view?usp=sharing 8 | 9 | - Find simulation here: https://www.youtube.com/playlist?list=PLpbzVrYIIhHaLQEtiVtYhNlZnyV5mb5vp 10 | 11 | # Steps: 12 | 13 | - Import virtual machines to virtualbox 14 | 15 | - Change ip address of ryu controller in source code 16 | 17 | - On ryu controller run: ryu-manager DT_controller.py 18 | 19 | - On mininet run: sudo python topology.py 20 | 21 | - Launch DDoS attacks as described in youtube videos and see results. 
22 | -------------------------------------------------------------------------------- /controller/DT_controller.py: -------------------------------------------------------------------------------- 1 | from ryu.controller import ofp_event 2 | from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER 3 | from ryu.controller.handler import set_ev_cls 4 | from ryu.lib import hub 5 | 6 | import switch 7 | from datetime import datetime 8 | 9 | import pandas as pd 10 | from sklearn.model_selection import train_test_split 11 | from sklearn.tree import DecisionTreeClassifier 12 | from sklearn.metrics import confusion_matrix 13 | from sklearn.metrics import accuracy_score 14 | 15 | class SimpleMonitor13(switch.SimpleSwitch13): 16 | 17 | def __init__(self, *args, **kwargs): 18 | 19 | super(SimpleMonitor13, self).__init__(*args, **kwargs) 20 | self.datapaths = {} 21 | self.monitor_thread = hub.spawn(self._monitor) 22 | 23 | start = datetime.now() 24 | 25 | self.flow_training() 26 | 27 | end = datetime.now() 28 | print("Training time: ", (end-start)) 29 | 30 | @set_ev_cls(ofp_event.EventOFPStateChange, 31 | [MAIN_DISPATCHER, DEAD_DISPATCHER]) 32 | def _state_change_handler(self, ev): 33 | datapath = ev.datapath 34 | if ev.state == MAIN_DISPATCHER: 35 | if datapath.id not in self.datapaths: 36 | self.logger.debug('register datapath: %016x', datapath.id) 37 | self.datapaths[datapath.id] = datapath 38 | elif ev.state == DEAD_DISPATCHER: 39 | if datapath.id in self.datapaths: 40 | self.logger.debug('unregister datapath: %016x', datapath.id) 41 | del self.datapaths[datapath.id] 42 | 43 | def _monitor(self): 44 | while True: 45 | for dp in self.datapaths.values(): 46 | self._request_stats(dp) 47 | hub.sleep(10) 48 | 49 | self.flow_predict() 50 | 51 | def _request_stats(self, datapath): 52 | self.logger.debug('send stats request: %016x', datapath.id) 53 | parser = datapath.ofproto_parser 54 | 55 | req = parser.OFPFlowStatsRequest(datapath) 56 | datapath.send_msg(req) 57 | 58 
| @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) 59 | def _flow_stats_reply_handler(self, ev): 60 | 61 | timestamp = datetime.now() 62 | timestamp = timestamp.timestamp() 63 | 64 | file0 = open("PredictFlowStatsfile.csv","w") 65 | file0.write('timestamp,datapath_id,flow_id,ip_src,tp_src,ip_dst,tp_dst,ip_proto,icmp_code,icmp_type,flow_duration_sec,flow_duration_nsec,idle_timeout,hard_timeout,flags,packet_count,byte_count,packet_count_per_second,packet_count_per_nsecond,byte_count_per_second,byte_count_per_nsecond\n') 66 | body = ev.msg.body 67 | icmp_code = -1 68 | icmp_type = -1 69 | tp_src = 0 70 | tp_dst = 0 71 | 72 | for stat in sorted([flow for flow in body if (flow.priority == 1) ], key=lambda flow: 73 | (flow.match['eth_type'],flow.match['ipv4_src'],flow.match['ipv4_dst'],flow.match['ip_proto'])): 74 | 75 | ip_src = stat.match['ipv4_src'] 76 | ip_dst = stat.match['ipv4_dst'] 77 | ip_proto = stat.match['ip_proto'] 78 | 79 | if stat.match['ip_proto'] == 1: 80 | icmp_code = stat.match['icmpv4_code'] 81 | icmp_type = stat.match['icmpv4_type'] 82 | 83 | elif stat.match['ip_proto'] == 6: 84 | tp_src = stat.match['tcp_src'] 85 | tp_dst = stat.match['tcp_dst'] 86 | 87 | elif stat.match['ip_proto'] == 17: 88 | tp_src = stat.match['udp_src'] 89 | tp_dst = stat.match['udp_dst'] 90 | 91 | flow_id = str(ip_src) + str(tp_src) + str(ip_dst) + str(tp_dst) + str(ip_proto) 92 | 93 | try: 94 | packet_count_per_second = stat.packet_count/stat.duration_sec 95 | packet_count_per_nsecond = stat.packet_count/stat.duration_nsec 96 | except: 97 | packet_count_per_second = 0 98 | packet_count_per_nsecond = 0 99 | 100 | try: 101 | byte_count_per_second = stat.byte_count/stat.duration_sec 102 | byte_count_per_nsecond = stat.byte_count/stat.duration_nsec 103 | except: 104 | byte_count_per_second = 0 105 | byte_count_per_nsecond = 0 106 | 107 | file0.write("{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n" 108 | .format(timestamp, ev.msg.datapath.id, 
flow_id, ip_src, tp_src,ip_dst, tp_dst, 109 | stat.match['ip_proto'],icmp_code,icmp_type, 110 | stat.duration_sec, stat.duration_nsec, 111 | stat.idle_timeout, stat.hard_timeout, 112 | stat.flags, stat.packet_count,stat.byte_count, 113 | packet_count_per_second,packet_count_per_nsecond, 114 | byte_count_per_second,byte_count_per_nsecond)) 115 | 116 | file0.close() 117 | 118 | def flow_training(self): 119 | 120 | self.logger.info("Flow Training ...") 121 | 122 | flow_dataset = pd.read_csv('FlowStatsfile.csv') 123 | 124 | flow_dataset.iloc[:, 2] = flow_dataset.iloc[:, 2].str.replace('.', '') 125 | flow_dataset.iloc[:, 3] = flow_dataset.iloc[:, 3].str.replace('.', '') 126 | flow_dataset.iloc[:, 5] = flow_dataset.iloc[:, 5].str.replace('.', '') 127 | 128 | X_flow = flow_dataset.iloc[:, :-1].values 129 | X_flow = X_flow.astype('float64') 130 | 131 | y_flow = flow_dataset.iloc[:, -1].values 132 | 133 | X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(X_flow, y_flow, test_size=0.25, random_state=0) 134 | 135 | classifier = DecisionTreeClassifier(criterion='entropy', random_state=0) 136 | self.flow_model = classifier.fit(X_flow_train, y_flow_train) 137 | 138 | y_flow_pred = self.flow_model.predict(X_flow_test) 139 | 140 | self.logger.info("------------------------------------------------------------------------------") 141 | 142 | self.logger.info("confusion matrix") 143 | cm = confusion_matrix(y_flow_test, y_flow_pred) 144 | self.logger.info(cm) 145 | 146 | acc = accuracy_score(y_flow_test, y_flow_pred) 147 | 148 | self.logger.info("succes accuracy = {0:.2f} %".format(acc*100)) 149 | fail = 1.0 - acc 150 | self.logger.info("fail accuracy = {0:.2f} %".format(fail*100)) 151 | self.logger.info("------------------------------------------------------------------------------") 152 | 153 | def flow_predict(self): 154 | try: 155 | predict_flow_dataset = pd.read_csv('PredictFlowStatsfile.csv') 156 | 157 | predict_flow_dataset.iloc[:, 2] = 
predict_flow_dataset.iloc[:, 2].str.replace('.', '') 158 | predict_flow_dataset.iloc[:, 3] = predict_flow_dataset.iloc[:, 3].str.replace('.', '') 159 | predict_flow_dataset.iloc[:, 5] = predict_flow_dataset.iloc[:, 5].str.replace('.', '') 160 | 161 | X_predict_flow = predict_flow_dataset.iloc[:, :].values 162 | X_predict_flow = X_predict_flow.astype('float64') 163 | 164 | y_flow_pred = self.flow_model.predict(X_predict_flow) 165 | 166 | legitimate_trafic = 0 167 | ddos_trafic = 0 168 | 169 | for i in y_flow_pred: 170 | if i == 0: 171 | legitimate_trafic = legitimate_trafic + 1 172 | else: 173 | ddos_trafic = ddos_trafic + 1 174 | victim = int(predict_flow_dataset.iloc[i, 5])%20 175 | 176 | 177 | 178 | 179 | self.logger.info("------------------------------------------------------------------------------") 180 | if (legitimate_trafic/len(y_flow_pred)*100) > 80: 181 | self.logger.info("legitimate trafic ...") 182 | else: 183 | self.logger.info("ddos trafic ...") 184 | self.logger.info("victim is host: h{}".format(victim)) 185 | 186 | self.logger.info("------------------------------------------------------------------------------") 187 | 188 | file0 = open("PredictFlowStatsfile.csv","w") 189 | 190 | file0.write('timestamp,datapath_id,flow_id,ip_src,tp_src,ip_dst,tp_dst,ip_proto,icmp_code,icmp_type,flow_duration_sec,flow_duration_nsec,idle_timeout,hard_timeout,flags,packet_count,byte_count,packet_count_per_second,packet_count_per_nsecond,byte_count_per_second,byte_count_per_nsecond\n') 191 | file0.close() 192 | 193 | except: 194 | pass -------------------------------------------------------------------------------- /controller/KNN_controller.py: -------------------------------------------------------------------------------- 1 | from ryu.controller import ofp_event 2 | from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER 3 | from ryu.controller.handler import set_ev_cls 4 | from ryu.lib import hub 5 | 6 | import switch 7 | from datetime import datetime 8 
| 9 | import pandas as pd 10 | from sklearn.model_selection import train_test_split 11 | from sklearn.neighbors import KNeighborsClassifier 12 | from sklearn.metrics import confusion_matrix 13 | from sklearn.metrics import accuracy_score 14 | 15 | class SimpleMonitor13(switch.SimpleSwitch13): 16 | 17 | def __init__(self, *args, **kwargs): 18 | 19 | super(SimpleMonitor13, self).__init__(*args, **kwargs) 20 | self.datapaths = {} 21 | self.monitor_thread = hub.spawn(self._monitor) 22 | 23 | start = datetime.now() 24 | 25 | self.flow_training() 26 | 27 | end = datetime.now() 28 | print("Training time: ", (end-start)) 29 | 30 | @set_ev_cls(ofp_event.EventOFPStateChange, 31 | [MAIN_DISPATCHER, DEAD_DISPATCHER]) 32 | def _state_change_handler(self, ev): 33 | datapath = ev.datapath 34 | if ev.state == MAIN_DISPATCHER: 35 | if datapath.id not in self.datapaths: 36 | self.logger.debug('register datapath: %016x', datapath.id) 37 | self.datapaths[datapath.id] = datapath 38 | elif ev.state == DEAD_DISPATCHER: 39 | if datapath.id in self.datapaths: 40 | self.logger.debug('unregister datapath: %016x', datapath.id) 41 | del self.datapaths[datapath.id] 42 | 43 | def _monitor(self): 44 | while True: 45 | for dp in self.datapaths.values(): 46 | self._request_stats(dp) 47 | hub.sleep(10) 48 | 49 | self.flow_predict() 50 | 51 | def _request_stats(self, datapath): 52 | self.logger.debug('send stats request: %016x', datapath.id) 53 | parser = datapath.ofproto_parser 54 | 55 | req = parser.OFPFlowStatsRequest(datapath) 56 | datapath.send_msg(req) 57 | 58 | @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) 59 | def _flow_stats_reply_handler(self, ev): 60 | 61 | timestamp = datetime.now() 62 | timestamp = timestamp.timestamp() 63 | 64 | file0 = open("PredictFlowStatsfile.csv","w") 65 | 
file0.write('timestamp,datapath_id,flow_id,ip_src,tp_src,ip_dst,tp_dst,ip_proto,icmp_code,icmp_type,flow_duration_sec,flow_duration_nsec,idle_timeout,hard_timeout,flags,packet_count,byte_count,packet_count_per_second,packet_count_per_nsecond,byte_count_per_second,byte_count_per_nsecond\n') 66 | body = ev.msg.body 67 | icmp_code = -1 68 | icmp_type = -1 69 | tp_src = 0 70 | tp_dst = 0 71 | 72 | for stat in sorted([flow for flow in body if (flow.priority == 1) ], key=lambda flow: 73 | (flow.match['eth_type'],flow.match['ipv4_src'],flow.match['ipv4_dst'],flow.match['ip_proto'])): 74 | 75 | ip_src = stat.match['ipv4_src'] 76 | ip_dst = stat.match['ipv4_dst'] 77 | ip_proto = stat.match['ip_proto'] 78 | 79 | if stat.match['ip_proto'] == 1: 80 | icmp_code = stat.match['icmpv4_code'] 81 | icmp_type = stat.match['icmpv4_type'] 82 | 83 | elif stat.match['ip_proto'] == 6: 84 | tp_src = stat.match['tcp_src'] 85 | tp_dst = stat.match['tcp_dst'] 86 | 87 | elif stat.match['ip_proto'] == 17: 88 | tp_src = stat.match['udp_src'] 89 | tp_dst = stat.match['udp_dst'] 90 | 91 | flow_id = str(ip_src) + str(tp_src) + str(ip_dst) + str(tp_dst) + str(ip_proto) 92 | 93 | try: 94 | packet_count_per_second = stat.packet_count/stat.duration_sec 95 | packet_count_per_nsecond = stat.packet_count/stat.duration_nsec 96 | except: 97 | packet_count_per_second = 0 98 | packet_count_per_nsecond = 0 99 | 100 | try: 101 | byte_count_per_second = stat.byte_count/stat.duration_sec 102 | byte_count_per_nsecond = stat.byte_count/stat.duration_nsec 103 | except: 104 | byte_count_per_second = 0 105 | byte_count_per_nsecond = 0 106 | 107 | file0.write("{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n" 108 | .format(timestamp, ev.msg.datapath.id, flow_id, ip_src, tp_src,ip_dst, tp_dst, 109 | stat.match['ip_proto'],icmp_code,icmp_type, 110 | stat.duration_sec, stat.duration_nsec, 111 | stat.idle_timeout, stat.hard_timeout, 112 | stat.flags, stat.packet_count,stat.byte_count, 113 | 
packet_count_per_second,packet_count_per_nsecond, 114 | byte_count_per_second,byte_count_per_nsecond)) 115 | 116 | file0.close() 117 | 118 | def flow_training(self): 119 | 120 | self.logger.info("Flow Training ...") 121 | 122 | flow_dataset = pd.read_csv('FlowStatsfile.csv') 123 | 124 | flow_dataset.iloc[:, 2] = flow_dataset.iloc[:, 2].str.replace('.', '') 125 | flow_dataset.iloc[:, 3] = flow_dataset.iloc[:, 3].str.replace('.', '') 126 | flow_dataset.iloc[:, 5] = flow_dataset.iloc[:, 5].str.replace('.', '') 127 | 128 | X_flow = flow_dataset.iloc[:, :-1].values 129 | X_flow = X_flow.astype('float64') 130 | 131 | y_flow = flow_dataset.iloc[:, -1].values 132 | 133 | X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(X_flow, y_flow, test_size=0.25, random_state=0) 134 | 135 | classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2) 136 | self.flow_model = classifier.fit(X_flow_train, y_flow_train) 137 | 138 | y_flow_pred = self.flow_model.predict(X_flow_test) 139 | 140 | self.logger.info("------------------------------------------------------------------------------") 141 | 142 | self.logger.info("confusion matrix") 143 | cm = confusion_matrix(y_flow_test, y_flow_pred) 144 | self.logger.info(cm) 145 | 146 | acc = accuracy_score(y_flow_test, y_flow_pred) 147 | 148 | self.logger.info("succes accuracy = {0:.2f} %".format(acc*100)) 149 | fail = 1.0 - acc 150 | self.logger.info("fail accuracy = {0:.2f} %".format(fail*100)) 151 | self.logger.info("------------------------------------------------------------------------------") 152 | 153 | def flow_predict(self): 154 | try: 155 | predict_flow_dataset = pd.read_csv('PredictFlowStatsfile.csv') 156 | 157 | predict_flow_dataset.iloc[:, 2] = predict_flow_dataset.iloc[:, 2].str.replace('.', '') 158 | predict_flow_dataset.iloc[:, 3] = predict_flow_dataset.iloc[:, 3].str.replace('.', '') 159 | predict_flow_dataset.iloc[:, 5] = predict_flow_dataset.iloc[:, 5].str.replace('.', '') 160 | 161 | 
X_predict_flow = predict_flow_dataset.iloc[:, :].values 162 | X_predict_flow = X_predict_flow.astype('float64') 163 | 164 | y_flow_pred = self.flow_model.predict(X_predict_flow) 165 | 166 | legitimate_trafic = 0 167 | ddos_trafic = 0 168 | 169 | for i in y_flow_pred: 170 | if i == 0: 171 | legitimate_trafic = legitimate_trafic + 1 172 | else: 173 | ddos_trafic = ddos_trafic + 1 174 | victim = int(predict_flow_dataset.iloc[i, 5])%20 175 | 176 | 177 | 178 | 179 | self.logger.info("------------------------------------------------------------------------------") 180 | if (legitimate_trafic/len(y_flow_pred)*100) > 80: 181 | self.logger.info("legitimate trafic ...") 182 | else: 183 | self.logger.info("ddos trafic ...") 184 | self.logger.info("victim is host: h{}".format(victim)) 185 | 186 | self.logger.info("------------------------------------------------------------------------------") 187 | 188 | file0 = open("PredictFlowStatsfile.csv","w") 189 | 190 | file0.write('timestamp,datapath_id,flow_id,ip_src,tp_src,ip_dst,tp_dst,ip_proto,icmp_code,icmp_type,flow_duration_sec,flow_duration_nsec,idle_timeout,hard_timeout,flags,packet_count,byte_count,packet_count_per_second,packet_count_per_nsecond,byte_count_per_second,byte_count_per_nsecond\n') 191 | file0.close() 192 | 193 | except: 194 | pass -------------------------------------------------------------------------------- /controller/RF_controller.py: -------------------------------------------------------------------------------- 1 | from ryu.controller import ofp_event 2 | from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER 3 | from ryu.controller.handler import set_ev_cls 4 | from ryu.lib import hub 5 | 6 | import switch 7 | from datetime import datetime 8 | 9 | import pandas as pd 10 | from sklearn.model_selection import train_test_split 11 | from sklearn.ensemble import RandomForestClassifier 12 | from sklearn.metrics import confusion_matrix 13 | from sklearn.metrics import accuracy_score 14 | 15 | 
class SimpleMonitor13(switch.SimpleSwitch13): 16 | 17 | def __init__(self, *args, **kwargs): 18 | 19 | super(SimpleMonitor13, self).__init__(*args, **kwargs) 20 | self.datapaths = {} 21 | self.monitor_thread = hub.spawn(self._monitor) 22 | 23 | start = datetime.now() 24 | 25 | self.flow_training() 26 | 27 | end = datetime.now() 28 | print("Training time: ", (end-start)) 29 | 30 | @set_ev_cls(ofp_event.EventOFPStateChange, 31 | [MAIN_DISPATCHER, DEAD_DISPATCHER]) 32 | def _state_change_handler(self, ev): 33 | datapath = ev.datapath 34 | if ev.state == MAIN_DISPATCHER: 35 | if datapath.id not in self.datapaths: 36 | self.logger.debug('register datapath: %016x', datapath.id) 37 | self.datapaths[datapath.id] = datapath 38 | elif ev.state == DEAD_DISPATCHER: 39 | if datapath.id in self.datapaths: 40 | self.logger.debug('unregister datapath: %016x', datapath.id) 41 | del self.datapaths[datapath.id] 42 | 43 | def _monitor(self): 44 | while True: 45 | for dp in self.datapaths.values(): 46 | self._request_stats(dp) 47 | hub.sleep(10) 48 | 49 | self.flow_predict() 50 | 51 | def _request_stats(self, datapath): 52 | self.logger.debug('send stats request: %016x', datapath.id) 53 | parser = datapath.ofproto_parser 54 | 55 | req = parser.OFPFlowStatsRequest(datapath) 56 | datapath.send_msg(req) 57 | 58 | @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) 59 | def _flow_stats_reply_handler(self, ev): 60 | 61 | timestamp = datetime.now() 62 | timestamp = timestamp.timestamp() 63 | 64 | file0 = open("PredictFlowStatsfile.csv","w") 65 | file0.write('timestamp,datapath_id,flow_id,ip_src,tp_src,ip_dst,tp_dst,ip_proto,icmp_code,icmp_type,flow_duration_sec,flow_duration_nsec,idle_timeout,hard_timeout,flags,packet_count,byte_count,packet_count_per_second,packet_count_per_nsecond,byte_count_per_second,byte_count_per_nsecond\n') 66 | body = ev.msg.body 67 | icmp_code = -1 68 | icmp_type = -1 69 | tp_src = 0 70 | tp_dst = 0 71 | 72 | for stat in sorted([flow for flow in body if 
(flow.priority == 1) ], key=lambda flow: 73 | (flow.match['eth_type'],flow.match['ipv4_src'],flow.match['ipv4_dst'],flow.match['ip_proto'])): 74 | 75 | ip_src = stat.match['ipv4_src'] 76 | ip_dst = stat.match['ipv4_dst'] 77 | ip_proto = stat.match['ip_proto'] 78 | 79 | if stat.match['ip_proto'] == 1: 80 | icmp_code = stat.match['icmpv4_code'] 81 | icmp_type = stat.match['icmpv4_type'] 82 | 83 | elif stat.match['ip_proto'] == 6: 84 | tp_src = stat.match['tcp_src'] 85 | tp_dst = stat.match['tcp_dst'] 86 | 87 | elif stat.match['ip_proto'] == 17: 88 | tp_src = stat.match['udp_src'] 89 | tp_dst = stat.match['udp_dst'] 90 | 91 | flow_id = str(ip_src) + str(tp_src) + str(ip_dst) + str(tp_dst) + str(ip_proto) 92 | 93 | try: 94 | packet_count_per_second = stat.packet_count/stat.duration_sec 95 | packet_count_per_nsecond = stat.packet_count/stat.duration_nsec 96 | except: 97 | packet_count_per_second = 0 98 | packet_count_per_nsecond = 0 99 | 100 | try: 101 | byte_count_per_second = stat.byte_count/stat.duration_sec 102 | byte_count_per_nsecond = stat.byte_count/stat.duration_nsec 103 | except: 104 | byte_count_per_second = 0 105 | byte_count_per_nsecond = 0 106 | 107 | file0.write("{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n" 108 | .format(timestamp, ev.msg.datapath.id, flow_id, ip_src, tp_src,ip_dst, tp_dst, 109 | stat.match['ip_proto'],icmp_code,icmp_type, 110 | stat.duration_sec, stat.duration_nsec, 111 | stat.idle_timeout, stat.hard_timeout, 112 | stat.flags, stat.packet_count,stat.byte_count, 113 | packet_count_per_second,packet_count_per_nsecond, 114 | byte_count_per_second,byte_count_per_nsecond)) 115 | 116 | file0.close() 117 | 118 | def flow_training(self): 119 | 120 | self.logger.info("Flow Training ...") 121 | 122 | flow_dataset = pd.read_csv('FlowStatsfile.csv') 123 | 124 | flow_dataset.iloc[:, 2] = flow_dataset.iloc[:, 2].str.replace('.', '') 125 | flow_dataset.iloc[:, 3] = flow_dataset.iloc[:, 3].str.replace('.', '') 126 | 
flow_dataset.iloc[:, 5] = flow_dataset.iloc[:, 5].str.replace('.', '') 127 | 128 | X_flow = flow_dataset.iloc[:, :-1].values 129 | X_flow = X_flow.astype('float64') 130 | 131 | y_flow = flow_dataset.iloc[:, -1].values 132 | 133 | X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(X_flow, y_flow, test_size=0.25, random_state=0) 134 | 135 | classifier = RandomForestClassifier(n_estimators=10, criterion="entropy", random_state=0) 136 | self.flow_model = classifier.fit(X_flow_train, y_flow_train) 137 | 138 | y_flow_pred = self.flow_model.predict(X_flow_test) 139 | 140 | self.logger.info("------------------------------------------------------------------------------") 141 | 142 | self.logger.info("confusion matrix") 143 | cm = confusion_matrix(y_flow_test, y_flow_pred) 144 | self.logger.info(cm) 145 | 146 | acc = accuracy_score(y_flow_test, y_flow_pred) 147 | 148 | self.logger.info("succes accuracy = {0:.2f} %".format(acc*100)) 149 | fail = 1.0 - acc 150 | self.logger.info("fail accuracy = {0:.2f} %".format(fail*100)) 151 | self.logger.info("------------------------------------------------------------------------------") 152 | 153 | def flow_predict(self): 154 | try: 155 | predict_flow_dataset = pd.read_csv('PredictFlowStatsfile.csv') 156 | 157 | predict_flow_dataset.iloc[:, 2] = predict_flow_dataset.iloc[:, 2].str.replace('.', '') 158 | predict_flow_dataset.iloc[:, 3] = predict_flow_dataset.iloc[:, 3].str.replace('.', '') 159 | predict_flow_dataset.iloc[:, 5] = predict_flow_dataset.iloc[:, 5].str.replace('.', '') 160 | 161 | X_predict_flow = predict_flow_dataset.iloc[:, :].values 162 | X_predict_flow = X_predict_flow.astype('float64') 163 | 164 | y_flow_pred = self.flow_model.predict(X_predict_flow) 165 | 166 | legitimate_trafic = 0 167 | ddos_trafic = 0 168 | 169 | for i in y_flow_pred: 170 | if i == 0: 171 | legitimate_trafic = legitimate_trafic + 1 172 | else: 173 | ddos_trafic = ddos_trafic + 1 174 | victim = int(predict_flow_dataset.iloc[i, 
5])%20 175 | 176 | 177 | 178 | 179 | self.logger.info("------------------------------------------------------------------------------") 180 | if (legitimate_trafic/len(y_flow_pred)*100) > 80: 181 | self.logger.info("legitimate trafic ...") 182 | else: 183 | self.logger.info("ddos trafic ...") 184 | self.logger.info("victim is host: h{}".format(victim)) 185 | 186 | self.logger.info("------------------------------------------------------------------------------") 187 | 188 | file0 = open("PredictFlowStatsfile.csv","w") 189 | 190 | file0.write('timestamp,datapath_id,flow_id,ip_src,tp_src,ip_dst,tp_dst,ip_proto,icmp_code,icmp_type,flow_duration_sec,flow_duration_nsec,idle_timeout,hard_timeout,flags,packet_count,byte_count,packet_count_per_second,packet_count_per_nsecond,byte_count_per_second,byte_count_per_nsecond\n') 191 | file0.close() 192 | 193 | except: 194 | pass -------------------------------------------------------------------------------- /controller/collect_benign_trafic.py: -------------------------------------------------------------------------------- 1 | import switch 2 | from ryu.controller import ofp_event 3 | from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER 4 | from ryu.controller.handler import set_ev_cls 5 | from ryu.lib import hub 6 | 7 | from datetime import datetime 8 | 9 | # class CollectTrainingStatsApp(simple_switch_13.SimpleSwitch13): 10 | class CollectTrainingStatsApp(switch.SimpleSwitch13): 11 | def __init__(self, *args, **kwargs): 12 | super(CollectTrainingStatsApp, self).__init__(*args, **kwargs) 13 | self.datapaths = {} 14 | self.monitor_thread = hub.spawn(self.monitor) 15 | 16 | file0 = open("FlowStatsfile.csv","w") 17 | file0.write('timestamp,datapath_id,flow_id,ip_src,tp_src,ip_dst,tp_dst,ip_proto,icmp_code,icmp_type,flow_duration_sec,flow_duration_nsec,idle_timeout,hard_timeout,flags,packet_count,byte_count,packet_count_per_second,packet_count_per_nsecond,byte_count_per_second,byte_count_per_nsecond,label\n') 18 | 
file0.close() 19 | 20 | #Asynchronous message 21 | @set_ev_cls(ofp_event.EventOFPStateChange,[MAIN_DISPATCHER, DEAD_DISPATCHER]) 22 | def state_change_handler(self, ev): 23 | datapath = ev.datapath 24 | if ev.state == MAIN_DISPATCHER: 25 | if datapath.id not in self.datapaths: 26 | self.logger.debug('register datapath: %016x', datapath.id) 27 | self.datapaths[datapath.id] = datapath 28 | 29 | elif ev.state == DEAD_DISPATCHER: 30 | if datapath.id in self.datapaths: 31 | self.logger.debug('unregister datapath: %016x', datapath.id) 32 | del self.datapaths[datapath.id] 33 | 34 | 35 | def monitor(self): 36 | while True: 37 | for dp in self.datapaths.values(): 38 | self.request_stats(dp) 39 | hub.sleep(10) 40 | 41 | 42 | def request_stats(self, datapath): 43 | self.logger.debug('send stats request: %016x', datapath.id) 44 | 45 | parser = datapath.ofproto_parser 46 | 47 | req = parser.OFPFlowStatsRequest(datapath) 48 | datapath.send_msg(req) 49 | 50 | @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) 51 | def _flow_stats_reply_handler(self, ev): 52 | 53 | timestamp = datetime.now() 54 | timestamp = timestamp.timestamp() 55 | icmp_code = -1 56 | icmp_type = -1 57 | tp_src = 0 58 | tp_dst = 0 59 | 60 | file0 = open("FlowStatsfile.csv","a+") 61 | body = ev.msg.body 62 | for stat in sorted([flow for flow in body if (flow.priority == 1) ], key=lambda flow: 63 | (flow.match['eth_type'],flow.match['ipv4_src'],flow.match['ipv4_dst'],flow.match['ip_proto'])): 64 | 65 | 66 | ip_src = stat.match['ipv4_src'] 67 | ip_dst = stat.match['ipv4_dst'] 68 | ip_proto = stat.match['ip_proto'] 69 | 70 | if stat.match['ip_proto'] == 1: 71 | icmp_code = stat.match['icmpv4_code'] 72 | icmp_type = stat.match['icmpv4_type'] 73 | 74 | elif stat.match['ip_proto'] == 6: 75 | tp_src = stat.match['tcp_src'] 76 | tp_dst = stat.match['tcp_dst'] 77 | 78 | elif stat.match['ip_proto'] == 17: 79 | tp_src = stat.match['udp_src'] 80 | tp_dst = stat.match['udp_dst'] 81 | 82 | flow_id = str(ip_src) + 
str(tp_src) + str(ip_dst) + str(tp_dst) + str(ip_proto) 83 | 84 | try: 85 | packet_count_per_second = stat.packet_count/stat.duration_sec 86 | packet_count_per_nsecond = stat.packet_count/stat.duration_nsec 87 | except: 88 | packet_count_per_second = 0 89 | packet_count_per_nsecond = 0 90 | 91 | try: 92 | byte_count_per_second = stat.byte_count/stat.duration_sec 93 | byte_count_per_nsecond = stat.byte_count/stat.duration_nsec 94 | except: 95 | byte_count_per_second = 0 96 | byte_count_per_nsecond = 0 97 | 98 | 99 | file0.write("{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n" 100 | .format(timestamp, ev.msg.datapath.id, flow_id, ip_src, tp_src,ip_dst, tp_dst, 101 | stat.match['ip_proto'],icmp_code,icmp_type, 102 | stat.duration_sec, stat.duration_nsec, 103 | stat.idle_timeout, stat.hard_timeout, 104 | stat.flags, stat.packet_count,stat.byte_count, 105 | packet_count_per_second,packet_count_per_nsecond, 106 | byte_count_per_second,byte_count_per_nsecond,0)) 107 | file0.close() -------------------------------------------------------------------------------- /controller/collect_benign_trafic1.py: -------------------------------------------------------------------------------- 1 | import switch 2 | from ryu.controller import ofp_event 3 | from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER 4 | from ryu.controller.handler import set_ev_cls 5 | from ryu.lib import hub 6 | 7 | from datetime import datetime 8 | 9 | # class CollectTrainingStatsApp(simple_switch_13.SimpleSwitch13): 10 | class CollectTrainingStatsApp(switch.SimpleSwitch13): 11 | def __init__(self, *args, **kwargs): 12 | super(CollectTrainingStatsApp, self).__init__(*args, **kwargs) 13 | self.datapaths = {} 14 | self.monitor_thread = hub.spawn(self.monitor) 15 | 16 | #Asynchronous message 17 | @set_ev_cls(ofp_event.EventOFPStateChange,[MAIN_DISPATCHER, DEAD_DISPATCHER]) 18 | def state_change_handler(self, ev): 19 | datapath = ev.datapath 20 | if ev.state == MAIN_DISPATCHER: 
21 | if datapath.id not in self.datapaths: 22 | self.logger.debug('register datapath: %016x', datapath.id) 23 | self.datapaths[datapath.id] = datapath 24 | 25 | elif ev.state == DEAD_DISPATCHER: 26 | if datapath.id in self.datapaths: 27 | self.logger.debug('unregister datapath: %016x', datapath.id) 28 | del self.datapaths[datapath.id] 29 | 30 | 31 | def monitor(self): 32 | while True: 33 | for dp in self.datapaths.values(): 34 | self.request_stats(dp) 35 | hub.sleep(10) 36 | 37 | 38 | def request_stats(self, datapath): 39 | self.logger.debug('send stats request: %016x', datapath.id) 40 | 41 | parser = datapath.ofproto_parser 42 | 43 | req = parser.OFPFlowStatsRequest(datapath) 44 | datapath.send_msg(req) 45 | 46 | @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) 47 | def _flow_stats_reply_handler(self, ev): 48 | 49 | timestamp = datetime.now() 50 | timestamp = timestamp.timestamp() 51 | icmp_code = -1 52 | icmp_type = -1 53 | tp_src = 0 54 | tp_dst = 0 55 | 56 | file0 = open("FlowStatsfile.csv","a+") 57 | body = ev.msg.body 58 | for stat in sorted([flow for flow in body if (flow.priority == 1) ], key=lambda flow: 59 | (flow.match['eth_type'],flow.match['ipv4_src'],flow.match['ipv4_dst'],flow.match['ip_proto'])): 60 | 61 | 62 | ip_src = stat.match['ipv4_src'] 63 | ip_dst = stat.match['ipv4_dst'] 64 | ip_proto = stat.match['ip_proto'] 65 | 66 | if stat.match['ip_proto'] == 1: 67 | icmp_code = stat.match['icmpv4_code'] 68 | icmp_type = stat.match['icmpv4_type'] 69 | 70 | elif stat.match['ip_proto'] == 6: 71 | tp_src = stat.match['tcp_src'] 72 | tp_dst = stat.match['tcp_dst'] 73 | 74 | elif stat.match['ip_proto'] == 17: 75 | tp_src = stat.match['udp_src'] 76 | tp_dst = stat.match['udp_dst'] 77 | 78 | flow_id = str(ip_src) + str(tp_src) + str(ip_dst) + str(tp_dst) + str(ip_proto) 79 | 80 | try: 81 | packet_count_per_second = stat.packet_count/stat.duration_sec 82 | packet_count_per_nsecond = stat.packet_count/stat.duration_nsec 83 | except: 84 | 
# controller/collect_ddos_trafic.py
import switch
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.lib import hub

from datetime import datetime


def _safe_rate(count, duration):
    """Return count/duration, or 0 when duration is zero.

    Durations are 0 right after a flow is installed; only
    ZeroDivisionError is caught (a bare ``except`` would also hide
    real bugs such as attribute typos).
    """
    try:
        return count / duration
    except ZeroDivisionError:
        return 0


class CollectTrainingStatsApp(switch.SimpleSwitch13):
    """Ryu app that polls flow statistics from every connected switch
    each 10 s and appends one CSV row per priority-1 flow to
    FlowStatsfile.csv, labelled 1 (DDoS traffic) for ML training."""

    def __init__(self, *args, **kwargs):
        super(CollectTrainingStatsApp, self).__init__(*args, **kwargs)
        self.datapaths = {}  # dpid -> datapath of currently connected switches
        self.monitor_thread = hub.spawn(self.monitor)

    # Asynchronous message
    @set_ev_cls(ofp_event.EventOFPStateChange, [MAIN_DISPATCHER, DEAD_DISPATCHER])
    def state_change_handler(self, ev):
        """Keep self.datapaths in sync with switch connect/disconnect."""
        datapath = ev.datapath
        if ev.state == MAIN_DISPATCHER:
            if datapath.id not in self.datapaths:
                self.logger.debug('register datapath: %016x', datapath.id)
                self.datapaths[datapath.id] = datapath
        elif ev.state == DEAD_DISPATCHER:
            if datapath.id in self.datapaths:
                self.logger.debug('unregister datapath: %016x', datapath.id)
                del self.datapaths[datapath.id]

    def monitor(self):
        """Green-thread loop: request flow stats from all switches every 10 s."""
        while True:
            for dp in self.datapaths.values():
                self.request_stats(dp)
            hub.sleep(10)

    def request_stats(self, datapath):
        """Send an OFPFlowStatsRequest to a single switch."""
        self.logger.debug('send stats request: %016x', datapath.id)
        parser = datapath.ofproto_parser
        req = parser.OFPFlowStatsRequest(datapath)
        datapath.send_msg(req)

    @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)
    def _flow_stats_reply_handler(self, ev):
        """Write one labelled CSV row per priority-1 flow in the reply.

        Fixes vs. the previous version:
        * icmp_*/tp_* defaults are reset for EVERY flow (they used to be
          initialized once before the loop, so e.g. an ICMP flow's code/type
          leaked into the rows of subsequent TCP/UDP flows);
        * each rate is computed independently, so a zero duration_nsec no
          longer discards an already-computed per-second rate;
        * the file is opened with ``with`` so it is closed even on errors.
        """
        timestamp = datetime.now().timestamp()

        with open("FlowStatsfile.csv", "a+") as file0:
            body = ev.msg.body
            for stat in sorted([flow for flow in body if flow.priority == 1],
                               key=lambda flow: (flow.match['eth_type'],
                                                 flow.match['ipv4_src'],
                                                 flow.match['ipv4_dst'],
                                                 flow.match['ip_proto'])):

                # Per-protocol fields; -1/0 mean "not applicable".
                icmp_code = -1
                icmp_type = -1
                tp_src = 0
                tp_dst = 0

                ip_src = stat.match['ipv4_src']
                ip_dst = stat.match['ipv4_dst']
                ip_proto = stat.match['ip_proto']

                if ip_proto == 1:      # ICMP
                    icmp_code = stat.match['icmpv4_code']
                    icmp_type = stat.match['icmpv4_type']
                elif ip_proto == 6:    # TCP
                    tp_src = stat.match['tcp_src']
                    tp_dst = stat.match['tcp_dst']
                elif ip_proto == 17:   # UDP
                    tp_src = stat.match['udp_src']
                    tp_dst = stat.match['udp_dst']

                flow_id = str(ip_src) + str(tp_src) + str(ip_dst) \
                    + str(tp_dst) + str(ip_proto)

                packet_count_per_second = _safe_rate(stat.packet_count, stat.duration_sec)
                packet_count_per_nsecond = _safe_rate(stat.packet_count, stat.duration_nsec)
                byte_count_per_second = _safe_rate(stat.byte_count, stat.duration_sec)
                byte_count_per_nsecond = _safe_rate(stat.byte_count, stat.duration_nsec)

                # Trailing 1 is the class label: DDoS traffic.
                file0.write("{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{},{}\n"
                            .format(timestamp, ev.msg.datapath.id, flow_id,
                                    ip_src, tp_src, ip_dst, tp_dst,
                                    stat.match['ip_proto'], icmp_code, icmp_type,
                                    stat.duration_sec, stat.duration_nsec,
                                    stat.idle_timeout, stat.hard_timeout,
                                    stat.flags, stat.packet_count, stat.byte_count,
                                    packet_count_per_second, packet_count_per_nsecond,
                                    byte_count_per_second, byte_count_per_nsecond, 1))
# controller/switch.py
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packet import ether_types

from ryu.lib.packet import in_proto
from ryu.lib.packet import ipv4
from ryu.lib.packet import icmp
from ryu.lib.packet import tcp
from ryu.lib.packet import udp


class SimpleSwitch13(app_manager.RyuApp):
    """L2 learning switch (OpenFlow 1.3) that installs one flow entry
    per L3/L4 conversation so stats apps can collect per-flow counters."""

    OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]

    def __init__(self, *args, **kwargs):
        super(SimpleSwitch13, self).__init__(*args, **kwargs)
        self.mac_to_port = {}  # dpid -> {mac -> in_port}

    @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
    def switch_features_handler(self, ev):
        """Install the table-miss entry: send unmatched packets to the controller."""
        datapath = ev.msg.datapath
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser

        match = parser.OFPMatch()
        actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
                                          ofproto.OFPCML_NO_BUFFER)]
        self.add_flow(datapath, 0, match, actions)

    def add_flow(self, datapath, priority, match, actions, buffer_id=None, idle=0, hard=0):
        """Send an OFPFlowMod installing `match` -> `actions`.

        buffer_id, when given, lets the switch forward the buffered packet
        itself so no separate packet-out is needed.  idle/hard are the
        flow's timeouts in seconds (0 = never expire).
        """
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser

        inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS,
                                             actions)]
        # NOTE(review): truthiness test keeps the original behavior; a
        # buffer_id of 0 would be treated as "no buffer" — confirm 0 is
        # never a valid id before tightening to `is not None`.
        if buffer_id:
            mod = parser.OFPFlowMod(datapath=datapath, buffer_id=buffer_id,
                                    idle_timeout=idle, hard_timeout=hard,
                                    priority=priority, match=match,
                                    instructions=inst)
        else:
            mod = parser.OFPFlowMod(datapath=datapath, priority=priority,
                                    idle_timeout=idle, hard_timeout=hard,
                                    match=match, instructions=inst)

        datapath.send_msg(mod)

    @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
    def _packet_in_handler(self, ev):
        """Learn src MAC -> port, forward the packet, and install a
        per-conversation flow when the output port is known."""
        # If you hit this you might want to increase
        # the "miss_send_length" of your switch
        if ev.msg.msg_len < ev.msg.total_len:
            self.logger.debug("packet truncated: only %s of %s bytes",
                              ev.msg.msg_len, ev.msg.total_len)
        msg = ev.msg
        datapath = msg.datapath
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        in_port = msg.match['in_port']

        pkt = packet.Packet(msg.data)
        eth = pkt.get_protocols(ethernet.ethernet)[0]

        if eth.ethertype == ether_types.ETH_TYPE_LLDP:
            return  # ignore topology-discovery frames
        dst = eth.dst
        src = eth.src

        dpid = datapath.id
        self.mac_to_port.setdefault(dpid, {})

        # learn a mac address to avoid FLOOD next time.
        self.mac_to_port[dpid][src] = in_port

        if dst in self.mac_to_port[dpid]:
            out_port = self.mac_to_port[dpid][dst]
        else:
            out_port = ofproto.OFPP_FLOOD

        actions = [parser.OFPActionOutput(out_port)]

        # install a flow to avoid packet_in next time
        if out_port != ofproto.OFPP_FLOOD:

            # check IP Protocol and create a match for IP
            if eth.ethertype == ether_types.ETH_TYPE_IP:
                ip = pkt.get_protocol(ipv4.ipv4)
                srcip = ip.src
                dstip = ip.dst
                protocol = ip.proto

                if protocol == in_proto.IPPROTO_ICMP:
                    t = pkt.get_protocol(icmp.icmp)
                    match = parser.OFPMatch(eth_type=ether_types.ETH_TYPE_IP,
                                            ipv4_src=srcip, ipv4_dst=dstip,
                                            ip_proto=protocol, icmpv4_code=t.code,
                                            icmpv4_type=t.type)

                elif protocol == in_proto.IPPROTO_TCP:
                    t = pkt.get_protocol(tcp.tcp)
                    match = parser.OFPMatch(eth_type=ether_types.ETH_TYPE_IP,
                                            ipv4_src=srcip, ipv4_dst=dstip,
                                            ip_proto=protocol,
                                            tcp_src=t.src_port, tcp_dst=t.dst_port,)

                elif protocol == in_proto.IPPROTO_UDP:
                    u = pkt.get_protocol(udp.udp)
                    match = parser.OFPMatch(eth_type=ether_types.ETH_TYPE_IP,
                                            ipv4_src=srcip, ipv4_dst=dstip,
                                            ip_proto=protocol,
                                            udp_src=u.src_port, udp_dst=u.dst_port,)

                else:
                    # FIX: any other IP protocol (e.g. IGMP) previously left
                    # `match` unbound and crashed below with
                    # UnboundLocalError; fall back to an L3-only match.
                    match = parser.OFPMatch(eth_type=ether_types.ETH_TYPE_IP,
                                            ipv4_src=srcip, ipv4_dst=dstip,
                                            ip_proto=protocol)

                # verify if we have a valid buffer_id, if yes avoid to send both
                # flow_mod & packet_out
                if msg.buffer_id != ofproto.OFP_NO_BUFFER:
                    self.add_flow(datapath, 1, match, actions, msg.buffer_id, idle=20, hard=100)
                    return
                else:
                    self.add_flow(datapath, 1, match, actions, idle=20, hard=100)
        data = None
        if msg.buffer_id == ofproto.OFP_NO_BUFFER:
            data = msg.data

        out = parser.OFPPacketOut(datapath=datapath, buffer_id=msg.buffer_id,
                                  in_port=in_port, actions=actions, data=data)
        datapath.send_msg(out)
# mininet/generate_benign_trafic.py
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.link import TCLink
from mininet.log import setLogLevel
from mininet.node import OVSKernelSwitch, RemoteController
from time import sleep

from datetime import datetime
from random import randrange, choice

LINE = "--------------------------------------------------------------------------------"


class MyTopo(Topo):
    """6 switches in a chain (s1..s6), 3 hosts per switch (h1..h18)."""

    def build(self):
        switches = []
        # Creation order (sK then its 3 hosts, links afterwards) matches the
        # original file so Mininet assigns identical port numbers.
        for i in range(6):
            switches.append(self.addSwitch('s%d' % (i + 1),
                                           cls=OVSKernelSwitch, protocols='OpenFlow13'))
            for j in range(3):
                n = 3 * i + j + 1
                # MAC digits are written in decimal on purpose (h10 -> :10),
                # mirroring the original addressing scheme.
                self.addHost('h%d' % n, cpu=1.0 / 20,
                             mac="00:00:00:00:00:%02d" % n,
                             ip="10.0.0.%d/24" % n)

        for i in range(6):
            for j in range(3):
                self.addLink('h%d' % (3 * i + j + 1), switches[i])

        for i in range(5):
            self.addLink(switches[i], switches[i + 1])


def ip_generator():
    """Return a random host address 10.0.0.x with x in 1..18."""
    return ".".join(["10", "0", "0", str(randrange(1, 19))])


def startNetwork():
    """Run 600 iterations of mixed ICMP/TCP/UDP/HTTP traffic between
    random hosts, with h1 acting as web/iperf server."""
    topo = MyTopo()

    c0 = RemoteController('c0', ip='192.168.0.101', port=6653)
    net = Mininet(topo=topo, link=TCLink, controller=c0)

    net.start()

    hosts = [net.get('h%d' % n) for n in range(1, 19)]
    h1 = hosts[0]

    print(LINE)
    print("Generating traffic ...")
    h1.cmd('cd /home/mininet/webserver')
    h1.cmd('python -m SimpleHTTPServer 80 &')
    h1.cmd('iperf -s -p 5050 &')
    h1.cmd('iperf -s -u -p 5051 &')
    sleep(2)
    for h in hosts:
        h.cmd('cd /home/mininet/Downloads')

    for i in range(600):
        print(LINE)
        print("Iteration n {} ...".format(i + 1))
        print(LINE)

        for j in range(10):
            src = choice(hosts)
            dst = ip_generator()

            # Only the last ping of the batch runs in the foreground so the
            # iteration blocks until it finishes.
            background = " &" if j < 9 else ""
            print("generating ICMP traffic between %s and h%s and TCP/UDP traffic between %s and h1"
                  % (src, ((dst.split('.'))[3]), src))
            src.cmd("ping {} -c 100{}".format(dst, background))
            src.cmd("iperf -p 5050 -c 10.0.0.1")
            src.cmd("iperf -p 5051 -u -c 10.0.0.1")

            print("%s Downloading index.html from h1" % src)
            src.cmd("wget http://10.0.0.1/index.html")
            print("%s Downloading test.zip from h1" % src)
            src.cmd("wget http://10.0.0.1/test.zip")

        # FIX: the old command `rm -f *.* /home/mininet/Downloads` deleted
        # *.* in h1's current directory (the webserver dir!) and tried to
        # rm the Downloads directory itself; clean the downloads instead.
        h1.cmd("rm -f /home/mininet/Downloads/*")

    print(LINE)

    net.stop()


if __name__ == '__main__':

    start = datetime.now()

    setLogLevel('info')
    startNetwork()

    end = datetime.now()

    print(end - start)
# mininet/generate_ddos_trafic.py
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.link import TCLink
from mininet.log import setLogLevel
from mininet.node import OVSKernelSwitch, RemoteController
from time import sleep

from datetime import datetime
from random import randrange, choice

LINE = "--------------------------------------------------------------------------------"


class MyTopo(Topo):
    """6 switches in a chain (s1..s6), 3 hosts per switch (h1..h18)."""

    def build(self):
        switches = []
        # Creation order matches the original file so Mininet assigns
        # identical port numbers.
        for i in range(6):
            switches.append(self.addSwitch('s%d' % (i + 1),
                                           cls=OVSKernelSwitch, protocols='OpenFlow13'))
            for j in range(3):
                n = 3 * i + j + 1
                self.addHost('h%d' % n, cpu=1.0 / 20,
                             mac="00:00:00:00:00:%02d" % n,
                             ip="10.0.0.%d/24" % n)

        for i in range(6):
            for j in range(3):
                self.addLink('h%d' % (3 * i + j + 1), switches[i])

        for i in range(5):
            self.addLink(switches[i], switches[i + 1])


def ip_generator():
    """Return a random host address 10.0.0.x with x in 1..18."""
    return ".".join(["10", "0", "0", str(randrange(1, 19))])


def startNetwork():
    """Launch four 20 s hping3 floods (ICMP, UDP, TCP-SYN, LAND) from
    random hosts, 100 s apart, against the topology.

    The four near-identical stanzas of the previous version are folded
    into one data-driven loop; commands, prints and sleeps are unchanged.
    """
    topo = MyTopo()

    c0 = RemoteController('c0', ip='192.168.0.101', port=6653)
    net = Mininet(topo=topo, link=TCLink, controller=c0)

    net.start()

    hosts = [net.get('h%d' % n) for n in range(1, 19)]
    h1 = hosts[0]

    h1.cmd('cd /home/mininet/webserver')
    h1.cmd('python -m SimpleHTTPServer 80 &')

    attacks = [
        # (banner, dst -> shell command)
        ("Performing ICMP (Ping) Flood",
         lambda dst: "timeout 20s hping3 -1 -V -d 120 -w 64 -p 80 --rand-source --flood {}".format(dst)),
        ("Performing UDP Flood",
         lambda dst: "timeout 20s hping3 -2 -V -d 120 -w 64 --rand-source --flood {}".format(dst)),
        # The SYN flood deliberately targets the web server h1 (10.0.0.1).
        ("Performing TCP-SYN Flood",
         lambda dst: 'timeout 20s hping3 -S -V -d 120 -w 64 -p 80 --rand-source --flood 10.0.0.1'),
        # NOTE(review): spoofing the source as the destination (-a dst dst)
        # over ICMP (-1) — looks like a LAND-style attack variant; confirm
        # ICMP rather than TCP-SYN is intended.
        ("Performing LAND Attack",
         lambda dst: "timeout 20s hping3 -1 -V -d 120 -w 64 --flood -a {} {}".format(dst, dst)),
    ]

    for banner, make_cmd in attacks:
        src = choice(hosts)
        # dst is drawn for every attack (even the fixed-target SYN flood)
        # to keep attacker/target selection random per run.
        dst = ip_generator()
        print(LINE)
        print(banner)
        print(LINE)
        src.cmd(make_cmd(dst))
        sleep(100)

    print(LINE)

    net.stop()


if __name__ == '__main__':

    start = datetime.now()

    setLogLevel('info')
    startNetwork()

    end = datetime.now()

    print(end - start)
# mininet/generate_ddos_trafic1.py
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.link import TCLink
from mininet.log import setLogLevel
from mininet.node import OVSKernelSwitch, RemoteController
from time import sleep

from datetime import datetime
from random import randrange, choice

LINE = "--------------------------------------------------------------------------------"


class MyTopo(Topo):
    """Linear topology: switches s1..s6 chained, hosts h1..h18, 3 per switch."""

    def build(self):
        switch_list = []
        # Nodes are created in the same order as before (switch, then its
        # three hosts) so port numbering is unchanged.
        for s_idx in range(1, 7):
            sw = self.addSwitch('s%d' % s_idx, cls=OVSKernelSwitch,
                                protocols='OpenFlow13')
            switch_list.append(sw)
            for h_off in range(3):
                host_num = (s_idx - 1) * 3 + h_off + 1
                self.addHost('h%d' % host_num, cpu=1.0 / 20,
                             mac="00:00:00:00:00:%02d" % host_num,
                             ip="10.0.0.%d/24" % host_num)

        for s_idx, sw in enumerate(switch_list):
            for h_off in range(3):
                self.addLink('h%d' % (s_idx * 3 + h_off + 1), sw)

        for left, right in zip(switch_list, switch_list[1:]):
            self.addLink(left, right)


def ip_generator():
    """Pick a random target address inside 10.0.0.1 .. 10.0.0.18."""
    last_octet = str(randrange(1, 19))
    return ".".join(["10", "0", "0", last_octet])


def startNetwork():
    """Run the four non-spoofed hping3 floods (ICMP, UDP, TCP-SYN, LAND),
    each capped at 20 s and separated by a 100 s pause."""
    net = Mininet(topo=MyTopo(), link=TCLink,
                  controller=RemoteController('c0', ip='192.168.0.101', port=6653))
    net.start()

    hosts = [net.get('h%d' % idx) for idx in range(1, 19)]
    web_server = hosts[0]

    web_server.cmd('cd /home/mininet/webserver')
    web_server.cmd('python -m SimpleHTTPServer 80 &')

    flood_plan = (
        ("Performing ICMP (Ping) Flood",
         "timeout 20s hping3 -1 -V -d 120 -w 64 -p 80 --flood {}", 1),
        ("Performing UDP Flood",
         "timeout 20s hping3 -2 -V -d 120 -w 64 --flood {}", 1),
        ("Performing TCP-SYN Flood",
         'timeout 20s hping3 -S -V -d 120 -w 64 -p 80 --flood 10.0.0.1', 0),
        ("Performing LAND Attack",
         "timeout 20s hping3 -1 -V -d 120 -w 64 --flood -a {} {}", 2),
    )

    for banner, template, n_targets in flood_plan:
        attacker = choice(hosts)
        # One ip_generator() draw per attack, exactly as before, even when
        # the command has a fixed target.
        target = ip_generator()
        print(LINE)
        print(banner)
        print(LINE)
        attacker.cmd(template.format(*([target] * n_targets)))
        sleep(100)

    print(LINE)

    net.stop()


if __name__ == '__main__':

    start = datetime.now()

    setLogLevel('info')
    startNetwork()

    end = datetime.now()

    print(end - start)
mininet.link import TCLink 5 | # from mininet.util import dumpNodeConnections 6 | from mininet.log import setLogLevel 7 | from mininet.cli import CLI 8 | from mininet.node import OVSKernelSwitch, RemoteController 9 | # from time import sleep 10 | 11 | class MyTopo( Topo ): 12 | 13 | def build( self ): 14 | 15 | s1 = self.addSwitch( 's1', cls=OVSKernelSwitch, protocols='OpenFlow13' ) 16 | 17 | h1 = self.addHost( 'h1', cpu=1.0/20,mac="00:00:00:00:00:01", ip="10.0.0.1/24" ) 18 | h2 = self.addHost( 'h2', cpu=1.0/20, mac="00:00:00:00:00:02", ip="10.0.0.2/24" ) 19 | h3 = self.addHost( 'h3', cpu=1.0/20, mac="00:00:00:00:00:03", ip="10.0.0.3/24" ) 20 | 21 | s2 = self.addSwitch( 's2', cls=OVSKernelSwitch, protocols='OpenFlow13' ) 22 | 23 | h4 = self.addHost( 'h4', cpu=1.0/20, mac="00:00:00:00:00:04", ip="10.0.0.4/24" ) 24 | h5 = self.addHost( 'h5', cpu=1.0/20, mac="00:00:00:00:00:05", ip="10.0.0.5/24" ) 25 | h6 = self.addHost( 'h6', cpu=1.0/20, mac="00:00:00:00:00:06", ip="10.0.0.6/24" ) 26 | 27 | s3 = self.addSwitch( 's3', cls=OVSKernelSwitch, protocols='OpenFlow13' ) 28 | 29 | h7 = self.addHost( 'h7', cpu=1.0/20, mac="00:00:00:00:00:07", ip="10.0.0.7/24" ) 30 | h8 = self.addHost( 'h8', cpu=1.0/20, mac="00:00:00:00:00:08", ip="10.0.0.8/24" ) 31 | h9 = self.addHost( 'h9', cpu=1.0/20, mac="00:00:00:00:00:09", ip="10.0.0.9/24" ) 32 | 33 | s4 = self.addSwitch( 's4', cls=OVSKernelSwitch, protocols='OpenFlow13' ) 34 | 35 | h10 = self.addHost( 'h10', cpu=1.0/20, mac="00:00:00:00:00:10", ip="10.0.0.10/24" ) 36 | h11 = self.addHost( 'h11', cpu=1.0/20, mac="00:00:00:00:00:11", ip="10.0.0.11/24" ) 37 | h12 = self.addHost( 'h12', cpu=1.0/20, mac="00:00:00:00:00:12", ip="10.0.0.12/24" ) 38 | 39 | s5 = self.addSwitch( 's5', cls=OVSKernelSwitch, protocols='OpenFlow13' ) 40 | 41 | h13 = self.addHost( 'h13', cpu=1.0/20, mac="00:00:00:00:00:13", ip="10.0.0.13/24" ) 42 | h14 = self.addHost( 'h14', cpu=1.0/20, mac="00:00:00:00:00:14", ip="10.0.0.14/24" ) 43 | h15 = self.addHost( 'h15', 
cpu=1.0/20, mac="00:00:00:00:00:15", ip="10.0.0.15/24" ) 44 | 45 | s6 = self.addSwitch( 's6', cls=OVSKernelSwitch, protocols='OpenFlow13' ) 46 | 47 | h16 = self.addHost( 'h16', cpu=1.0/20, mac="00:00:00:00:00:16", ip="10.0.0.16/24" ) 48 | h17 = self.addHost( 'h17', cpu=1.0/20, mac="00:00:00:00:00:17", ip="10.0.0.17/24" ) 49 | h18 = self.addHost( 'h18', cpu=1.0/20, mac="00:00:00:00:00:18", ip="10.0.0.18/24" ) 50 | 51 | # Add links 52 | 53 | self.addLink( h1, s1 ) 54 | self.addLink( h2, s1 ) 55 | self.addLink( h3, s1 ) 56 | 57 | self.addLink( h4, s2 ) 58 | self.addLink( h5, s2 ) 59 | self.addLink( h6, s2 ) 60 | 61 | self.addLink( h7, s3 ) 62 | self.addLink( h8, s3 ) 63 | self.addLink( h9, s3 ) 64 | 65 | self.addLink( h10, s4 ) 66 | self.addLink( h11, s4 ) 67 | self.addLink( h12, s4 ) 68 | 69 | self.addLink( h13, s5 ) 70 | self.addLink( h14, s5 ) 71 | self.addLink( h15, s5 ) 72 | 73 | self.addLink( h16, s6 ) 74 | self.addLink( h17, s6 ) 75 | self.addLink( h18, s6 ) 76 | 77 | self.addLink( s1, s2 ) 78 | self.addLink( s2, s3 ) 79 | self.addLink( s3, s4 ) 80 | self.addLink( s4, s5 ) 81 | self.addLink( s5, s6 ) 82 | 83 | def startNetwork(): 84 | 85 | topo = MyTopo() 86 | c0 = RemoteController('c0', ip='192.168.0.101', port=6653) 87 | net = Mininet(topo=topo, link=TCLink, controller=c0) 88 | 89 | net.start() 90 | CLI(net) 91 | net.stop() 92 | 93 | if __name__ == '__main__': 94 | setLogLevel( 'info' ) 95 | startNetwork() 96 | -------------------------------------------------------------------------------- /ml/DT.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from matplotlib import pyplot as plt 3 | import pandas as pd 4 | from sklearn.model_selection import train_test_split 5 | # from sklearn.linear_model import LogisticRegression 6 | # from sklearn.neighbors import KNeighborsClassifier 7 | # from sklearn.svm import SVC 8 | # from sklearn.naive_bayes import GaussianNB 9 | from sklearn.tree import 
DecisionTreeClassifier 10 | # from sklearn.ensemble import RandomForestClassifier 11 | from sklearn.metrics import confusion_matrix 12 | from sklearn.metrics import accuracy_score 13 | 14 | class MachineLearning(): 15 | 16 | def __init__(self): 17 | 18 | print("Loading dataset ...") 19 | 20 | self.flow_dataset = pd.read_csv('FlowStatsfile.csv') 21 | 22 | self.flow_dataset.iloc[:, 2] = self.flow_dataset.iloc[:, 2].str.replace('.', '') 23 | self.flow_dataset.iloc[:, 3] = self.flow_dataset.iloc[:, 3].str.replace('.', '') 24 | self.flow_dataset.iloc[:, 5] = self.flow_dataset.iloc[:, 5].str.replace('.', '') 25 | 26 | def flow_training(self): 27 | 28 | print("Flow Training ...") 29 | 30 | X_flow = self.flow_dataset.iloc[:, :-1].values 31 | X_flow = X_flow.astype('float64') 32 | 33 | y_flow = self.flow_dataset.iloc[:, -1].values 34 | 35 | X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(X_flow, y_flow, test_size=0.25, random_state=0) 36 | 37 | classifier = DecisionTreeClassifier(criterion='entropy', random_state=0) 38 | flow_model = classifier.fit(X_flow_train, y_flow_train) 39 | 40 | y_flow_pred = flow_model.predict(X_flow_test) 41 | 42 | print("------------------------------------------------------------------------------") 43 | 44 | print("confusion matrix") 45 | cm = confusion_matrix(y_flow_test, y_flow_pred) 46 | print(cm) 47 | 48 | acc = accuracy_score(y_flow_test, y_flow_pred) 49 | 50 | print("succes accuracy = {0:.2f} %".format(acc*100)) 51 | fail = 1.0 - acc 52 | print("fail accuracy = {0:.2f} %".format(fail*100)) 53 | print("------------------------------------------------------------------------------") 54 | 55 | x = ['TP','FP','FN','TN'] 56 | plt.title("Decision Tree") 57 | plt.xlabel('Classe predite') 58 | plt.ylabel('Nombre de flux') 59 | plt.tight_layout() 60 | plt.style.use("seaborn-darkgrid") 61 | y = [cm[0][0],cm[0][1],cm[1][0],cm[1][1]] 62 | plt.bar(x,y, color="#e0d692", label='DT') 63 | plt.legend() 64 | plt.show() 65 | 66 | def 
# ml/KNN.py — k-nearest-neighbours model for SDN DDoS flow classification.
from datetime import datetime
from matplotlib import pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score


class MachineLearning():
    """Train and evaluate a KNeighborsClassifier on FlowStatsfile.csv."""

    def __init__(self):
        """Load the flow-statistics dataset and normalise dotted columns."""
        print("Loading dataset ...")

        self.flow_dataset = pd.read_csv('FlowStatsfile.csv')

        # Strip dots from the dotted string columns so they parse as numbers.
        # regex=False keeps '.' literal; as a regex it would blank the column.
        self.flow_dataset.iloc[:, 2] = self.flow_dataset.iloc[:, 2].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 3] = self.flow_dataset.iloc[:, 3].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 5] = self.flow_dataset.iloc[:, 5].str.replace('.', '', regex=False)

    def flow_training(self):
        """Fit 5-NN (Euclidean: minkowski with p=2) and report the results."""
        print("Flow Training ...")

        X_flow = self.flow_dataset.iloc[:, :-1].values
        X_flow = X_flow.astype('float64')

        y_flow = self.flow_dataset.iloc[:, -1].values

        X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(
            X_flow, y_flow, test_size=0.25, random_state=0)

        classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)
        flow_model = classifier.fit(X_flow_train, y_flow_train)

        y_flow_pred = flow_model.predict(X_flow_test)

        print("------------------------------------------------------------------------------")
        print("confusion matrix")
        cm = confusion_matrix(y_flow_test, y_flow_pred)
        print(cm)

        acc = accuracy_score(y_flow_test, y_flow_pred)
        print("success accuracy = {0:.2f} %".format(acc*100))
        fail = 1.0 - acc
        print("fail accuracy = {0:.2f} %".format(fail*100))
        print("------------------------------------------------------------------------------")

        # Bar chart of the confusion-matrix cells.
        x = ['TP', 'FP', 'FN', 'TN']
        plt.title("KNN")
        plt.xlabel('Classe predite')
        plt.ylabel('Nombre de flux')
        plt.tight_layout()
        # "seaborn-darkgrid" was renamed in matplotlib >= 3.6; fall back gracefully.
        try:
            plt.style.use("seaborn-darkgrid")
        except OSError:
            plt.style.use("seaborn-v0_8-darkgrid")
        y = [cm[0][0], cm[0][1], cm[1][0], cm[1][1]]
        plt.bar(x, y, color="#e46e6e", label='KNN')
        plt.legend()
        plt.show()


def main():
    """Time a full load-and-train run."""
    start = datetime.now()

    ml = MachineLearning()
    ml.flow_training()

    end = datetime.now()
    print("Training time: ", (end-start))


if __name__ == "__main__":
    main()
# ml/LR.py — logistic-regression model plus dataset-composition pie charts.
from datetime import datetime

from matplotlib import pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score


class MachineLearning():
    """Train a LogisticRegression on FlowStatsfile.csv and chart the dataset."""

    def __init__(self):
        """Load the flow-statistics dataset and normalise dotted columns."""
        print("Loading dataset ...")

        self.flow_dataset = pd.read_csv('FlowStatsfile.csv')

        # Strip dots from the dotted string columns so they parse as numbers.
        # regex=False keeps '.' literal; as a regex it would blank the column.
        self.flow_dataset.iloc[:, 2] = self.flow_dataset.iloc[:, 2].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 3] = self.flow_dataset.iloc[:, 3].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 5] = self.flow_dataset.iloc[:, 5].str.replace('.', '', regex=False)

    def flow_training(self):
        """Fit/evaluate the model, then plot label and protocol distributions."""
        print("Flow Training ...")

        X_flow = self.flow_dataset.iloc[:, :-1].values
        X_flow = X_flow.astype('float64')

        y_flow = self.flow_dataset.iloc[:, -1].values

        X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(
            X_flow, y_flow, test_size=0.25, random_state=0)

        classifier = LogisticRegression(solver='liblinear', random_state=0)
        flow_model = classifier.fit(X_flow_train, y_flow_train)

        y_flow_pred = flow_model.predict(X_flow_test)

        print("------------------------------------------------------------------------------")
        print("confusion matrix")
        cm = confusion_matrix(y_flow_test, y_flow_pred)
        print(cm)

        acc = accuracy_score(y_flow_test, y_flow_pred)
        print("success accuracy = {0:.2f} %".format(acc*100))
        fail = 1.0 - acc
        print("fail accuracy = {0:.2f} %".format(fail*100))
        print("------------------------------------------------------------------------------")

        # ---- label distribution (label 0 = normal traffic, 1 = DDoS) ----
        benin = 0
        ddos = 0
        for i in y_flow:
            if i == 0:
                benin += 1
            elif i == 1:
                ddos += 1

        print("benin = ", benin)
        print("ddos = ", ddos)
        print("------------------------------------------------------------------------------")

        plt.title("Dataset")
        plt.tight_layout()
        explode = [0, 0.1]
        plt.pie([benin, ddos], labels=['NORMAL', 'DDoS'],
                wedgeprops={'edgecolor': 'black'}, explode=explode, autopct="%1.2f%%")
        plt.show()

        # ---- protocol distribution (column 7: 1 = ICMP, 6 = TCP, 17 = UDP) ----
        icmp = 0
        tcp = 0
        udp = 0

        proto = self.flow_dataset.iloc[:, 7].values
        proto = proto.astype('int')
        for i in proto:
            if i == 6:
                tcp += 1
            elif i == 17:
                udp += 1
            elif i == 1:
                icmp += 1

        print("tcp = ", tcp)
        print("udp = ", udp)
        print("icmp = ", icmp)

        plt.title("Dataset")
        explode = [0, 0.1, 0.1]
        plt.pie([icmp, tcp, udp], labels=['ICMP', 'TCP', 'UDP'],
                wedgeprops={'edgecolor': 'black'}, explode=explode, autopct="%1.2f%%")
        plt.show()

        # ---- protocol x label breakdown ----
        icmp_normal = 0
        tcp_normal = 0
        udp_normal = 0
        icmp_ddos = 0
        tcp_ddos = 0
        udp_ddos = 0

        proto = self.flow_dataset.iloc[:, [7, -1]].values
        proto = proto.astype('int')

        for i in proto:
            if i[0] == 6 and i[1] == 0:
                tcp_normal += 1
            elif i[0] == 6 and i[1] == 1:
                tcp_ddos += 1

            if i[0] == 17 and i[1] == 0:
                udp_normal += 1
            elif i[0] == 17 and i[1] == 1:
                udp_ddos += 1

            if i[0] == 1 and i[1] == 0:
                icmp_normal += 1
            elif i[0] == 1 and i[1] == 1:
                icmp_ddos += 1

        print("tcp_normal = ", tcp_normal)
        print("tcp_ddos = ", tcp_ddos)
        print("udp_normal = ", udp_normal)
        print("udp_ddos = ", udp_ddos)
        print("icmp_normal = ", icmp_normal)
        print("icmp_ddos = ", icmp_ddos)

        plt.title("Dataset")
        explode = [0, 0.1, 0.1, 0.1, 0.1, 0.1]
        plt.pie([icmp_normal, icmp_ddos, tcp_normal, tcp_ddos, udp_normal, udp_ddos],
                labels=['ICMP_Normal', 'ICMP_DDoS', 'TCP_Normal', 'TCP_DDoS', 'UDP_Normal', 'UDP_DDoS'],
                wedgeprops={'edgecolor': 'black'}, explode=explode, autopct="%1.2f%%")
        plt.show()


def main():
    """Time a full load-and-train run."""
    start = datetime.now()

    ml = MachineLearning()
    ml.flow_training()

    end = datetime.now()
    print("Training time: ", (end-start))


if __name__ == "__main__":
    main()
# ml/ML.py — run several classifiers on one train/test split and compare them.
from datetime import datetime
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score


class MachineLearning():
    """Train LR/KNN/SVM/NB/DT/RF on FlowStatsfile.csv and plot grouped bars."""

    def __init__(self):
        """Load the dataset once and build a single shared 75/25 split."""
        print("Loading dataset ...")

        # Number of classifiers evaluated so far; used to offset each
        # algorithm's bars inside the shared comparison chart.
        self.counter = 0

        self.flow_dataset = pd.read_csv('FlowStatsfile.csv')

        # Strip dots from the dotted string columns so they parse as numbers.
        # regex=False keeps '.' literal; as a regex it would blank the column.
        self.flow_dataset.iloc[:, 2] = self.flow_dataset.iloc[:, 2].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 3] = self.flow_dataset.iloc[:, 3].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 5] = self.flow_dataset.iloc[:, 5].str.replace('.', '', regex=False)

        self.X_flow = self.flow_dataset.iloc[:, :-1].values
        self.X_flow = self.X_flow.astype('float64')

        self.y_flow = self.flow_dataset.iloc[:, -1].values

        # One shared split so every algorithm is scored on identical data.
        self.X_flow_train, self.X_flow_test, self.y_flow_train, self.y_flow_test = train_test_split(
            self.X_flow, self.y_flow, test_size=0.25, random_state=0)

    def LR(self):
        """Logistic regression."""
        print("------------------------------------------------------------------------------")
        print("Logistic Regression ...")

        self.classifier = LogisticRegression(solver='liblinear', random_state=0)
        self.Confusion_matrix()

    def KNN(self):
        """5-nearest neighbours (Euclidean: minkowski with p=2)."""
        print("------------------------------------------------------------------------------")
        print("K-NEAREST NEIGHBORS ...")

        self.classifier = KNeighborsClassifier(n_neighbors=5, metric='minkowski', p=2)
        self.Confusion_matrix()

    def SVM(self):
        """Support-vector machine with an RBF kernel."""
        print("------------------------------------------------------------------------------")
        print("SUPPORT-VECTOR MACHINE ...")

        self.classifier = SVC(kernel='rbf', random_state=0)
        self.Confusion_matrix()

    def NB(self):
        """Gaussian naive Bayes."""
        print("------------------------------------------------------------------------------")
        print("NAIVE-BAYES ...")

        self.classifier = GaussianNB()
        self.Confusion_matrix()

    def DT(self):
        """Decision tree with entropy criterion."""
        print("------------------------------------------------------------------------------")
        print("DECISION TREE ...")

        self.classifier = DecisionTreeClassifier(criterion='entropy', random_state=0)
        self.Confusion_matrix()

    def RF(self):
        """Random forest of 10 entropy trees."""
        print("------------------------------------------------------------------------------")
        print("RANDOM FOREST ...")

        self.classifier = RandomForestClassifier(n_estimators=10, criterion="entropy", random_state=0)
        self.Confusion_matrix()

    def Confusion_matrix(self):
        """Fit self.classifier, print its scores and add its bars to the chart.

        The bar offset/colour is keyed on self.counter, which assumes the
        call order LR, KNN, NB, DT, RF (SVM skipped) as driven by main().
        """
        self.counter += 1

        self.flow_model = self.classifier.fit(self.X_flow_train, self.y_flow_train)

        self.y_flow_pred = self.flow_model.predict(self.X_flow_test)

        print("------------------------------------------------------------------------------")
        print("confusion matrix")
        cm = confusion_matrix(self.y_flow_test, self.y_flow_pred)
        print(cm)

        acc = accuracy_score(self.y_flow_test, self.y_flow_pred)
        print("success accuracy = {0:.2f} %".format(acc*100))
        fail = 1.0 - acc
        print("fail accuracy = {0:.2f} %".format(fail*100))
        print("------------------------------------------------------------------------------")

        x = ['TP', 'FP', 'FN', 'TN']
        x_indexes = np.arange(len(x))
        width = 0.10
        plt.xticks(ticks=x_indexes, labels=x)
        plt.title("Résultats des algorithmes")
        plt.xlabel('Classe predite')
        plt.ylabel('Nombre de flux')
        plt.tight_layout()
        # "seaborn-darkgrid" was renamed in matplotlib >= 3.6; fall back gracefully.
        try:
            plt.style.use("seaborn-darkgrid")
        except OSError:
            plt.style.use("seaborn-v0_8-darkgrid")
        y = [cm[0][0], cm[0][1], cm[1][0], cm[1][1]]
        if self.counter == 1:
            plt.bar(x_indexes - 2*width, y, width=width, color="#1b7021", label='LR')
            plt.legend()
        if self.counter == 2:
            plt.bar(x_indexes - width, y, width=width, color="#e46e6e", label='KNN')
            plt.legend()
        if self.counter == 3:
            plt.bar(x_indexes, y, width=width, color="#0000ff", label='NB')
            plt.legend()
        if self.counter == 4:
            plt.bar(x_indexes + width, y, width=width, color="#e0d692", label='DT')
            plt.legend()
        if self.counter == 5:
            plt.bar(x_indexes + 2*width, y, width=width, color="#000000", label='RF')
            plt.legend()
        # NOTE(review): show() after every classifier opens a new figure each
        # time, so the grouped chart never actually accumulates all five
        # algorithms — confirm whether show() should run only when counter == 5.
        plt.show()


def main():
    """Run all enabled classifiers and time each one."""
    start_script = datetime.now()

    ml = MachineLearning()

    start = datetime.now()
    ml.LR()
    end = datetime.now()
    print("LEARNING and PREDICTING Time: ", (end-start))

    start = datetime.now()
    ml.KNN()
    end = datetime.now()
    print("LEARNING and PREDICTING Time: ", (end-start))

    # SVM is deliberately skipped (slow on this dataset); re-enabling it
    # shifts self.counter and therefore the bar colours/offsets above.
    # ml.SVM()

    start = datetime.now()
    ml.NB()
    end = datetime.now()
    print("LEARNING and PREDICTING Time: ", (end-start))

    start = datetime.now()
    ml.DT()
    end = datetime.now()
    print("LEARNING and PREDICTING Time: ", (end-start))

    start = datetime.now()
    ml.RF()
    end = datetime.now()
    print("LEARNING and PREDICTING Time: ", (end-start))

    end_script = datetime.now()
    print("Script Time: ", (end_script-start_script))


if __name__ == "__main__":
    main()
# ml/NB.py — Gaussian naive-Bayes model for SDN DDoS flow classification.
from datetime import datetime
from matplotlib import pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score


class MachineLearning():
    """Train and evaluate a GaussianNB classifier on FlowStatsfile.csv."""

    def __init__(self):
        """Load the flow-statistics dataset and normalise dotted columns."""
        print("Loading dataset ...")

        self.flow_dataset = pd.read_csv('FlowStatsfile.csv')

        # Strip dots from the dotted string columns so they parse as numbers.
        # regex=False keeps '.' literal; as a regex it would blank the column.
        self.flow_dataset.iloc[:, 2] = self.flow_dataset.iloc[:, 2].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 3] = self.flow_dataset.iloc[:, 3].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 5] = self.flow_dataset.iloc[:, 5].str.replace('.', '', regex=False)

    def flow_training(self):
        """Fit the naive-Bayes model on a 75/25 split and report results."""
        print("Flow Training ...")

        X_flow = self.flow_dataset.iloc[:, :-1].values
        X_flow = X_flow.astype('float64')

        y_flow = self.flow_dataset.iloc[:, -1].values

        X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(
            X_flow, y_flow, test_size=0.25, random_state=0)

        classifier = GaussianNB()
        flow_model = classifier.fit(X_flow_train, y_flow_train)

        y_flow_pred = flow_model.predict(X_flow_test)

        print("------------------------------------------------------------------------------")
        print("confusion matrix")
        cm = confusion_matrix(y_flow_test, y_flow_pred)
        print(cm)

        acc = accuracy_score(y_flow_test, y_flow_pred)
        print("success accuracy = {0:.2f} %".format(acc*100))
        fail = 1.0 - acc
        print("fail accuracy = {0:.2f} %".format(fail*100))
        print("------------------------------------------------------------------------------")

        # Bar chart of the confusion-matrix cells.
        x = ['TP', 'FP', 'FN', 'TN']
        plt.title("Naive Bayes")
        plt.xlabel('Classe predite')
        plt.ylabel('Nombre de flux')
        plt.tight_layout()
        # "seaborn-darkgrid" was renamed in matplotlib >= 3.6; fall back gracefully.
        try:
            plt.style.use("seaborn-darkgrid")
        except OSError:
            plt.style.use("seaborn-v0_8-darkgrid")
        y = [cm[0][0], cm[0][1], cm[1][0], cm[1][1]]
        plt.bar(x, y, color="#0000ff", label='NB')
        plt.legend()
        plt.show()


def main():
    """Time a full load-and-train run."""
    start = datetime.now()

    ml = MachineLearning()
    ml.flow_training()

    end = datetime.now()
    print("Training time: ", (end-start))


if __name__ == "__main__":
    main()
# ml/RF.py — random-forest model for SDN DDoS flow classification.
from datetime import datetime
from matplotlib import pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score


class MachineLearning():
    """Train and evaluate a RandomForestClassifier on FlowStatsfile.csv."""

    def __init__(self):
        """Load the flow-statistics dataset and normalise dotted columns."""
        print("Loading dataset ...")

        self.flow_dataset = pd.read_csv('FlowStatsfile.csv')

        # Strip dots from the dotted string columns so they parse as numbers.
        # regex=False keeps '.' literal; as a regex it would blank the column.
        self.flow_dataset.iloc[:, 2] = self.flow_dataset.iloc[:, 2].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 3] = self.flow_dataset.iloc[:, 3].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 5] = self.flow_dataset.iloc[:, 5].str.replace('.', '', regex=False)

    def flow_training(self):
        """Fit a 10-tree entropy forest on a 75/25 split and report results."""
        print("Flow Training ...")

        X_flow = self.flow_dataset.iloc[:, :-1].values
        X_flow = X_flow.astype('float64')

        y_flow = self.flow_dataset.iloc[:, -1].values

        X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(
            X_flow, y_flow, test_size=0.25, random_state=0)

        classifier = RandomForestClassifier(n_estimators=10, criterion="entropy", random_state=0)
        flow_model = classifier.fit(X_flow_train, y_flow_train)

        y_flow_pred = flow_model.predict(X_flow_test)

        print("------------------------------------------------------------------------------")
        print("confusion matrix")
        cm = confusion_matrix(y_flow_test, y_flow_pred)
        print(cm)

        acc = accuracy_score(y_flow_test, y_flow_pred)
        print("success accuracy = {0:.2f} %".format(acc*100))
        fail = 1.0 - acc
        print("fail accuracy = {0:.2f} %".format(fail*100))
        print("------------------------------------------------------------------------------")

        # Bar chart of the confusion-matrix cells.
        x = ['TP', 'FP', 'FN', 'TN']
        plt.title("Random Forest")
        plt.xlabel('Classe predite')
        plt.ylabel('Nombre de flux')
        plt.tight_layout()
        # "seaborn-darkgrid" was renamed in matplotlib >= 3.6; fall back gracefully.
        try:
            plt.style.use("seaborn-darkgrid")
        except OSError:
            plt.style.use("seaborn-v0_8-darkgrid")
        y = [cm[0][0], cm[0][1], cm[1][0], cm[1][1]]
        plt.bar(x, y, color="#000000", label='RF')
        plt.legend()
        plt.show()


def main():
    """Time a full load-and-train run."""
    start = datetime.now()

    ml = MachineLearning()
    ml.flow_training()

    end = datetime.now()
    print("Training time: ", (end-start))


if __name__ == "__main__":
    main()
# ml/SVM.py — RBF support-vector machine for SDN DDoS flow classification.
from datetime import datetime

import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score


class MachineLearning():
    """Train and evaluate an RBF-kernel SVC on FlowStatsfile.csv."""

    def __init__(self):
        """Load the flow-statistics dataset and normalise dotted columns."""
        print("Loading dataset ...")

        self.flow_dataset = pd.read_csv('FlowStatsfile.csv')

        # Strip dots from the dotted string columns so they parse as numbers.
        # regex=False keeps '.' literal; as a regex it would blank the column.
        self.flow_dataset.iloc[:, 2] = self.flow_dataset.iloc[:, 2].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 3] = self.flow_dataset.iloc[:, 3].str.replace('.', '', regex=False)
        self.flow_dataset.iloc[:, 5] = self.flow_dataset.iloc[:, 5].str.replace('.', '', regex=False)

    def flow_training(self):
        """Fit the SVM on a 75/25 split and print confusion matrix + accuracy."""
        print("Flow Training ...")

        X_flow = self.flow_dataset.iloc[:, :-1].values
        X_flow = X_flow.astype('float64')

        y_flow = self.flow_dataset.iloc[:, -1].values

        X_flow_train, X_flow_test, y_flow_train, y_flow_test = train_test_split(
            X_flow, y_flow, test_size=0.25, random_state=0)

        classifier = SVC(kernel='rbf', random_state=0)
        flow_model = classifier.fit(X_flow_train, y_flow_train)

        y_flow_pred = flow_model.predict(X_flow_test)

        print("------------------------------------------------------------------------------")
        print("confusion matrix")
        cm = confusion_matrix(y_flow_test, y_flow_pred)
        print(cm)

        acc = accuracy_score(y_flow_test, y_flow_pred)
        print("success accuracy = {0:.2f} %".format(acc*100))
        fail = 1.0 - acc
        print("fail accuracy = {0:.2f} %".format(fail*100))
        print("------------------------------------------------------------------------------")


def main():
    """Time a full load-and-train run."""
    start = datetime.now()

    ml = MachineLearning()
    ml.flow_training()

    end = datetime.now()
    print("Training time: ", (end-start))


if __name__ == "__main__":
    main()