├── README.md ├── lib ├── __init__.py ├── __pycache__ │ ├── libCorr.pypy-41.pyc │ ├── __init__.pypy-41.pyc │ ├── __init__.cpython-36.pyc │ ├── libCorr.cpython-36.pyc │ ├── libMacros.cpython-36.pyc │ ├── libParser.cpython-36.pyc │ ├── libCorrParse.cpython-36.pyc │ └── redissettings.cpython-36.pyc ├── libMacros.py ├── redissettings.py ├── libNormalizer.py ├── libParser.py ├── libCorrParse.py └── libCorr.py ├── .testfunctions.py.swp ├── corrrules └── srciprule.json ├── testparser.py ├── parsers ├── examplerule.json └── parser.xml ├── consumit.py ├── config ├── example-nxlog.conf ├── example-rsyslog.conf └── loggen.py ├── tester1.py ├── parser.py ├── corrruleexamples.txt ├── corr_rules.py └── .vscode └── .ropeproject └── config.py /README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /lib/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.testfunctions.py.swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/.testfunctions.py.swp -------------------------------------------------------------------------------- /lib/__pycache__/libCorr.pypy-41.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/libCorr.pypy-41.pyc -------------------------------------------------------------------------------- /lib/__pycache__/__init__.pypy-41.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/__init__.pypy-41.pyc -------------------------------------------------------------------------------- /lib/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /lib/__pycache__/libCorr.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/libCorr.cpython-36.pyc -------------------------------------------------------------------------------- /lib/__pycache__/libMacros.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/libMacros.cpython-36.pyc -------------------------------------------------------------------------------- /lib/__pycache__/libParser.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/libParser.cpython-36.pyc -------------------------------------------------------------------------------- /lib/__pycache__/libCorrParse.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/libCorrParse.cpython-36.pyc -------------------------------------------------------------------------------- /lib/__pycache__/redissettings.cpython-36.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/product/corr_eingine/master/lib/__pycache__/redissettings.cpython-36.pyc -------------------------------------------------------------------------------- /corrrules/srciprule.json: -------------------------------------------------------------------------------- 1 | {"rulename":"Source IP in Established ThreatLists", 2 | "id":"100000005", 3 | "descp":"Source IP in log is in a ThreatList", 4 | "score":50, 5 | "sev":1, 6 | "pri":5, 7 | "rule":[ 8 | {"NEQ":["src","10.0.0.1"]}, 9 | {"EQ":["dst","10.2.0.2"]}, 10 | {"REX":["src","10.+\s2.0"]}, 11 | {"IPRANGE":["dst","10.2.0.1","10.2.0.10"]}, 12 | {"ALIST":["dst","BADIPLIST"]}] 13 | } -------------------------------------------------------------------------------- /lib/libMacros.py: -------------------------------------------------------------------------------- 1 | macros = {"$IP":r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", 2 | "$PORT":r"[\d]{1,5}", 3 | "$SCISCOTIMEHOST":r"^<[\d]+>(?P[^ ]+\s[^ ]+\s[^ ]+\s[\d]+:[\d]+:[\d]+)\s(?P[^ ]+)\s:\s+%ASA-(?P\d-\d{1,8}):", 4 | "$SSPT":r"(?P[\d]{1,5})", 5 | "$SDPT":r"(?P[\d]{1,5})", 6 | "$SSRC":r"(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})", 7 | "$SDST":r"(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})", 8 | "$SFADDR":r"(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})", 9 | "$SLADDR":r"(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})", 10 | "$SGADDR":r"(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"} 11 | -------------------------------------------------------------------------------- /testparser.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import string 4 | import ast 5 | import json 6 | from lib.libCorrParse import CorrParser 7 | from lib.libParser import BuildParsers 8 | 9 | ''' 10 | with open(sys.argv[1],'r') as input: 11 | f = BuildParsers(sys.argv[1]) 12 | print(f.results()) 13 | 14 | ''' 15 | with open(sys.argv[1],'r') as input: 16 | c = ast.literal_eval(input.read()) 17 | d = {"src":"10. 
2.0.1","dst":"10.2.0.2"}
18 |     b = CorrParser()
19 |     k = b.corrTest(c,d)
20 |     if k:
21 |         d['rulename'] = c['rulename']
22 |         d['score'] = c['score']
23 |         d['sev'] = c['sev']
24 |         d['pri'] = c['pri']
25 |         print(d)
26 | 
27 | 
28 | 
29 | 
30 | 
--------------------------------------------------------------------------------
/parsers/examplerule.json:
--------------------------------------------------------------------------------
1 | {"rulename":"tester",
2 | "rulekeyid":"CISCO0001",
3 | "rulekey":"CiscoLogs",
4 | "triggers":["%ASA","%ASA-"],
5 | "prerule":["$SCISCOTIMEHOST"],
6 | "rules":[{"name":"tester",
7 | "id":"000000001",
8 | "type":"DebugTraffic",
9 | "payload":"%ASA-6-302020",
10 | "npayload":"inbound",
11 | "info":"faddr\\s$SFADDR+[^ ]+\\sgaddr+\\s+$SGADDR+[^ ]+\\sladdr\\s$SLADDR"
12 | },
13 | {"name":"tester2",
14 | "id":"000000002",
15 | "type":"DebugTraffic",
16 | "payload":"%ASA-6-302021",
17 | "npayload":"outbound",
18 | "info":"faddr\\s$SFADDR+[^ ]+\\sgaddr+\\s+$SGADDR+[^ ]+\\sladdr\\s$SLADDR"
19 | }]
20 | }
--------------------------------------------------------------------------------
/consumit.py:
--------------------------------------------------------------------------------
1 | import corr_rules
2 | import time
3 | import json
4 | import queue
5 | import threading
6 | from lib.redissettings import rServer
7 | 
8 | q = queue.Queue()
9 | 
10 | def getLog():
11 |     sub = rServer().sub("logs")
12 |     while True:
13 |         for item in sub.listen():
14 |             if item['data'] != 1:
15 |                 q.put(item['data'])
16 | 
17 | 
18 | def correlation():
19 |     while True:
20 |         data = q.get()
21 |         for i in dir(corr_rules):
22 |             if i.startswith("c"):
23 |                 new = getattr(corr_rules,i)
24 |                 new(json.loads(data))
25 |         q.task_done()
26 | 
27 | 
28 | def main():
29 |     for x in range(8):
30 |         t = threading.Thread(target=correlation)
31 |         t.daemon = True
32 |         t.start()
33 |     getLog()
34 |     q.join()
35 | 
36 | if __name__ == '__main__':
37 |     main()
38 | 
--------------------------------------------------------------------------------
/config/example-nxlog.conf:
--------------------------------------------------------------------------------
1 | # put your here, probably C:\Program Files\nxlog
2 | define ROOT C:\nxlog
3 | 
4 | Moduledir %ROOT%\modules
5 | CacheDir %ROOT%\data
6 | Pidfile %ROOT%\data\nxlog.pid
7 | SpoolDir %ROOT%\data
8 | LogFile %ROOT%\data\nxlog.log
9 | 
10 | 
11 |     Module xm_syslog
12 | 
13 | 
14 | 
15 |     Module xm_json
16 | 
17 | 
18 | 
19 |     Module im_msvistalog
20 |     # this kinda works for me, put * to get everything
21 |     Query \
22 |         \
23 |         \
24 |         \
25 |         \
26 |         \
27 | 
28 |     Exec $Message = to_json(); to_syslog_bsd();
29 | 
30 | 
31 | 
32 |     Module om_tcp
33 |     # obviously put your rsyslog ip here
34 |     Host 192.168.x.xxx
35 |     Port 514
36 | 
37 | 
38 | 
39 |     Path in => out
40 | 
41 | 
--------------------------------------------------------------------------------
/lib/redissettings.py:
--------------------------------------------------------------------------------
1 | import redis
2 | 
3 | rServerconfig = {
4 |     'host': 'localhost',
5 |     'port': 6379,
6 |     'db': 0,
7 | }
8 | 
9 | class rServer():
10 | 
11 |     def __init__(self):
12 |         self.r = redis.StrictRedis(**rServerconfig)
13 | 
14 |     def getKey(self,k):
15 |         if self.r.exists(k):
16 |             return self.r.get(k)
17 |         return False
18 | 
19 |     def setKey(self,k,limit=10,t=300):
20 |         if self.r.exists(k):
21 |             return False
22 |         else:
23 |             self.r.setex(k,limit,t)
24 |             return True
25 | 
26 |     def setList(self,hsh,d):
27 |         return self.r.rpush(hsh,d)
28 | 
29 |     def getList(self,hsh):
30 |         if self.r.exists(hsh):
31 |             c = self.r.lrange(hsh,0,-1)
32 |             return c
33 |         return False
34 | 
35 |     def incr(self,hsh):
36 |         self.r.incr(hsh,1)
37 |         return True
38 | 
39 |     def sub(self,topic):
40 |         d = self.r.pubsub()
41 |         d.subscribe(topic)
42 |         return d
43 | 
44 |     def pub(self,topic,data):
45 |         self.r.publish(topic,data)
46 | 
47 |     def delete(self,k):
48 |         self.r.delete(k)
--------------------------------------------------------------------------------
/lib/libNormalizer.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os
3 | import sys
4 | import string
5 | import xml.dom.minidom
6 | 
7 | class Normalize:
8 | 
9 |     def __init__(self):
10 |         pass
11 | 
12 |     def buildDataSource(self,r):
13 |         d = {}
14 |         for i in r:
15 |             d[i] = "None"
16 |         return d
17 | 
18 | 
19 |     def checklog(self,trig,logline):
20 |         d = trig.search(logline)
21 |         if d:
22 |             return True
23 |         return False
24 | 
25 |     def checkEvent(self,payload,npayload,logline):
26 |         d = payload.search(logline)
27 |         if d:
28 |             if npayload == 'None':
29 |                 return True
30 |             else:
31 |                 c = npayload.search(logline)
32 |                 if c:
33 |                     return False
34 |                 else:
35 |                     return True
36 |         return False
37 | 
38 |     def doEvent(self,trig,logline):
39 |         d = trig.search(logline)
40 |         if d:
41 |             return d
42 |         return False
43 | 
44 |     def pLog(self,triggers,log):
45 |         for c in triggers['triggers']:
46 |             chlog = self.checklog(c,log)
47 |             if chlog:
48 |                 return log
49 |         return False
50 | 
51 |     def pPreRule(self,id,prerule,log):
52 |         if id == prerule['id']:
53 |             d = prerule['prerule']['regex'].search(log)
54 |             if d:
55 |                 return d
56 |         return False
57 | 
58 |     def pRule(self,id,rules,log):
59 |         for id in rules:
60 |             if id == rules['id']:
61 |                 for b in rules['rules']:
62 |                     chev = self.checkEvent(rules['rules'][b]['payload'],rules['rules'][b]['npayload'],log)
63 |                     if chev:
64 |                         return b
65 |         return False
66 | 
--------------------------------------------------------------------------------
/tester1.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import re
3 | 
4 | Token = collections.namedtuple('Token', ['typ', 'value', 'line', 'column'])
5 | 
6 | def tokenize(code):
7 |     keywords = {'EQ', 'NEQ', 'ENDIF', 'FOR', 'NEXT', 'GOSUB', 'RETURN','OR'}
8 |     token_specification = [
9 |         ('NUMBER', r'\d+(\.\d*)?'),  # Integer or decimal number
10 |         ('ASSIGN', r':='),           # Assignment operator
11 |         ('END', r';'),               # Statement terminator
12 |         ('ID', r'[A-Za-z]+'),        # Identifiers
13 |         ('OP', r'[+\-*/]'),          # Arithmetic operators
14 |         ('NEWLINE', r'\n'),          # Line endings
15 |         ('SKIP', r'[ \t]+'),         # Skip over spaces and tabs
16 |         ('MISMATCH',r'.'),           # Any other character
17 |     ]
18 |     tok_regex = '|'.join('(?P<%s>%s)' % pair for pair in token_specification)
19 |     line_num = 1
20 |     line_start = 0
21 |     for mo in re.finditer(tok_regex, code):
22 |         kind = mo.lastgroup
23 |         value = mo.group(kind)
24 |         if kind == 'NEWLINE':
25 |             line_start = mo.end()
26 |             line_num += 1
27 |         elif kind == 'SKIP':
28 |             pass
29 |         elif kind == 'MISMATCH':
30 |             pass
31 |         else:
32 |             if kind == 'ID' and value in keywords:
33 |                 kind = value
34 |             column = mo.start() - line_start
35 |             yield Token(kind, value, line_num, column)
36 | 
37 | statements = '''
38 |     EQ test,test
39 |     total := total + price * quantity;
40 |     tax := price * 0.05;
41 |     ENDIF;
42 | '''
43 | 
44 | for token in tokenize(statements):
45 |     print(token)
--------------------------------------------------------------------------------
/parser.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os
3 | import sys
4 | import string
5 | import xml.dom.minidom
6 | import time
7 | import json
8 | import corr_rules
9 | from lib.libNormalizer import Normalize
10 | from lib.libParser import BuildParsers
11 | from lib.redissettings import rServer
12 | 
13 | 
14 | XMLFile = "parsers/parser.xml"
15 | b = BuildParsers(XMLFile)
16 | norm = Normalize()
17 | details = b.getRuleDetail()
18 | triggers = b.getRuleTriggers(details['rid'])
19 | prer = b.getPreRule(details['rid'])
20 | allrules = b.getRules(details['rid'])
21 | r = rServer()
22 | st = time.time()
23 | with open(sys.argv[1],'r') as f:
24 |     mlogs = f.readlines()
25 |     totals = len(mlogs)
26 |     for nlogs in mlogs:
27 |         d = norm.pLog(triggers,nlogs)
28 |         if d:
29 |             c = norm.pRule(details['rid'],allrules,d)
30 |             if c:
31 |                 newsource = norm.buildDataSource(details['rmeta'])
32 |                 prerule = norm.pPreRule(details['rid'],prer,nlogs)
33 |                 if prerule:
34 |                     prules = prerule.groupdict()
35 |                     for erule in prules:
36 |                         newsource[erule] = prules[erule]
37 |                 event = norm.doEvent(allrules['rules'][c]['info'],d)
38 |                 if event:
39 |                     b = event.groupdict()
40 |                     newsource['atype'] = allrules['rules'][c]['atype']
41 |                     newsource['aname'] = allrules['rules'][c]['aname']
42 |                     for i in b:
43 |                         newsource[i] = b[i]
44 |                     #
45 |                     #print newsource
46 |                     r.pub("logs", json.dumps(newsource))
47 |                     '''
48 |                     for i in dir(corr_rules):
49 |                         item = getattr(corr_rules,i)
50 |                         if callable(item):
51 |                             item(newsource)
52 |                     '''
53 |                     #print newsource
54 | et = time.time()
55 | dif = et - st
56 | print(dif)
57 | print(totals)
58 | 
--------------------------------------------------------------------------------
/lib/libParser.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os
3 | import sys
4 | import string
5 | import xml.etree.ElementTree as ET
6 | from lib.libMacros import macros
7 | 
8 | class BuildParsers:
9 | 
10 |     def __init__(self,rulef):
11 |         self.rules = {}
12 |         tree = ET.parse(rulef).getroot()
13 |         self.rules['key'] = tree.find("./detail/rkey").text
14 |         self.rules['type'] = tree.find("./detail/rtype").text
15 |         self.rules['id'] = tree.find('./detail/rid').text
16 |         self.rules['triggers'] = []
17 |         self.rules['prerule'] = []
18 |         self.rules['rules'] = []
19 |         for i in tree.find("./triggers"):
20 |             self.rules['triggers'].append(self._Macros(i.text))
21 |         for i in tree.find("./prerules"):
22 |             self.rules['prerule'].append(self._Macros(i.text))
23 |         for i in tree.findall("./rules/rule"):
24 |             c = {}
25 |             c['aname'] = i.get('aname')
26 |             c['atype'] = i.get('atype')
27 |             c['id'] = i.get('id')
28 |             c['payload'] = self._Macros(i.find("payload").text)
29 |             if i.find('npayload').text == "None":
30 |                 c['npayload'] = 'None'
31 |             else:
32 |                 c['npayload'] = self._Macros(i.find("npayload").text)
33 |             c['info'] = self._Macros(i.find('info').text)
34 |             self.rules['rules'].append(c)
35 | 
36 |     def results(self):
37 |         return self.rules
38 | 
39 |     def _DoMacrosList(self,d):
40 |         f = []
41 |         for i in d:
42 |             f.append(re.compile(self._Macros(i)))
43 |         return f
44 | 
45 |     def _Escape(self,d):
46 |         c = d.replace("$lt","<").replace("$gt",">")
47 |         return c
48 | 
49 |     def _Macros(self,d):
50 |         for i in macros:
51 |             d = d.replace(i,macros[i])
52 |         return re.compile(d)
53 | 
--------------------------------------------------------------------------------
/config/example-rsyslog.conf:
-------------------------------------------------------------------------------- 1 | #this should(!) be big enough 2 | $MaxMessageSize 64k 3 | 4 | #### MODULES #### 5 | 6 | # where we're getting logs from 7 | $ModLoad imuxsock 8 | $ModLoad imklog 9 | $ModLoad imudp 10 | $UDPServerRun 514 11 | $ModLoad imtcp 12 | $InputTCPServerRun 514 13 | 14 | # set some access rights to written log files 15 | $FileOwner root 16 | $FileGroup adm 17 | $FileCreateMode 0640 18 | $DirCreateMode 0755 19 | $Umask 0022 20 | 21 | # do NOT escape control chars 22 | $EscapeControlCharactersOnReceive off 23 | 24 | # templates 25 | $template TraditionalRFC3164,"<%PRI%>%timereported% %timegenerated% %HOSTNAME% %syslogtag:1:32% %msg:::drop-last-lf%\n" 26 | $template DailyPerHostLogs,"/var/log/remote/%HOSTNAME:::lower%/messages.log" 27 | 28 | # json template for logstash 29 | # generic template (cisco, firewall, etc) 30 | $template generic_json,"{%timestamp:::date-rfc3339,jsonf:@timestamp%,%source:::jsonf:@source_host%,\"@source\":\"syslog://%fromhost-ip:::json%\",\"@message\":\"%msg:::json%\",\"@fields\":{%syslogfacility-text:::jsonf%,%syslogseverity-text:::jsonf%,%app-name:::jsonf%,%procid:::jsonf%}}" 31 | # windows hosts template (nxlog json) 32 | $template win_nxlog_json,"{%timestamp:::date-rfc3339,jsonf:@timestamp%,%source:::jsonf:@source_host%,\"@source\":\"syslog://%fromhost-ip:::json%\",\"@message\":%msg%,\"@fields\":{%app-name:::jsonf%,%syslogfacility-text:::jsonf%,%syslogseverity-text:::jsonf%,%procid:::jsonf%}}\n" 33 | 34 | # NB: I'm sending to UDP locally, use @@localhost:55514 if need TCP 35 | # generic hosts to logstash: 36 | if ($fromhost-ip == '...' or $fromhost-ip == '...' or $fromhost-ip == '...' ) then @localhost:55514;generic_json 37 | # windows hosts to logstash: 38 | if ($fromhost-ip == '...' or $fromhost-ip == '...' or $fromhost-ip == '...' ) then @localhost:55514;win_nxlog_json 39 | 40 | # Everything from remote hosts to files goes to disk in folders named by host 41 | if ($fromhost-ip != '127.0.0.1') then -?DailyPerHostLogs;TraditionalRFC3164 42 | & ~ 43 | 44 | # NOTE: use your system's rules for local messages: 45 | # Standard log files. Log by facility. 
46 | *.* -/var/log/messages 47 | -------------------------------------------------------------------------------- /corrruleexamples.txt: -------------------------------------------------------------------------------- 1 | {"EQ":[data,data], 2 | "NEQ":[data,data], 3 | "GT":[data,data], 4 | "GE:[data,data], 5 | "LT":[data,data], 6 | "LE":[data,data], 7 | "REX":[data,data], 8 | "NREX":[data,data], 9 | "IREX":[data,data], 10 | "IRANGE":[data,range,range], 11 | "NRANGE":[data,range,range], 12 | "ALIST":[data,list], 13 | "ILIST":[data,list], 14 | "NLIST":[data,list], 15 | "IN":[data,data], 16 | "NIN":[data,data], 17 | "IPCIDR":[ip,cidr], 18 | "IPRANGE":[ip,range,range], 19 | "NIPRANGE":[ip,range,range], 20 | "COUNT":[[data],limit,time], 21 | "DCOUNT":[[data],[distinct],limit,time] 22 | "OR":{}} 23 | 24 | {"rulename":"Source IP in Established ThreatLists", 25 | "id":"100000005", 26 | "descp":"Source IP in log is in a ThreatList", 27 | "score":50, 28 | "sev":1, 29 | "pri":5, 30 | "rule":{"OR":{"ILIST":["sip","portsweeplist"], 31 | "ILIST":["sip","portscanlist"], 32 | "ILIST":["sip","sshbruteforcelist"], 33 | "ILIST":["sip","badhostlist'] 34 | } 35 | } 36 | } 37 | 38 | {"rulename":"Port Scan Detected", 39 | "id":"10000000", 40 | "descp":"Rule Fires for some kind of port scan", 41 | "score":20, 42 | "sev":3, 43 | "pri":3, 44 | "rule":{"EQ":["blocked","false"], 45 | "DCOUNT":[["sip"],["dport"],100,6000], 46 | "ALIST":['sip','portscanlist'] 47 | } 48 | } 49 | 50 | {"rulename":"Port Sweep Detected", 51 | "id":"10000001", 52 | "descp":"Rule Fires when the same ip trys to hit 50 hosts in 30seconds on same port", 53 | "score":20, 54 | "sev":3, 55 | "pri":3, 56 | "rule":{"DCOUNT":[["sip","dport"],["dip"],50,30], 57 | "ALIST":["sip","portsweeplist"] 58 | } 59 | } 60 | 61 | {"rulename":"SSH Brute Force Attempt TCP Drops", 62 | "id":"100000002", 63 | "descp":"Rule triggers on failed attempts with same ip 10 times in 5 mins", 64 | "score":20, 65 | "sev":3, 66 | "pri":3, 67 | "rule":{"EQ":["dport","22"], 68 | "EQ":["proto","tcp"], 69 | "EQ":["blocked","false"], 70 | "COUNT":[["sip","dip"],10,60], 71 | "ALIST":['sip','sshbruteforcelist'], 72 | "ALIST":['sip','badhostlist'] 73 | } 74 | } -------------------------------------------------------------------------------- /config/loggen.py: -------------------------------------------------------------------------------- 1 | from faker import Faker 2 | from datetime import datetime 3 | import random 4 | import time 5 | 6 | LINE = """\ 7 | {remote_addr} - - [{time_local} +0000] "{request_type} {request_path} HTTP/1.1" {status} {body_bytes_sent} "{http_referer}" "{http_user_agent}"\ 8 | """ 9 | 10 | LOG_FILE_A = "log_a.txt" 11 | LOG_FILE_B = "log_b.txt" 12 | LOG_MAX = 100 13 | 14 | def generate_log_line(): 15 | fake = Faker() 16 | now = datetime.now() 17 | remote_addr = fake.ipv4() 18 | time_local = now.strftime('%d/%b/%Y:%H:%M:%S') 19 | request_type = random.choice(["GET", "POST", "PUT"]) 20 | request_path = "/" + fake.uri_path() 21 | 22 | status = random.choice([200, 401, 404]) 23 | body_bytes_sent = random.choice(range(5, 1000, 1)) 24 | http_referer = fake.uri() 25 | http_user_agent = fake.user_agent() 26 | 27 | log_line = LINE.format( 28 | remote_addr=remote_addr, 29 | time_local=time_local, 30 | request_type=request_type, 31 | request_path=request_path, 32 | status=status, 33 | body_bytes_sent=body_bytes_sent, 34 | http_referer=http_referer, 35 | http_user_agent=http_user_agent 36 | ) 37 | 38 | return log_line 39 | 40 | def write_log_line(log_file, line): 41 | with 
open(log_file, "a") as f:
42 |         f.write(line)
43 |         f.write("\n")
44 | 
45 | def clear_log_file(log_file):
46 |     with open(log_file, "w+") as f:
47 |         f.write("")
48 | 
49 | if __name__ == "__main__":
50 |     current_log_file = LOG_FILE_A
51 |     lines_written = 0
52 | 
53 |     clear_log_file(LOG_FILE_A)
54 |     clear_log_file(LOG_FILE_B)
55 | 
56 |     while True:
57 |         line = generate_log_line()
58 | 
59 |         write_log_line(current_log_file, line)
60 |         lines_written += 1
61 | 
62 |         if lines_written % LOG_MAX == 0:
63 |             new_log_file = LOG_FILE_B
64 |             if current_log_file == LOG_FILE_B:
65 |                 new_log_file = LOG_FILE_A
66 | 
67 |             clear_log_file(new_log_file)
68 |             current_log_file = new_log_file
69 | 
70 |         sleep_time = random.choice(range(1, 5, 1))
71 | 
72 |         time.sleep(sleep_time)
--------------------------------------------------------------------------------
/corr_rules.py:
--------------------------------------------------------------------------------
1 | from lib.libCorr import Corr
2 | import hashlib
3 | import time
4 | import threading
5 | 
6 | p_lock = threading.Lock()
7 | 
8 | def c_portscan(d):
9 |     if d['dpt'] == 'None':
10 |         return
11 |     hsh = [d['ips'],d['ipd'],"PortScanner"]
12 |     b = Corr()
13 |     b.OR([b.neq(443,int(d['spt'])),b.neq(80,int(d['spt']))])\
14 |         .DCounter(hsh,destinct=d['spt'],count=20,limit=300)\
15 |         .CheckNotInList("PortScanner",d['ips'])\
16 |         .AddtoList("PortScanner",d['ips'])
17 |     if b.test == True:
18 |         with p_lock:
19 |             print("{}-{}-PortScan from IP address-src:{}".format(int(time.time()),"000001",d['ips']))
20 |         return
21 |     return
22 | 
23 | def c_tcpdrops(d):
24 |     if d['dpt'] == 'None':
25 |         return
26 |     hsh = [d['ips'],d['aname'],'ExcessiveTCPDrops']
27 |     b = Corr()
28 |     b.EQ("Firewall Deny TCP (no connection)",d['aname'])\
29 |         .CheckNotInList("ExcessiveTCPDrops",d['ips'])\
30 |         .Counter(hsh,count=100)\
31 |         .AddtoList("ExcessiveTCPDrops",d['ips'])
32 |     if b.test == True:
33 |         with p_lock:
34 |             print("{}-{}-Excessive TCP DENYs By Source count 100-src:{}-spt:{}-dst:{}-dpt:{}".format(int(time.time()),"0010",d['ips'],d['spt'],d['ipd'],d['dpt']))
35 |         return
36 |     return
37 | 
38 | def c_webtraffic(d):
39 |     if d['spt'] == 'None':
40 |         return
41 |     hsh = [d['ips'],d['ipd'],"ExcessiveWebTraffic"]
42 |     b = Corr()
43 |     b.OR([b.eq(80,int(d['spt'])),b.eq(443,int(d['spt']))])\
44 |         .CheckNotInList("ExcessiveWebRequests",d['ips'])\
45 |         .Counter(hsh,count=400)\
46 |         .AddtoList("ExcessiveWebRequests",d['ips'])
47 |     if b.test == True:
48 |         with p_lock:
49 |             print("{}-{}-Excessive OutBound WebTraffic 300 times-src:{}-spt:{}-dst:{}-dpt:{}".format(int(time.time()),"0011",d['ips'],d['spt'],d['ipd'],d['dpt']))
50 |         return
51 |     return
52 | 
53 | def c_dns(d):
54 |     if d['dpt'] == 'None':
55 |         return
56 |     hsh = [d['ips'],d['ipd'],d['dpt'],"ExcessiveDNS"]
57 |     b = Corr()
58 |     b.EQ(53,int(d['dpt']))\
59 |         .EQ("6-302015",d['v_id'])\
60 |         .CheckNotInList("DNSREQUESTERS",d['ips'])\
61 |         .Counter(hsh,count=200)\
62 |         .AddtoList("DNSREQUESTERS",d['ips'])
63 |     if b.test == True:
64 |         with p_lock:
65 |             print("{}-{}-Excessive DNS Triggered 200 times-src:{}-dst:{}-dpt:{}".format(int(time.time()),"0012",d['ips'],d['ipd'],d['dpt']))
66 |         return
67 |     return
68 | 
69 | 
--------------------------------------------------------------------------------
/parsers/parser.xml:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | Cisco-ASA Firewall Rule
4 | CISCO-ASA
5 | Firewall
6 | 0000001
7 | 
8 | 
9 | %ASA
10 | %ASA-
11 | 
12 | 
13 | $SCISCOTIMEHOST
14 | 
15 | 
16 | 
20 | %ASA-6-302020
21 | inbound
22 | 
faddr\s$SFADDR+[^ ]+\sgaddr+\s+$SGADDR+[^ ]+\sladdr\s$SLADDR 23 | 24 | 28 | %ASA-6-302020 29 | outnbound 30 | faddr\s$SFADDR+[^ ]+\sgaddr+\s+$SGADDR+[^ ]+\sladdr\s$SLADDR 31 | 32 | 36 | %ASA-6-302015 37 | inbound 38 | $SSRC/$SSPT\s\($IP\/$PORT\)\s.*\s.*:$SDST/$SDPT 39 | 40 | 44 | %ASA-6-302015 45 | outbound 46 | $SSRC/$SSPT\s\($IP\/$PORT\)\s.*\s.*:$SDST/$SDPT 47 | 48 | 52 | %ASA-6-302013 53 | inbound 54 | $SSRC/$SSPT\s\($IP\/$PORT\)\s.*\s.*:$SDST/$SDPT 55 | 56 | 60 | %ASA-6-106015:\sDeny\sTCP 61 | None 62 | \(no\sconnection\)\sfrom\s$SSRC\/$SSPT\sto\s$SDST\/$SDPT 63 | 64 | 68 | %ASA-4-106023:\sDeny\sprotocol 69 | None 70 | $SSRC.*?:$SDST 71 | 72 | 73 | -------------------------------------------------------------------------------- /.vscode/.ropeproject/config.py: -------------------------------------------------------------------------------- 1 | # The default ``config.py`` 2 | # flake8: noqa 3 | 4 | 5 | def set_prefs(prefs): 6 | """This function is called before opening the project""" 7 | 8 | # Specify which files and folders to ignore in the project. 9 | # Changes to ignored resources are not added to the history and 10 | # VCSs. Also they are not returned in `Project.get_files()`. 11 | # Note that ``?`` and ``*`` match all characters but slashes. 12 | # '*.pyc': matches 'test.pyc' and 'pkg/test.pyc' 13 | # 'mod*.pyc': matches 'test/mod1.pyc' but not 'mod/1.pyc' 14 | # '.svn': matches 'pkg/.svn' and all of its children 15 | # 'build/*.o': matches 'build/lib.o' but not 'build/sub/lib.o' 16 | # 'build//*.o': matches 'build/lib.o' and 'build/sub/lib.o' 17 | prefs['ignored_resources'] = ['*.pyc', '*~', '.ropeproject', 18 | '.hg', '.svn', '_svn', '.git', '.tox'] 19 | 20 | # Specifies which files should be considered python files. It is 21 | # useful when you have scripts inside your project. Only files 22 | # ending with ``.py`` are considered to be python files by 23 | # default. 24 | #prefs['python_files'] = ['*.py'] 25 | 26 | # Custom source folders: By default rope searches the project 27 | # for finding source folders (folders that should be searched 28 | # for finding modules). You can add paths to that list. Note 29 | # that rope guesses project source folders correctly most of the 30 | # time; use this if you have any problems. 31 | # The folders should be relative to project root and use '/' for 32 | # separating folders regardless of the platform rope is running on. 33 | # 'src/my_source_folder' for instance. 34 | #prefs.add('source_folders', 'src') 35 | 36 | # You can extend python path for looking up modules 37 | #prefs.add('python_path', '~/python/') 38 | 39 | # Should rope save object information or not. 40 | prefs['save_objectdb'] = True 41 | prefs['compress_objectdb'] = False 42 | 43 | # If `True`, rope analyzes each module when it is being saved. 44 | prefs['automatic_soa'] = True 45 | # The depth of calls to follow in static object analysis 46 | prefs['soa_followed_calls'] = 0 47 | 48 | # If `False` when running modules or unit tests "dynamic object 49 | # analysis" is turned off. This makes them much faster. 50 | prefs['perform_doa'] = True 51 | 52 | # Rope can check the validity of its object DB when running. 53 | prefs['validate_objectdb'] = True 54 | 55 | # How many undos to hold? 56 | prefs['max_history_items'] = 32 57 | 58 | # Shows whether to save history across sessions. 59 | prefs['save_history'] = True 60 | prefs['compress_history'] = False 61 | 62 | # Set the number spaces used for indenting. According to 63 | # :PEP:`8`, it is best to use 4 spaces. 
Since most of rope's 64 | # unit-tests use 4 spaces it is more reliable, too. 65 | prefs['indent_size'] = 4 66 | 67 | # Builtin and c-extension modules that are allowed to be imported 68 | # and inspected by rope. 69 | prefs['extension_modules'] = [] 70 | 71 | # Add all standard c-extensions to extension_modules list. 72 | prefs['import_dynload_stdmods'] = True 73 | 74 | # If `True` modules with syntax errors are considered to be empty. 75 | # The default value is `False`; When `False` syntax errors raise 76 | # `rope.base.exceptions.ModuleSyntaxError` exception. 77 | prefs['ignore_syntax_errors'] = False 78 | 79 | # If `True`, rope ignores unresolvable imports. Otherwise, they 80 | # appear in the importing namespace. 81 | prefs['ignore_bad_imports'] = False 82 | 83 | # If `True`, rope will insert new module imports as 84 | # `from import ` by default. 85 | prefs['prefer_module_from_imports'] = False 86 | 87 | # If `True`, rope will transform a comma list of imports into 88 | # multiple separate import statements when organizing 89 | # imports. 90 | prefs['split_imports'] = False 91 | 92 | # If `True`, rope will remove all top-level import statements and 93 | # reinsert them at the top of the module when making changes. 94 | prefs['pull_imports_to_top'] = True 95 | 96 | # If `True`, rope will sort imports alphabetically by module name instead of 97 | # alphabetically by import statement, with from imports after normal 98 | # imports. 99 | prefs['sort_imports_alphabetically'] = False 100 | 101 | # Location of implementation of rope.base.oi.type_hinting.interfaces.ITypeHintingFactory 102 | # In general case, you don't have to change this value, unless you're an rope expert. 103 | # Change this value to inject you own implementations of interfaces 104 | # listed in module rope.base.oi.type_hinting.providers.interfaces 105 | # For example, you can add you own providers for Django Models, or disable the search 106 | # type-hinting in a class hierarchy, etc. 107 | prefs['type_hinting_factory'] = 'rope.base.oi.type_hinting.factory.default_type_hinting_factory' 108 | 109 | 110 | def project_opened(project): 111 | """This function is called after opening the project""" 112 | # Do whatever you like here! 
113 | 
--------------------------------------------------------------------------------
/lib/libCorrParse.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import string
4 | from lib.libCorr import Corr
5 | 
6 | class CorrParser:
7 | 
8 |     def __init__(self):
9 |         pass
10 | 
11 |     def corrTest(self,rule,log):
12 |         corr = Corr()
13 |         for i in rule['rule']:
14 |             for b in i.keys():
15 |                 if b == 'NIN':
16 |                     try:
17 |                         if not corr.noin(log[i['NIN'][0]],i['NIN'][1]):
18 |                             return False
19 |                     except KeyError:
20 |                         return False
21 |                 if b == 'IN':
22 |                     try:
23 |                         if not corr.doin(log[i['IN'][0]],i['IN'][1]):
24 |                             return False
25 |                     except KeyError:
26 |                         return False
27 |                 if b == 'NLIST':
28 |                     try:
29 |                         if not corr.check_not_list(i['NLIST'][1],log[i['NLIST'][0]]):
30 |                             return False
31 |                     except KeyError:
32 |                         return False
33 |                 if b == 'ILIST':
34 |                     try:
35 |                         if not corr.check_a_list(i['ILIST'][1],log[i['ILIST'][0]]):
36 |                             return False
37 |                     except KeyError:
38 |                         return False
39 |                 if b == 'ALIST':
40 |                     try:
41 |                         if not corr.add_to_list(i['ALIST'][1],log[i['ALIST'][0]]):
42 |                             return False
43 |                     except KeyError:
44 |                         return False
45 |                 if b == "NRANGE":
46 |                     try:
47 |                         if not corr.nrange(log[i['NRANGE'][0]],i['NRANGE'][1],i['NRANGE'][2]):
48 |                             return False
49 |                     except KeyError:
50 |                         return False
51 |                 if b == "IRANGE":
52 |                     try:
53 |                         if not corr.irange(log[i['IRANGE'][0]],i['IRANGE'][1],i['IRANGE'][2]):
54 |                             return False
55 |                     except KeyError:
56 |                         return False
57 |                 if b == "IPCIDR":
58 |                     try:
59 |                         if not corr.ipcidr(log[i['IPCIDR'][0]],i['IPCIDR'][1]):
60 |                             return False
61 |                     except KeyError:
62 |                         return False
63 |                 if b == "NIPRANGE":
64 |                     try:
65 |                         if not corr.niprange(log[i['NIPRANGE'][0]],i['NIPRANGE'][1],i['NIPRANGE'][2]):
66 |                             return False
67 |                     except KeyError:
68 |                         return False
69 |                 if b == "IPRANGE":
70 |                     try:
71 |                         if not corr.iprange(log[i['IPRANGE'][0]],i['IPRANGE'][1],i['IPRANGE'][2]):
72 |                             return False
73 |                     except KeyError:
74 |                         return False
75 |                 if b == "NREX":
76 |                     try:
77 |                         if not corr.nreq(log[i['NREX'][0]],i['NREX'][1]):
78 |                             return False
79 |                     except KeyError:
80 |                         return False
81 |                 if b == "IREX":
82 |                     try:
83 |                         if not corr.ireq(log[i['IREX'][0]],i['IREX'][1]):
84 |                             return False
85 |                     except KeyError:
86 |                         return False
87 |                 if b == "REX":
88 |                     try:
89 |                         if not corr.req(log[i['REX'][0]],i['REX'][1]):
90 |                             return False
91 |                     except KeyError:
92 |                         return False
93 |                 if b == "LE":
94 |                     try:
95 |                         if not corr.le(log[i['LE'][0]],i['LE'][1]):
96 |                             return False
97 |                     except KeyError:
98 |                         return False
99 |                 if b == "LT":
100 |                     try:
101 |                         if not corr.lt(log[i['LT'][0]],i['LT'][1]):
102 |                             return False
103 |                     except KeyError:
104 |                         return False
105 |                 if b == "GE":
106 |                     try:
107 |                         if not corr.ge(log[i['GE'][0]],i['GE'][1]):
108 |                             return False
109 |                     except KeyError:
110 |                         return False
111 |                 if b == "GT":
112 |                     try:
113 |                         if not corr.gt(log[i['GT'][0]],i['GT'][1]):
114 |                             return False
115 |                     except KeyError:
116 |                         return False
117 |                 if b == "NEQ":
118 |                     try:
119 |                         if not corr.neq(log[i['NEQ'][0]],i['NEQ'][1]):
120 |                             return False
121 |                     except KeyError:
122 |                         return False
123 |                 if b == "EQ":
124 |                     try:
125 |                         if not corr.eq(log[i['EQ'][0]],i['EQ'][1]):
126 |                             return False
127 |                     except KeyError:
128 |                         return False
129 |         return True
--------------------------------------------------------------------------------
/lib/libCorr.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import string
4 | import re
5 | 
import time 6 | import struct 7 | import socket 8 | import hashlib 9 | from netaddr import IPAddress, IPNetwork 10 | from lib.redissettings import rServer 11 | 12 | class Corr(): 13 | 14 | def __init__(self): 15 | self.test = True 16 | self.rServer = rServer() 17 | 18 | def __doHash(self,args): 19 | hsh = ''.join('{}'.format(val.replace(" ","")) for val in args) 20 | hsh = hashlib.sha1(hsh).hexdigest() 21 | return hsh 22 | 23 | def _ip2int(self,addr): 24 | return struct.unpack("!I", socket.inet_aton(addr))[0] 25 | 26 | def __setTest(self,test): 27 | if test == True: 28 | self.test = True 29 | else: 30 | self.test = False 31 | 32 | def neq(self,a,b): 33 | if a != b: 34 | return True 35 | else: 36 | return False 37 | 38 | def eq(self,a,b): 39 | if a == b: 40 | return True 41 | else: 42 | return False 43 | 44 | def gt(self,a,b): 45 | if a > b: 46 | return True 47 | else: 48 | return False 49 | 50 | def ge(self,a,b): 51 | if a >= b: 52 | return True 53 | else: 54 | return False 55 | 56 | def lt(self,a,b): 57 | if a < b: 58 | return True 59 | else: 60 | return False 61 | 62 | def le(self,a,b): 63 | if a <= b: 64 | return True 65 | else: 66 | return False 67 | 68 | def req(self,a,b): 69 | try: 70 | r = re.compile(b) 71 | if r.search(a): 72 | return True 73 | except re.error: 74 | return False 75 | 76 | def nreq(self,a,b): 77 | try: 78 | r = re.compile(b) 79 | if r.search(a): 80 | return False 81 | except re.error: 82 | return True 83 | 84 | def ireq(self,a,b): 85 | try: 86 | r = re.compile(b,re.IGNORECASE) 87 | if r.search(a): 88 | return True 89 | except re.error: 90 | return False 91 | 92 | def irange(self,d,s,e): 93 | if d in range(s,e): 94 | return True 95 | else: 96 | return False 97 | 98 | def nrange(self,d,s,e): 99 | if d in range(s,e): 100 | return False 101 | else: 102 | return True 103 | 104 | def doin(self,a,b): 105 | if a in b: 106 | return True 107 | else: 108 | return False 109 | def noin(self,a,b): 110 | if a in b: 111 | return False 112 | else: 113 | return True 114 | 115 | def ipcidr(self,ip,ips): 116 | if IPAddress(ip) in IPNetwork(ips): 117 | return True 118 | else: 119 | return False 120 | 121 | def iprange(self,ip,ips,ipe): 122 | t = self._ip2int(ip) 123 | s = self._ip2int(ips) 124 | e = self._ip2int(ipe) 125 | if int(t) >= int(s) and int(t) <= int(e): 126 | return True 127 | else: 128 | return False 129 | 130 | def niprange(self,ip,ips,ipe): 131 | t = self._ip2int(ip) 132 | s = self._ip2int(ips) 133 | e = self._ip2int(ipe) 134 | if int(t) >= int(s) and int(t) <= int(e): 135 | return False 136 | else: 137 | return True 138 | 139 | def add_to_list(self,w,a): 140 | try: 141 | c = self.check_a_list(w,a) 142 | if c: 143 | return True 144 | else: 145 | self.rServer.setList(w,a) 146 | return True 147 | except: 148 | return False 149 | 150 | def check_a_list(self,w,a): 151 | try: 152 | c = self.rServer.getList(w) 153 | if a.encode('UTF-8') in c: 154 | return True 155 | else: 156 | return False 157 | except: 158 | return False 159 | 160 | def check_not_list(self,w,a): 161 | try: 162 | c = self.rServer.getList(w) 163 | if a.encode('UTF-8') in c: 164 | return False 165 | else: 166 | return True 167 | except: 168 | return True 169 | 170 | def __checkcounter(self,hsh,count,limit,lent): 171 | if count >= lent: 172 | self.rServer.incr(hsh) 173 | return False 174 | elif count <= lent: 175 | self.rServer.setKey(hsh,limit=count,t=limit) 176 | return True 177 | else: 178 | return False 179 | 180 | def __dcheckcounter(self,hsh,d): 181 | c = self.rServer.getList(hsh) 182 | if not c: 183 | 
self.rServer.setList(hsh,d) 184 | return False 185 | if d in c: 186 | return False 187 | else: 188 | self.rServer.delete(d) 189 | return True 190 | 191 | def __docounter(self,k,count,limit): 192 | c = self.rServer.getKey(k) 193 | if c == None: 194 | self.rServer.setKey(k,limit=limit,t=0) 195 | return False 196 | f = self.__checkcounter(k,count,limit,int(c)) 197 | if f == True: 198 | self.rServer.delete(k) 199 | return True 200 | else: 201 | return False 202 | 203 | def __dodcounter(self,d,destinct,count,limit): 204 | hsh = self.__doHash(d) 205 | f = "{}{}".format('DESTINCT',hsh) 206 | g = "{}{}".format('EXPIRE',hsh) 207 | p = self.__docounter(g,count,limit) 208 | b = self.__dcheckcounter(f,destinct) 209 | if p == True and b == True: 210 | return True 211 | else: 212 | return False 213 | 214 | def NEQ(self,a,b): 215 | if self.test == False: 216 | return self 217 | else: 218 | c = self.neq(a,b) 219 | self.__setTest(c) 220 | return self 221 | 222 | def EQ(self,a,b): 223 | if self.test == False: 224 | return self 225 | else: 226 | c = self.eq(a,b) 227 | self.__setTest(c) 228 | return self 229 | #Greater Than 230 | def GT(self,a,b): 231 | if self.test == False: 232 | return self 233 | else: 234 | c =self.gt(a,b) 235 | self.__setTest(c) 236 | return self 237 | 238 | #Greater Than Equal To 239 | def GE(self,a,b): 240 | if self.test == False: 241 | return self 242 | else: 243 | c = self.ge(a,b) 244 | self.__setTest(c) 245 | return self 246 | 247 | #Less Than 248 | def LT(self,a,b): 249 | if self.test == False: 250 | return self 251 | else: 252 | c = self.lt(a,b) 253 | self.__setTest(c) 254 | return self 255 | 256 | #Less Than Equal To 257 | def LE(self,a,b): 258 | if self.test == False: 259 | return self 260 | else: 261 | c = self.le(a,b) 262 | self.__setTest(c) 263 | return self 264 | 265 | #Reg Match Pattern 266 | def Reg(self,a,b): 267 | if self.test == False: 268 | return self 269 | else: 270 | c = self.req(a,b) 271 | self.__setTest(c) 272 | return self 273 | 274 | #No Reg Match 275 | def NReg(self,a,b): 276 | if self.test == False: 277 | return self 278 | else: 279 | c = self.nreq(a,b) 280 | self.__setTest(c) 281 | return self 282 | 283 | #Ignore Case Match Pattern 284 | def IReg(self,a,b): 285 | if self.test == False: 286 | return self 287 | else: 288 | c = self.ireq(a,b) 289 | self.__setTest(c) 290 | return self 291 | 292 | #Test in string or in list 293 | def IN(self,a,b): 294 | if self.test == False: 295 | return self 296 | else: 297 | c = self.doin(a,b) 298 | self.__setTest(c) 299 | return self 300 | 301 | #IP in Network Range 302 | def IPinCIDR(self,ip,ips): 303 | if self.test == False: 304 | return self 305 | else: 306 | c = self.ipcidr(ip,ips) 307 | self.__setTest(c) 308 | return self 309 | 310 | def Counter(self,key,count=5,limit=300): 311 | if self.test == False: 312 | return self 313 | else: 314 | c = self.__docounter(self.__doHash(key),count,limit) 315 | self.__setTest(c) 316 | return self 317 | 318 | def DCounter(self,key,destinct="",count=5,limit=300): 319 | if self.test == False: 320 | return self 321 | else: 322 | c = self.__dodcounter(key,destinct,count,limit) 323 | self.__setTest(c) 324 | return self 325 | 326 | def AddtoList(self,w,d): 327 | if self.test == False: 328 | return self 329 | self.add_to_list(w,d) 330 | return self 331 | 332 | def CheckInList(self,w,d): 333 | if self.test == False: 334 | return self 335 | else: 336 | c = self.check_a_list(w,d) 337 | self.__setTest(c) 338 | return self 339 | 340 | def CheckNotInList(self,w,d): 341 | if self.test == False: 342 | 
return self 343 | else: 344 | c = self.check_not_list(w,d) 345 | self.__setTest(c) 346 | return self 347 | 348 | def OR(self,args): 349 | if self.test == False: 350 | return self 351 | for i in args: 352 | if i == True: 353 | self.test = True 354 | return self 355 | self.test = False 356 | return self 357 | 358 | if __name__ == '__main__': 359 | b = Corr() 360 | b.EQ(1,1).OR([b.eq(3,1),b.gt(1,2)]).EQ(1,1) 361 | print(b.test) 362 | --------------------------------------------------------------------------------
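For reference, the intended flow of the pieces above is: parser.py normalizes raw syslog lines into flat dicts and publishes them on the Redis "logs" channel, consumit.py subscribes to that channel and feeds every event through the c_* functions in corr_rules.py, and those functions chain Corr() checks backed by Redis counters and lists. A minimal sketch of that handoff, assuming a local Redis instance and using the field names corr_rules.py expects (the aname value here is an arbitrary example, not one from the repo):

import json
from lib.redissettings import rServer

# a normalized event shaped like the dicts corr_rules.py works on
event = {"ips": "10.2.0.5", "ipd": "10.2.0.2", "spt": "51515", "dpt": "53",
         "aname": "example event name", "v_id": "6-302015"}

# publish on the same channel consumit.py subscribes to; with consumit.py
# running, each c_* rule in corr_rules.py is evaluated against this event
rServer().pub("logs", json.dumps(event))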