├── .gitignore
├── README.md
├── analyzer
├── config
│ ├── boot.py
│ └── settings.py
└── core
│ ├── server.py
│ ├── shivaaddnewrecord.py
│ ├── shivaconclude.py
│ ├── shivadbconfig.py
│ ├── shivamailparser.py
│ ├── shivamaindb.py
│ ├── shivanotifyerrors.py
│ ├── shivaprocessold.py
│ ├── shivapushtodb.py
│ ├── shivascheduler.py
│ └── trishula
│ ├── __init__.py
│ ├── backend_operations.py
│ ├── domaininfo.py
│ ├── google_safe_api.py
│ ├── learning.py
│ ├── phishing.py
│ ├── phishing_test.py
│ ├── report.py
│ ├── statistics.py
│ └── web.py
├── docs
├── AUTHORS
├── LICENSE
└── User Manual.pdf
├── helpers
├── clearlogs.sh
├── dbcreate.py
├── honeypot.service
├── honeypot.sh
├── maindb.sql
├── restart_receiver.sh
├── setup_exim4.sh
├── shiva.conf
├── tempdb.sql
└── update_shiva_packages.sh
├── hpfeeds
├── hpfeeds.py
├── parser
│ ├── README.md
│ ├── corelate.py
│ ├── dbcreate.py
│ ├── hpfeeds-db.sql
│ └── subscribe.py
└── sendfiles.py
├── install.sh
├── receiver
├── config
│ ├── boot.py
│ └── settings.py
├── core
│ ├── encoding.py
│ ├── queue.py
│ ├── server.py
│ └── smtpd.py
└── handlers
│ ├── forward.py
│ ├── log.py
│ ├── queue.py
│ └── spampot.py
├── trishula.txt
└── web
├── css
└── style.css
├── favicon.png
├── images
└── icons
│ ├── delete.png
│ ├── icon_minus.png
│ ├── icon_plus.png
│ ├── small_change_none.png
│ ├── small_change_to_phishing.png
│ ├── small_change_to_spam.png
│ └── small_marked_by_user.png
├── js
└── requests.js
└── templates
├── email_not_found.html
├── help.html
├── index.html
├── learning.html
├── list_emails.html
├── logs.html
├── parts
├── footer.html
├── header.html
├── learning_overview.html
└── mail_overview.html
└── view_email.html
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[cod]
2 |
3 | # C extensions
4 | *.so
5 |
6 | # Packages
7 | *.egg
8 | *.egg-info
9 | dist
10 | build
11 | eggs
12 | parts
13 | bin
14 | var
15 | sdist
16 | develop-eggs
17 | .installed.cfg
18 | lib
19 | lib64
20 | __pycache__
21 |
22 | # Installer logs
23 | pip-log.txt
24 |
25 | # Swap files
26 | *~
27 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Trishula
2 | -----------
3 | This project is a fork of the original SHIVA honeypot. It contains the module **Trishula** with a built-in phishing detection mechanism. For information and installation details, see the file 'trishula.txt'.
4 |
5 |
6 | About SHIVA
7 | -----------
8 | SHIVA: Spam Honeypot with Intelligent Virtual Analyzer, is an open but controlled relay Spam Honeypot (SpamPot), built on top of Lamson Python framework, with capability of collecting and analyzing all spam thrown at it. Analysis of data captured can be used to get information of phishing attacks, scamming campaigns, malware campaigns, spam botnets, etc. SHIVA is written in Python and currently uses MySQL as its back-end.
9 | SHIVA is released under GNU GPL v3.
10 |
11 |
12 | Mailing List
13 | ------------
14 | Subscribe to SHIVA mailing list - https://public.honeynet.org/mailman/listinfo/shiva.
15 | Mail your queries and suggestions to the mailing list at shiva@public.honeynet.org
16 |
17 |
18 | General SHIVA Documentation
19 | -------------
20 | The user manual can be found in the "docs" folder. The manual is divided into the following sections:
21 | * Introduction
22 | * Setting Up SHIVA
23 | * Configuration
24 | * Running SHIVA
25 | * General Problems and Precautions
26 | * FAQs
27 | * References
28 |
29 | Authors' and license information can also be found in the same folder.
30 |
31 |
32 |
--------------------------------------------------------------------------------
/analyzer/config/boot.py:
--------------------------------------------------------------------------------
# Boot script for the SHIVA analyzer: wires the Lamson framework together
# (relay, queue receiver, router and template loader) from the values
# computed in config/settings.py.
from config import settings
from lamson.routing import Router
from lamson.server import Relay, SMTPReceiver, QueueReceiver
from lamson import view, queue
import logging
import logging.config
import jinja2

logging.config.fileConfig("config/logging.conf")

# Relay host to actually send the final message to.
settings.relay = Relay(host=settings.relay_config['host'],
                       port=settings.relay_config['port'], debug=0)

# Include the maildir option we've set in settings.py
settings.receiver = QueueReceiver(settings.receiver_config['maildir'])

# Router setup: handler modules to load, default match arguments, hot
# reloading, and where undeliverable mail gets parked.
Router.defaults(**settings.router_defaults)
Router.load(settings.handlers)
Router.RELOAD=True
Router.UNDELIVERABLE_QUEUE=queue.Queue("run/undeliverable")

# Jinja2 environment used by lamson.view to render templates.
view.LOADER = jinja2.Environment(
    loader=jinja2.PackageLoader(settings.template_config['dir'],
                                settings.template_config['module']))
26 |
27 |
--------------------------------------------------------------------------------
/analyzer/config/settings.py:
--------------------------------------------------------------------------------
# This file contains python variables that configure Lamson for email processing.
import logging
import ConfigParser
import os


# shiva.conf lives above the lamson project tree; resolve it relative to
# this file so the analyzer can be launched from any working directory.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../shiva.conf"
config = ConfigParser.ConfigParser()
config.read(confpath)

queuePath = config.get('global', 'queuepath')       # maildir the queue receiver watches
relayhost = config.get('analyzer', 'relayhost')     # smart host for outbound relaying
relayport = config.getint('analyzer', 'relayport')  # relay port, parsed as int

# Dictionaries consumed by config/boot.py when building Relay/QueueReceiver.
relay_config = {'host': relayhost, 'port': relayport}
receiver_config = {'maildir': queuePath}
handlers = ['app.handlers.sample']   # Lamson handler modules to load
router_defaults = {'host': '.+'}     # default: match any host
template_config = {'dir': 'app', 'module': 'templates'}
--------------------------------------------------------------------------------
/analyzer/core/server.py:
--------------------------------------------------------------------------------
1 | """
2 | The majority of the server related things Lamson needs to run, like receivers,
3 | relays, and queue processors.
4 |
5 | SHIVA - QueueReceiver module has been amended to
6 | * Read to configuration file
7 | * Start the scheduler
8 | * Initializing the relay counter and spam list
9 | * And, making call to our customised module shivamailparser
10 | """
11 |
12 | import smtplib
13 | import smtpd
14 | import asyncore
15 | import threading
16 | import socket
17 | import logging
18 | import queue
19 | import mail
20 | import routing
21 | import time
22 | import traceback
23 | import ConfigParser
24 | import os
25 |
26 | from lamson.bounce import PRIMARY_STATUS_CODES, SECONDARY_STATUS_CODES, COMBINED_STATUS_CODES
27 |
28 | import shivascheduler
29 | import shivamailparser
30 | import shivadbconfig
31 | import trishula.learning as learning
32 | import trishula.web as web
33 | import MySQLdb as mdb
34 |
# Path to the global SHIVA configuration file, resolved relative to this
# module so the daemon can be started from any working directory.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../../../shiva.conf"
shivaconf = ConfigParser.ConfigParser()
shivaconf.read(confpath)

# Global dictionary to store whitelist email ids
# (spam id -> recipient list; seeded from the `whitelist` DB table in
# QueueReceiver.start and mutated by shivaaddnewrecord/shivaprocessold).
whitelist_ids = {'spammers_email':[]}
41 |
42 |
def undeliverable_message(raw_message, failure_type):
    """
    Used universally in this file to shove totally screwed messages
    into the routing.Router.UNDELIVERABLE_QUEUE (if it's set).

    raw_message is the original message text; failure_type is a short
    human-readable reason that ends up in the error log. If no
    undeliverable queue is configured the message is silently dropped.
    """
    if routing.Router.UNDELIVERABLE_QUEUE:
        key = routing.Router.UNDELIVERABLE_QUEUE.push(raw_message)

        logging.error("Failed to deliver message because of %r, put it in "
                      "undeliverable queue with key %r", failure_type, key)
53 |
class SMTPError(Exception):
    """
    You can raise this error when you want to abort with a SMTP error code to
    the client. This is really only relevant when you're using the
    SMTPReceiver and the client understands the error.

    If you give a message then it'll use that, but it'll also produce a
    consistent error message based on your code. It uses the errors in
    lamson.bounce to produce them.
    """
    def __init__(self, code, message=None):
        # code is a three-digit SMTP status code (e.g. 550); message, if
        # omitted, is derived from the lamson.bounce status-code tables.
        self.code = code
        self.message = message or self.error_for_code(code)

        Exception.__init__(self, "%d %s" % (self.code, self.message))

    def error_for_code(self, code):
        """Build a human-readable description for a three-digit SMTP code."""
        primary, secondary, tertiary = str(code)

        # BUGFIX: the combined lookup must use the raw digit characters.
        # The previous code looked it up AFTER primary/secondary had been
        # replaced by their description strings, so it could never match
        # (and 'tertiary' went unused).
        combined = COMBINED_STATUS_CODES.get(secondary + tertiary, "")
        primary = PRIMARY_STATUS_CODES.get(primary, "")
        secondary = SECONDARY_STATUS_CODES.get(secondary, "")

        return " ".join([primary, secondary, combined]).strip()
78 |
79 |
class Relay(object):
    """
    Used to talk to your "relay server" or smart host, this is probably the most
    important class in the handlers next to the lamson.routing.Router.
    It supports a few simple operations for sending mail, replying, and can
    log the protocol it uses to stderr if you set debug=1 on __init__.
    """
    def __init__(self, host='127.0.0.1', port=25, username=None, password=None,
                 ssl=False, starttls=False, debug=0):
        """
        The hostname and port we're connecting to, and the debug level (default to 0).
        Optional username and password for smtp authentication.
        If ssl is True smtplib.SMTP_SSL will be used.
        If starttls is True (and ssl False), smtp connection will be put in TLS mode.
        It does the hard work of delivering messages to the relay host.
        """
        self.hostname = host
        self.port = port
        self.debug = debug
        self.username = username
        self.password = password
        self.ssl = ssl
        self.starttls = starttls

    def configure_relay(self, hostname):
        """Open (and optionally authenticate/TLS-upgrade) an SMTP session."""
        if self.ssl:
            relay_host = smtplib.SMTP_SSL(hostname, self.port)
        else:
            relay_host = smtplib.SMTP(hostname, self.port)

        relay_host.set_debuglevel(self.debug)

        if self.starttls:
            relay_host.starttls()
        if self.username and self.password:
            relay_host.login(self.username, self.password)

        assert relay_host, 'Code error, tell Zed.'
        return relay_host

    def deliver(self, message, To=None, From=None):
        """
        Takes a fully formed email message and delivers it to the
        configured relay server.

        You can pass in an alternate To and From, which will be used in the
        SMTP send lines rather than what's in the message.
        """
        recipient = To or message['To']
        sender = From or message['From']

        hostname = self.hostname or self.resolve_relay_host(recipient)

        try:
            relay_host = self.configure_relay(hostname)
        except socket.error:
            logging.exception("Failed to connect to host %s:%d" % (hostname, self.port))
            return

        relay_host.sendmail(sender, recipient, str(message))
        #relay_host.sendmail(sender, recipient.split(","), str(message)) # Shiva - sendmail needs 'list' of recipients not strings. Fixed in lamson now.
        relay_host.quit()

    def resolve_relay_host(self, To):
        """Resolve the recipient's MX host; fall back to the bare domain."""
        import DNS
        address, target_host = To.split('@')
        mx_hosts = DNS.mxlookup(target_host)

        if not mx_hosts:
            logging.debug("Domain %r does not have an MX record, using %r instead.", target_host, target_host)
            return target_host
        else:
            logging.debug("Delivering to MX record %r for target %r", mx_hosts[0], target_host)
            return mx_hosts[0][1]


    def __repr__(self):
        """Used in logging and debugging to indicate where this relay goes."""
        # BUGFIX: the format string had been lost ("" % (...) raises
        # TypeError); restore the upstream lamson representation.
        return "<Relay to (%s:%d)>" % (self.hostname, self.port)


    def reply(self, original, From, Subject, Body):
        """Calls self.send but with the from and to of the original message reversed."""
        self.send(original['from'], From=From, Subject=Subject, Body=Body)

    def send(self, To, From, Subject, Body):
        """
        Does what it says, sends an email. If you need something more complex
        then look at lamson.mail.MailResponse.
        """
        msg = mail.MailResponse(To=To, From=From, Subject=Subject, Body=Body)
        self.deliver(msg)
173 |
174 |
class SMTPReceiver(smtpd.SMTPServer):
    """Receives emails and hands it to the Router for further processing."""

    def __init__(self, host='127.0.0.1', port=8825):
        """
        Initializes to bind on the given port and host/ipaddress. Typically
        in deployment you'd give 0.0.0.0 for "all internet devices" but consult
        your operating system.

        This uses smtpd.SMTPServer in the __init__, which means that you have to
        call this far after you use python-daemonize or else daemonize will
        close the socket.
        """
        self.host = host
        self.port = port
        smtpd.SMTPServer.__init__(self, (self.host, self.port), None)

    def start(self):
        """
        Kicks everything into gear and starts listening on the port. This
        fires off threads and waits until they are done.
        """
        logging.info("SMTPReceiver started on %s:%d." % (self.host, self.port))
        # asyncore.loop runs in a daemon-ish worker thread so start() returns.
        self.poller = threading.Thread(target=asyncore.loop,
                                       kwargs={'timeout':0.1, 'use_poll':True})
        self.poller.start()

    def process_message(self, Peer, From, To, Data):
        """
        Called by smtpd.SMTPServer when there's a message received.
        Returns an error string to the SMTP client when the handler raised
        SMTPError; otherwise delivery failures go to the undeliverable queue.
        """

        try:
            logging.debug("Message received from Peer: %r, From: %r, to To %r." % (Peer, From, To))
            routing.Router.deliver(mail.MailRequest(Peer, From, To, Data))
        except SMTPError, err:
            # looks like they want to return an error, so send it out
            return str(err)
            # NOTE(review): the call below is unreachable because of the
            # return above; kept byte-identical to preserve behavior.
            undeliverable_message(Data, "Handler raised SMTPError on purpose: %s" % err)
        except:
            logging.exception("Exception while processing message from Peer: %r, From: %r, to To %r." %
                              (Peer, From, To))
            undeliverable_message(Data, "Error in message %r:%r:%r, look in logs." % (Peer, From, To))


    def close(self):
        """Doesn't do anything except log who called this, since nobody should. Ever."""
        logging.error(traceback.format_exc())
223 |
224 |
class QueueReceiver(object):
    """
    Rather than listen on a socket this will watch a queue directory and
    process messages it receives from that. It works in almost the exact
    same way otherwise.

    SHIVA extension: class-level state below is shared by all the analyzer
    modules (shivaaddnewrecord, shivaprocessold, shivapushtodb).
    """

    records = [] # Global list that will hold spam records till they're pushed to db.
    deep_records = [] # Copy of records
    totalRelay = 0 # Global relay counter


    def __init__(self, queue_dir, sleep=10, size_limit=0, oversize_dir=None):
        """
        The router should be fully configured and ready to work, the
        queue_dir can be a fully qualified path or relative.

        sleep is the poll interval in seconds; size_limit/oversize_dir are
        forwarded to queue.Queue to divert oversized messages.
        """
        self.queue = queue.Queue(queue_dir, pop_limit=size_limit,
                                 oversize_dir=oversize_dir)
        self.queue_dir = queue_dir
        self.sleep = sleep

    def start(self, one_shot=False):
        """
        Start simply loops indefinitely sleeping and pulling messages
        off for processing when they are available.

        If you give one_shot=True it will run once rather than do a big
        while loop with a sleep.

        SHIVA extension: also boots the web interface, clears stale learning
        locks, starts the scheduler, and seeds server.whitelist_ids from the
        main database before entering the poll loop.
        """

        """setup web interface and api"""
        if not one_shot:
            web.main()

        """ remove possible lock file from previous learning """
        learning.free_learning_lock()

        logging.info("Queue receiver started on queue dir %s" %
                     (self.queue_dir))
        logging.debug("Sleeping for %d seconds..." % self.sleep)



        shivascheduler.schedule()
        inq = queue.Queue(self.queue_dir)

        # Get email-id's of spammers. Mail must get relayed to them.
        mainDb = shivadbconfig.dbconnectmain()
        whitelist = "SELECT `recipients` from `whitelist`"

        try:
            mainDb.execute(whitelist)
            record = mainDb.fetchone()

            global whitelist_ids


            # Keep only the 100 most recent comma-separated recipients,
            # de-duplicated (order is not preserved by set()).
            if ((record is None) or (record[0] is None)):
                whitelist_ids['spammers_email'] = []
            else:
                whitelist_ids['spammers_email'] = (record[0].encode('utf-8')).split(",")[-100:]
                whitelist_ids['spammers_email'] = list(set(whitelist_ids['spammers_email']))

            logging.info("[+] server Module: whitelist recipients:")
            for key, value in whitelist_ids.items():
                logging.info("key: %s, value: %s" % (key, value))
            mainDb.close()
        except mdb.Error, e:
            logging.critical("[-] Error (Module server.py) - some issue obtaining whitelist: %s" % e)


        while True:
            keys = inq.keys()
            for key in keys:
                msg = inq.get(key)

                if msg:
                    logging.debug("Pulled message with key: %r off", key)

                    # Shiva - Interupting normal flow execution of QueueReceiver here and calling our
                    # customized module shivamailparser to deal with mails retrieved from the queue.
                    # Send "key", which is actually the name of spam file in queue,
                    # msg", is actually complete mail body, is in MailRequest format

                    shivamailparser.main(key, msg)

                    # Irrespective of mail relayed or not, it has to be cleared from queue.
                    # Hence, whether process_message executes or not,
                    # the control comes back to this point and ultimately spam gets removed from queue
                    logging.debug("Removed %r key from queue.\n\n", key)

                    inq.remove(key)

            if one_shot:
                return
            else:
                time.sleep(self.sleep)

    # Function gets called only when a spam has to be relayed
    def process_message(self, msg):
        """
        Exactly the same as SMTPReceiver.process_message but just designed for the queue's
        quirks.

        msg is a MailRequest; the queue directory stands in for the peer.
        """
        #self.msg = self.start.msg

        try:
            Peer = self.queue_dir
            From = msg['from']
            To = [msg['to']]

            logging.debug("Message received from Peer: %r, From: %r, to To %r." % (Peer, From, To))
            routing.Router.deliver(msg)
        except SMTPError, err:
            # looks like they want to return an error, so send it out
            logging.exception("Raising SMTPError when running in a QueueReceiver is unsupported.")
            undeliverable_message(msg.original, err.message)
        except:
            logging.exception("Exception while processing message from Peer: "
                              "%r, From: %r, to To %r." % (Peer, From, To))
            undeliverable_message(msg.original, "Router failed to catch exception.")
347 |
--------------------------------------------------------------------------------
/analyzer/core/shivaaddnewrecord.py:
--------------------------------------------------------------------------------
1 | """This module inserts spam's details into a temporary list. This gets called
2 | everytime our analyzer come across a new/distinct spam. First, all the parser
3 | fields are stored as a dictionary and then, that dictionary is appended into
4 | the list.
5 | """
6 |
7 | import logging
8 | import server
9 | import shutil
10 | import re
11 |
12 | from trishula.learning import check_mail
13 |
def main(mailFields, key, msgMailRequest):
    """Store a newly seen spam as a record in the in-memory list.

    Args:
        mailFields: dict of parsed spam fields (from shivamailparser).
        key: name of the spam file in the queue directory.
        msgMailRequest: original MailRequest, used when the mail is relayed.

    Side effects: copies the raw spam into the phishing/ or spam/ archive,
    may relay the mail, and inserts the new record at the head of
    server.QueueReceiver.records.
    """
    logging.info("Inside shivaaddnewrecord Module.")

    rawspampath = server.shivaconf.get('analyzer', 'rawspampath')
    queuepath = server.shivaconf.get('global', 'queuepath')
    relay_enabled = server.shivaconf.getboolean('analyzer', 'relay')

    records = server.QueueReceiver.records
    source = queuepath + "/new/" + key
    # NOTE(review): rawspampath is assumed to end with a slash -- confirm in shiva.conf.
    filename = mailFields['s_id'] + "-" + key

    probability_tuple = (0,0)
    url_phishing = False
    phish_flag = None
    phishing_human_check = None

    # check whether email is imported manually
    sensor = mailFields['sensorID']
    if not sensor:
        sensor = 'default'


    if re.match('.*phishingImport.*',sensor):
        # manually imported as phishing: trust the human verdict
        probability_tuple = (-1,-1)
        phish_flag = True
        phishing_human_check = True
    elif re.match('.*spamImport.*',sensor):
        # manually imported as plain spam
        probability_tuple = (-1,-1)
        phish_flag = False
        phishing_human_check = False
    else:
        # email is not manually imported, compute score
        email_verdict = check_mail(mailFields)
        probability_tuple = (email_verdict['shiva_prob'],email_verdict['sa_prob'])
        url_phishing = email_verdict['urlPhishing']
        phish_flag = email_verdict['verdict']

    if phish_flag:
        destination = rawspampath + "phishing/" + filename
    else:
        destination = rawspampath + "spam/" + filename

    shutil.copy2(source, destination) # shutil.copy2() copies the meta-data too

    newRecord = { 'headers':mailFields['headers'],
                  'to':mailFields['to'],
                  'from':mailFields['from'],
                  'subject':mailFields['subject'],
                  'date':mailFields['date'],
                  'firstSeen':mailFields['firstSeen'],
                  'lastSeen':mailFields['lastSeen'],
                  'firstRelayed':mailFields['firstRelayed'],
                  'lastRelayed':mailFields['lastRelayed'],
                  'sourceIP':mailFields['sourceIP'],
                  'sensorID':mailFields['sensorID'],
                  'text':mailFields['text'],
                  'html':mailFields['html'],
                  'inlineFileName':mailFields['inlineFileName'],
                  'inlineFile':mailFields['inlineFile'],
                  'inlineFileMd5':mailFields['inlineFileMd5'],
                  'attachmentFileName': mailFields['attachmentFileName'],
                  'attachmentFile':mailFields['attachmentFile'],
                  'attachmentFileMd5':mailFields['attachmentFileMd5'],
                  'links':mailFields['links'],
                  'ssdeep':mailFields['ssdeep'],
                  's_id':mailFields['s_id'],
                  'len':mailFields['len'],
                  'phishingHumanCheck': phishing_human_check,
                  'derivedPhishingStatus': phish_flag,
                  'shivaScore': probability_tuple[0],
                  'spamassassinScore': probability_tuple[1],
                  'urlPhishing': url_phishing,
                  'counter':1,
                  'relayed':0 }

    if relay_enabled:
        relaycounter = server.shivaconf.getint('analyzer', 'globalcounter')

        if (int(server.QueueReceiver.totalRelay) > relaycounter):
            logging.info("[+]shivaaddnewrecord Module: Limit reached. No relay.")

        elif next((i for i, sublist in enumerate([myval for myval in server.whitelist_ids.values()]) if mailFields['to'] in sublist), -1) > -1:
            logging.info("[+]shivaaddnewrecord Module: Recipient found in white list - relaying")

            # Following 3 lines does the relaying
            queuePath = server.shivaconf.get('global', 'queuepath')
            processMessage = server.QueueReceiver(queuePath)
            processMessage.process_message(msgMailRequest)

            newRecord['relayed'] += 1
            server.QueueReceiver.totalRelay += 1
        else:
            logging.info("[+]shivaaddnewrecord Module: Adding recipient to whitelist and relaying")

            server.whitelist_ids[mailFields['s_id']] = mailFields['to'].split()

            # Loop variables renamed so they don't shadow the 'key' parameter.
            for wl_key, wl_value in server.whitelist_ids.items():
                logging.info("key: %s, value: %s" % (wl_key, wl_value))

            # Following 3 lines does the relaying
            queuePath = server.shivaconf.get('global', 'queuepath')
            processMessage = server.QueueReceiver(queuePath)
            processMessage.process_message(msgMailRequest)

            newRecord['relayed'] += 1
            server.QueueReceiver.totalRelay += 1


    records.insert(0, newRecord) #Inserting new record at the first position.
    del newRecord
128 |
129 |
--------------------------------------------------------------------------------
/analyzer/core/shivaconclude.py:
--------------------------------------------------------------------------------
1 | """This module decides that whether a spam is new or old. It checks this by
2 | comparing the spam against the records which are already there in temporary
3 | list. It first compares Md5 checksum, if not found, it compares against the
4 | SSDEEP hash. If spam is new, it passes it to shivaaddnewrecord module,
5 | for further processing. If it's an old spam, it passes it to shivaprocessold
6 | module.
7 | """
8 |
9 | import logging
10 |
11 | import server
12 | import ssdeep
13 | import shivaaddnewrecord
14 | import shivaprocessold
15 |
def main(mailFields, key, msgMailRequest):
    """Decides if a spam is new or old.
    Takes following parameters:
    a. mailFields - parsed spam fields,
    b. key - spam file name,
    c. msgMailRequest - original spam that is to be relayed.

    Passes spam to shivaaddnewrecord module if spam is new or list is empty.
    Else, passes spam to shivaprocessold module.
    """
    logging.info("[+]Inside shivadecide module.")
    records = server.QueueReceiver.records

    # Empty list: nothing to compare against, record the spam directly.
    if not records:
        shivaaddnewrecord.main(mailFields, key, msgMailRequest)
        return

    # Fuzzy-hash similarity threshold: stricter when the mail has a text part.
    similarity_threshold = 75 if mailFields['text'] else 85

    # Only records whose length is within +/-10% of this spam are candidates.
    spam_length = int(mailFields['len'])
    min_length = int(spam_length * 0.90)
    max_length = int(spam_length * 1.10)

    for candidate in records:
        if min_length <= candidate['len'] <= max_length:
            # Exact match on the record id first, then fall back to ssdeep.
            if mailFields['s_id'] == candidate['s_id']:
                shivaprocessold.main(mailFields, candidate['s_id'], key, msgMailRequest)
                break

            similarity = ssdeep.compare(mailFields['ssdeep'], candidate['ssdeep'])
            if similarity >= similarity_threshold:
                shivaprocessold.main(mailFields, candidate['s_id'], key, msgMailRequest)
                break
    else:
        # No existing record matched: this spam is new.
        shivaaddnewrecord.main(mailFields, key, msgMailRequest)
63 |
64 |
65 |
--------------------------------------------------------------------------------
/analyzer/core/shivadbconfig.py:
--------------------------------------------------------------------------------
1 | """This module contains the database connection parameters required by
2 | SHIVA for MySQL connection. This module also has the functions that might
3 | send the error notification mail and copy the spams to "distorted" folder
4 | that cannot be analyzed. To enable error notification, edit the
5 | senderrornotificationmail function.
6 | """
7 |
8 | import os
9 | import logging
10 | import shutil
11 | import smtplib
12 | import ConfigParser
13 |
14 | import MySQLdb as mdb
15 |
# Global SHIVA configuration, resolved relative to this module so the
# daemon can be started from any working directory.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../../../shiva.conf"
shivaconf = ConfigParser.ConfigParser()
shivaconf.read(confpath)

# MySQL credentials shared by both connection helpers below.
HOST = shivaconf.get('database', 'host')
USER = shivaconf.get('database', 'user')
PASS = shivaconf.get('database', 'password')
24 |
def dbconnect():
    """ Returns MySQL cursor.
    Temporary db connection parameters.

    Connects to the 'ShivaTemp' database with autocommit enabled.
    NOTE(review): on connection failure the error is logged and the
    function implicitly returns None -- callers must cope with that.
    """
    conn = None

    try:
        conn = mdb.connect (host = HOST,
                            user = USER,
                            passwd = PASS,
                            db = "ShivaTemp",
                            charset='utf8mb4',
                            use_unicode = True)
        conn.autocommit(True)
        cursor = conn.cursor()
        return cursor

    except mdb.Error, e:
        logging.critical("[-] Error (shivadbconfig.py) - %d: %s" % (e.args[0], e.args[1]))
44 |
def dbconnectmain():
    """Returns MySQL cursor.
    Main db connection parameters.

    Connects to the 'Shiva' database with autocommit enabled.
    NOTE(review): on connection failure the error is logged and the
    function implicitly returns None -- callers must cope with that.
    """
    conn1 = None

    try:
        conn1 = mdb.connect (host = HOST,
                             user = USER,
                             passwd = PASS,
                             db = "Shiva",
                             charset='utf8mb4',
                             use_unicode = True)
        conn1.autocommit(True)
        cursor = conn1.cursor()
        return cursor

    except mdb.Error, e:
        logging.critical("[-] Error (shivadbconfig.py) - %d: %s" % (e.args[0], e.args[1]))
64 |
--------------------------------------------------------------------------------
/analyzer/core/shivanotifyerrors.py:
--------------------------------------------------------------------------------
1 | '''
2 | __author__ = b0nd
3 | ver 1.0, 27th Oct, 2013
4 |
5 | Module sends notifications to developer/maintainer
6 |
7 | '''
8 | import shutil
9 | import os
10 | import smtplib
11 | import ConfigParser
12 | import server
13 |
14 |
15 |
16 | ## Error notification in case script confronts any issue
17 | def notifydeveloper(msg):
18 | senderid = server.shivaconf.get('notification', 'sender')
19 | recipient = server.shivaconf.get('notification', 'recipient')
20 | smtphost = server.shivaconf.get('analyzer', 'relayhost')
21 | smtpport = server.shivaconf.get('analyzer', 'relayport')
22 |
23 | message = """From: SHIVA spamp0t
24 | To: Developer
25 | MIME-Version: 1.0
26 | Content-type: text/html
27 | Subject: Master, SHIVA spamp0t confronted an issue
28 | """
29 | message += "Error Message:\n%s" % msg
30 | message += "you shall find sample in distorted directory"
31 |
32 | try:
33 | smtpobj = smtplib.SMTP(smtphost, smtpport)
34 | smtpobj.sendmail(senderid, recipient, message)
35 | print "\n\t[+] Error Notification Mail Sent Successfully"
36 | except smtplib.SMTPException:
37 | print "\n\t[!] Error: unable to send error notification mail via Exim4"
38 |
--------------------------------------------------------------------------------
/analyzer/core/shivaprocessold.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import datetime
3 |
4 | import server
5 |
6 |
def main(mailFields, matchedHash, key, msgMailRequest):
    """Merge a repeated spam into its existing in-memory record.

    Args:
        mailFields: dict of parsed spam fields for the incoming mail.
        matchedHash: s_id of the existing record this spam matched.
        key: name of the spam file in the queue directory.
        msgMailRequest: original MailRequest, used when the mail is relayed.

    Side effects: mutates the matching record in
    server.QueueReceiver.records (attachments, links, IPs, sensors,
    recipients, counters) and may relay the mail.
    """
    logging.info("[+]Inside shivaprocessold Module.")


    relay_enabled = server.shivaconf.getboolean('analyzer', 'relay')
    records = server.QueueReceiver.records

    for record in records:
        if record['s_id'] == matchedHash:

            # Merge any attachments not already present (keyed by md5).
            if mailFields['attachmentFileMd5']:
                i = 0
                while i < len(mailFields['attachmentFileMd5']):
                    if mailFields['attachmentFileMd5'][i] not in record['attachmentFileMd5']:
                        record['attachmentFile'].append(mailFields['attachmentFile'][i])
                        record['attachmentFileMd5'].append(mailFields['attachmentFileMd5'][i])
                        record['attachmentFileName'].append(mailFields['attachmentFileName'][i])
                    i += 1

            # Merge any new links.
            if mailFields['links']:
                for newLink in mailFields['links']:
                    if newLink not in record['links']:
                        record['links'].append(newLink)

            # Merge any inline files not already present (keyed by md5).
            if record['inlineFileMd5'] != mailFields['inlineFileMd5']:
                i = 0
                while i < len(mailFields['inlineFileMd5']):
                    if mailFields['inlineFileMd5'][i] not in record['inlineFileMd5']:
                        record['inlineFile'].append(mailFields['inlineFile'][i])
                        record['inlineFileMd5'].append(mailFields['inlineFileMd5'][i])
                        record['inlineFileName'].append(mailFields['inlineFileName'][i])
                    i += 1

            # Accumulate distinct source IPs, sensor IDs and recipients as
            # comma-separated strings.
            ipList = record['sourceIP'].split(", ")
            if mailFields['sourceIP'] not in ipList:
                record['sourceIP'] = record['sourceIP'] + ", " + mailFields['sourceIP']

            sensorIDs = record['sensorID'].split(", ")
            if mailFields['sensorID'] not in sensorIDs:
                record['sensorID'] = mailFields['sensorID'] + ", " + record['sensorID']

            recipients = record['to'].split(",")
            if mailFields['to'] not in recipients:
                record['to'] = record['to'] + "," + mailFields['to']

            record['counter'] += 1
            logging.info("value of record counter has reached: %s" % record['counter'])

            if relay_enabled:
                relaycounter = server.shivaconf.getint('analyzer', 'globalcounter')

                if (int(server.QueueReceiver.totalRelay) > relaycounter):
                    logging.info("[+]shivaprocessold Module: Limit reached. No relay.")
                    #individualcounter = server.shivaconf.getint('analyzer', 'individualcounter')

                elif next((i for i, sublist in enumerate([myval for myval in server.whitelist_ids.values()]) if mailFields['to'] in sublist), -1) > -1:
                    logging.info("[+]shivaprocessold Module: Recipient found in white list - relaying")

                    # Following 3 lines does the relaying
                    queuePath = server.shivaconf.get('global', 'queuepath')
                    processMessage = server.QueueReceiver(queuePath)
                    processMessage.process_message(msgMailRequest)

                    record['relayed'] += 1
                    server.QueueReceiver.totalRelay += 1
                else:
                    if record['counter'] <= 11:
                        if record['counter'] == 11:
                            # Eleven repeats looks like automated probing, so
                            # stop relaying for this spam id.
                            logging.info("counter is = 11")
                            logging.info("automated scanning has started - Not relaying anymore")
                            server.whitelist_ids.pop(mailFields['s_id'], None)

                            logging.info("poping automated key")
                            # Loop variables renamed so they don't shadow the 'key' parameter.
                            for wl_key, wl_value in server.whitelist_ids.items():
                                logging.info("key: %s, value: %s" % (wl_key, wl_value))

                    else:
                        logging.info("[+]shivaprocessold Module: Adding recipient to whitelist and relaying")

                        if mailFields['s_id'] in server.whitelist_ids:
                            logging.info("spam-id in whitlist - extending")
                            server.whitelist_ids[mailFields['s_id']].append(mailFields['to'])
                            #mailFields['attachmentFileName'].append(fileName)
                        else:
                            logging.info("spam-id not in whitelist - adding")
                            server.whitelist_ids[mailFields['s_id']] = mailFields['to'].split()

                        logging.info("\n\nprocessold after adding new recipient\n\n")
                        # Loop variables renamed so they don't shadow the 'key' parameter.
                        for wl_key, wl_value in server.whitelist_ids.items():
                            logging.info("key: %s, value: %s" % (wl_key, wl_value))

                        # Following 3 lines does the relaying
                        queuePath = server.shivaconf.get('global', 'queuepath')
                        processMessage = server.QueueReceiver(queuePath)
                        processMessage.process_message(msgMailRequest)

                        record['relayed'] += 1
                        server.QueueReceiver.totalRelay += 1
105 |
--------------------------------------------------------------------------------
/analyzer/core/shivapushtodb.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 |
4 | import logging
5 | import datetime
6 | import subprocess
7 | import os
8 | import sys
9 | import json
10 | import cPickle
11 | import copy
12 |
13 | import MySQLdb as mdb
14 |
15 | import server
16 | import shivadbconfig
17 | import shivanotifyerrors
18 |
def push():
    """Flush analyzed spam records into the local (temporary) database.

    Truncates the temporary tables, then for every record queued in
    ``server.QueueReceiver.deep_records``:
      * inserts the spam row,
      * dumps attachment and inline files to disk and records them,
      * records extracted links and the reporting sensor.
    Finally spawns shivamaindb.py to merge the data into the main database.

    FIXES over the original:
      * All SQL now uses parameterized queries. The original interpolated
        untrusted mail content (subject, body, headers, ...) directly into
        SQL strings, which broke on quotes and was an SQL injection vector.
      * The attachment/inline loops only advanced ``i`` when the insert
        succeeded, so a persistently failing insert looped forever.
      * Files are written via context managers so handles are always closed.
    """
    logging.info("[+]Inside shivapushtodb Module")
    notify = server.shivaconf.getboolean('notification', 'enabled')
    exeSql = shivadbconfig.dbconnect()

    attachpath = server.shivaconf.get('analyzer', 'attachpath')
    inlinepath = server.shivaconf.get('analyzer', 'inlinepath')

    # The local db only holds the current window of records; wipe it first.
    truncate = ['truncate attachments', 'truncate links', 'truncate sensors', 'truncate spam']
    for query in truncate:
        try:
            exeSql.execute(query)
        except Exception as e:
            logging.critical("[-] Error (shivapushtodb) truncate %s" % str(e))
            if notify is True:
                shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - truncate %s" % e)

    for record in server.QueueReceiver.deep_records:
        logging.info("Records are %d" % len(server.QueueReceiver.deep_records))

        # Tri-state phishing flags: explicit TRUE/FALSE map to booleans,
        # anything else is stored as SQL NULL (None).
        tristate = {'TRUE': True, 'FALSE': False}
        phishingHumanCheck = tristate.get(str(record['phishingHumanCheck']).upper())
        derivedPhishingStatus = tristate.get(str(record['derivedPhishingStatus']).upper())

        insertSpam = (
            "INSERT INTO `spam`(`id`, `ssdeep`, `to`, `from`, `textMessage`, "
            "`htmlMessage`, `subject`, `headers`, `sourceIP`, `sensorID`, "
            "`firstSeen`, `relayCounter`, `totalCounter`, `length`, `relayTime`, "
            "`shivaScore`, `spamassassinScore`, `derivedPhishingStatus`, "
            "`phishingHumanCheck`, `urlPhishing`) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, "
            "%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        spamValues = (
            str(record['s_id']), str(record['ssdeep']), str(record['to']),
            str(record['from']), str(record['text']), str(record['html']),
            str(record['subject']), str(record['headers']), str(record['sourceIP']),
            str(record['sensorID']), str(record['firstSeen']), str(record['relayed']),
            str(record['counter']), str(record['len']), str(record['firstRelayed']),
            str(record['shivaScore']), str(record['spamassassinScore']),
            derivedPhishingStatus, phishingHumanCheck, record['urlPhishing'])
        logging.critical('INSERT: ' + record['s_id'])
        try:
            exeSql.execute(insertSpam, spamValues)
        except mdb.Error as e:
            logging.critical("[-] Error (shivapushtodb insert_spam) - %d: %s" % (e.args[0], e.args[1]))
            if notify is True:
                shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - insertSpam %s" % e)

        # Checking for attachments and dumping into directory, if any. Also storing information in database.
        if len(record['attachmentFile']) > 0:
            i = 0
            while i < len(record['attachmentFile']):
                fileName = str(record['s_id']) + "-a-" + str(record['attachmentFileName'][i])
                path = attachpath + fileName
                with open(path, 'wb') as attachFile:
                    attachFile.write(record['attachmentFile'][i])
                insertAttachment = (
                    "INSERT INTO `attachments`(`spam_id`, `file_name`, `attach_type`, "
                    "`attachmentFileMd5`, `date`, `attachment_file_path`) "
                    "VALUES (%s, %s, %s, %s, %s, %s)")
                attachValues = (str(record['s_id']), str(record['attachmentFileName'][i]),
                                'attach', str(record['attachmentFileMd5'][i]),
                                str(record['date']), str(path))
                try:
                    exeSql.execute(insertAttachment, attachValues)
                except mdb.Error as e:
                    logging.critical("[-] Error (shivapushtodb insert_attachment) - %d: %s" % (e.args[0], e.args[1]))
                    if notify is True:
                        shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - insertAttachment %s" % e)
                # Always advance: the original incremented only on success,
                # which spun forever on a persistently failing insert.
                i += 1

        # Checking for inline attachment files
        if len(record['inlineFile']) > 0:
            i = 0
            while i < len(record['inlineFile']):
                fileName = str(record['s_id']) + "-i-" + str(record['inlineFileName'][i])
                path = inlinepath + fileName
                with open(path, 'wb') as attachFile:
                    attachFile.write(record['inlineFile'][i])
                insertInline = (
                    "INSERT INTO `attachments`(`spam_id`, `file_name`, `attach_type`, "
                    "`attachmentFileMd5`, `date`, `attachment_file_path`) "
                    "VALUES (%s, %s, %s, %s, %s, %s)")
                inlineValues = (str(record['s_id']), str(record['inlineFileName'][i]),
                                'inline', str(record['inlineFileMd5'][i]),
                                str(record['date']), str(path))
                try:
                    exeSql.execute(insertInline, inlineValues)
                except mdb.Error as e:
                    logging.critical("[-] Error (shivapushtodb insert_inline) - %d: %s" % (e.args[0], e.args[1]))
                    if notify is True:
                        shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - insertInline %s" % e)
                # Always advance (see note in the attachment loop above).
                i += 1

        # Checking for links in spams and storing them
        for link in record['links']:
            insertLink = ("INSERT INTO `links` (`spam_id`, `hyperlink`, `date`) "
                          "VALUES (%s, %s, %s)")
            try:
                exeSql.execute(insertLink, (str(record['s_id']), str(link['raw_link']), str(record['date'])))
            except mdb.Error as e:
                logging.critical("[-] Error (shivapushtodb insert_link) - %d: %s" % (e.args[0], e.args[1]))
                if notify is True:
                    shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - insertLink %s" % e)

        # Extracting and saving name of the sensor
        insertSensor = ("INSERT INTO `sensors` (`spam_id`, `sensorID`, `date`) "
                        "VALUES (%s, %s, %s)")
        try:
            exeSql.execute(insertSensor, (str(record['s_id']), str(record['sensorID']), str(record['date'])))
        except mdb.Error as e:
            logging.critical("[-] Error (shivapushtodb insert_sensor) - %d: %s" % (e.args[0], e.args[1]))
            if notify is True:
                shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - insertSensor %s" % e)

    # Hand the freshly pushed batch over to the main-db merge step.
    subprocess.Popen(['python', os.path.dirname(os.path.realpath(__file__)) + '/shivamaindb.py'])
    exeSql.close()
126 |
def sendfeed():
    """Publish analyzed records to hpfeeds.

    Sends the pickled record on the ``shiva.parsed`` channel, then each
    extracted URL and each source IP as JSON on ``shiva.ip.url``.
    Finally spawns the sendfiles helper for attachment distribution.

    FIXES over the original:
      * ``link[0]`` was indexed on the link dict; elsewhere (push()) links
        are accessed as ``link['raw_link']``, so ``link[0]`` raised KeyError.
      * If the hpfeeds connection failed, ``hpc`` stayed unbound and every
        later publish raised NameError; we now return early.
    """
    sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/hpfeeds/")
    import hpfeeds

    host = server.shivaconf.get('hpfeeds', 'host')
    port = server.shivaconf.getint('hpfeeds', 'port')
    ident = server.shivaconf.get('hpfeeds', 'ident')
    secret = server.shivaconf.get('hpfeeds', 'secret')
    channel = {"parsed": "shiva.parsed", "ip_url": "shiva.ip.url"}

    try:
        hpc = hpfeeds.new(host, port, ident, secret)
    except Exception as e:
        logging.critical("Cannot connect. %s" % e)
        # Without a connection nothing below can be published.
        return

    for record in server.QueueReceiver.deep_records:
        try:
            data = cPickle.dumps(record)
            hpc.publish(channel["parsed"], data)
            logging.info("Record sent.")
        except Exception as e:
            logging.critical("[-] Error (shivapushtodb parsed) in publishing to hpfeeds. %s" % e)

        # One JSON message per extracted URL.
        for link in record['links']:
            try:
                data = json.dumps({"id": record['s_id'], "url": link['raw_link']})
                hpc.publish(channel["ip_url"], data)
            except Exception as e:
                logging.critical("[-] Error (shivapushtodb link) in publishing to hpfeeds. %s" % e)

        # One JSON message per source IP (comma-separated in the record).
        ip_list = record['sourceIP'].split(',')
        for ip in ip_list:
            try:
                data = json.dumps({"id": record['s_id'], "source_ip": ip})
                hpc.publish(channel["ip_url"], data)
            except Exception as e:
                logging.critical("[-] Error (shivapushtodb ip) in publishing to hpfeeds. %s" % e)

    logging.info("[+]shivapushtodb Module: Calling sendfiles module.")
    subprocess.Popen(['python', os.path.dirname(os.path.realpath(__file__)) + '/hpfeeds/sendfiles.py'])
170 |
def cleanup():
    """Snapshot the processed records and reset the live queue state.

    Keeps a deep copy of ``QueueReceiver.records`` in ``deep_records`` for
    the push/feed stages, empties the live list and restarts the global
    relay counter from zero.
    """
    snapshot = copy.deepcopy(server.QueueReceiver.records)
    server.QueueReceiver.deep_records = snapshot
    server.QueueReceiver.records[:] = []
    server.QueueReceiver.totalRelay = 0
    logging.info("[+]shivapushtodb Module: List and global list counter resetted.")
176 |
def getspammeremails():
    """Populate ``server.whitelist_ids`` with relay-whitelisted recipients.

    Loads the last 100 stored recipients from the main database's
    ``whitelist`` table (deduplicated) under the ``'spammers_email'`` key,
    then maps each queued record's spam id to its recipient list when the
    record's counter is still below 30.
    """
    mainDb = shivadbconfig.dbconnectmain()
    notify = server.shivaconf.getboolean('notification', 'enabled')

    whitelist = "SELECT `recipients` from `whitelist`"

    try:
        mainDb.execute(whitelist)
        record = mainDb.fetchone()
        if record is None or record[0] is None:
            server.whitelist_ids['spammers_email'] = []
        else:
            # Keep only the most recent 100 recipients, deduplicated.
            recent = (record[0].encode('utf-8')).split(",")[-100:]
            server.whitelist_ids['spammers_email'] = list(set(recent))

        logging.info("[+] Pushtodb Module: whitelist recipients:")
        for key, value in server.whitelist_ids.items():
            logging.info("key: %s, value: %s" % (key, value))

        mainDb.close()

    except mdb.Error as e:
        logging.critical("[-] Error (Module shivapushtodb.py) - some issue obtaining whitelist: %s" % e)
        if notify is True:
            shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - getspammeremails %s" % e)

    for record in server.QueueReceiver.deep_records:
        try:
            if record['counter'] < 30:
                logging.info("type: %s, record to values: %s" % (type(record['to']), record['to']))
                server.whitelist_ids[record['s_id']] = record['to'].split(",")

                for key, value in server.whitelist_ids.items():
                    logging.info("New record - key: %s, value: %s" % (key, value))

        except Exception as e:
            if notify is True:
                shivanotifyerrors.notifydeveloper("[-] Error (Module shivapushtodb.py) - extending whitelist %s" % e)
223 |
--------------------------------------------------------------------------------
/analyzer/core/shivascheduler.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/python
2 | """
3 | Schedules a job to reset individual counters of relayed mails to 0.
4 | This would make sure each spammer finds spamPot relaying everyday.
5 | """
6 | import datetime
7 | import logging
8 |
9 | from apscheduler.scheduler import Scheduler
10 |
11 | import shivapushtodb
12 | import server
13 |
def resetcounter():
    """Periodic job body: snapshot the queue, refresh the whitelist, then
    push records to the local db and/or publish them on hpfeeds according
    to configuration."""
    logging.info("[+]shivascheduler.py: INSIDE SCHEDULER RESET COUNTER")
    shivapushtodb.cleanup()
    shivapushtodb.getspammeremails()

    use_localdb = server.shivaconf.getboolean('database', 'localdb')
    use_hpfeeds = server.shivaconf.getboolean('hpfeeds', 'enabled')

    if use_localdb:
        shivapushtodb.push()
        if use_hpfeeds:
            shivapushtodb.sendfeed()
    elif use_hpfeeds:
        logging.info("[+]shivascheduler.py: Local db is disabled. Sending data to hpfeeds.")
        shivapushtodb.sendfeed()
30 |
def schedule():
    """Start an interval job running resetcounter() every
    analyzer.schedulertime minutes."""
    logging.info("[+]shivascheduler.py: INSIDE SCHEDULER")
    duration = server.shivaconf.getint('analyzer', 'schedulertime')
    job_scheduler = Scheduler()
    job_scheduler.add_interval_job(resetcounter, minutes=duration)
    job_scheduler.start()
    logging.info("Shiva scheduler, which dumps data into maindb, resets global counter and sends data on hpfeeds, started at %s and would execute every %d minutes " % (datetime.datetime.now(), duration))
38 |
--------------------------------------------------------------------------------
/analyzer/core/trishula/__init__.py:
--------------------------------------------------------------------------------
1 | """
This is the Trishula package; it provides phishing detection and web interface extensions to Shiva
3 | """
--------------------------------------------------------------------------------
/analyzer/core/trishula/domaininfo.py:
--------------------------------------------------------------------------------
1 | """
2 | This module is responsible for communication with external
3 | services.
4 |
5 | """
6 |
7 | import struct
8 | import urllib
9 | import urllib2
10 | import httplib
11 | import re
12 | import xml.etree.ElementTree
13 | import urlparse
14 | import requests
15 |
16 | import lamson.server
17 | from google_safe_api import SafebrowsinglookupClient
18 |
class RankProvider(object):
    """Abstract base class for obtaining the page rank (popularity)
    of a URL from a provider such as Google or Alexa.
    TAKEN FROM https://github.com/aablack/websearchapp/blob/master/search/rank_provider.py
    """

    def __init__(self, host, proxy=None, timeout=30):
        """Keyword arguments:
        host -- toolbar host address
        proxy -- address of proxy server. Default: None
        timeout -- how long to wait for a response from the server.
                   Default: 30 (seconds)
        """
        self._host = host
        self._timeout = timeout
        # Shared opener; subclasses issue their HTTP requests through it.
        self._opener = urllib2.build_opener()
        if proxy:
            handler = urllib2.ProxyHandler({"http": proxy})
            self._opener.add_handler(handler)

    def get_rank(self, url):
        """Return the page rank for *url*.

        Subclasses must override this.
        """
        raise NotImplementedError("You must override get_rank()")
47 |
48 |
class AlexaTrafficRank(RankProvider):
    """ Get the Alexa Traffic Rank for a URL

    TAKEN FROM https://github.com/aablack/websearchapp/blob/master/search/rank_provider.py

    """
    def __init__(self, host="xml.alexa.com", proxy=None, timeout=30):
        """Keyword arguments:
        host -- toolbar host address: Default: joolbarqueries.google.com
        proxy -- address of proxy server (if required). Default: None
        timeout -- how long to wait for a response from the server.
        Default: 30 (seconds)

        """
        super(AlexaTrafficRank, self).__init__(host, proxy, timeout)

    def get_rank(self, url):
        """Get the page rank for the specified URL

        Keyword arguments:
        url -- get page rank for url

        """
        # NOTE(review): this unconditional return makes everything below
        # dead code, so every call yields -1 and the Alexa lookup is never
        # performed. It looks like the lookup was deliberately disabled --
        # confirm intent before removing this line.
        return -1
        try:
            # Query the Alexa toolbar XML endpoint for the given URL.
            query = "http://%s/data?%s" % (self._host, urllib.urlencode((
                ("cli", 10),
                ("dat", "nsa"),
                ("ver", "quirk-searchstatus"),
                ("uid", "1234"),
                ("userip", "192.168.0.1"),
                ("url", url))))

            response = self._opener.open(query, timeout=self._timeout)
            if response.getcode() == httplib.OK:
                data = response.read()

                # The rank lives at SD/POPULARITY@TEXT in the response XML.
                element = xml.etree.ElementTree.fromstring(data)
                for e in element.iterfind("SD"):
                    popularity = e.find("POPULARITY")
                    if popularity is not None:
                        return int(popularity.get("TEXT"))
        except:
            # Any network/parse failure maps to the "unknown" rank.
            return -1
        return -1
94 |
95 |
class GooglePageRank(RankProvider):
    """ Get the google page rank figure using the toolbar API.
    Credits to the author of the WWW::Google::PageRank CPAN package
    as I ported that code to Python.

    TAKEN FROM https://github.com/aablack/websearchapp/blob/master/search/rank_provider.py

    """
    def __init__(self, host="toolbarqueries.google.com", proxy=None, timeout=30):
        """Keyword arguments:
        host -- toolbar host address: Default: toolbarqueries.google.com
        proxy -- address of proxy server (if required). Default: None
        timeout -- how long to wait for a response from the server.
        Default: 30 (seconds)

        """
        super(GooglePageRank, self).__init__(host, proxy, timeout)
        # The toolbar endpoint expects the Google Toolbar user agent.
        self._opener.addheaders = [("User-agent", "Mozilla/4.0 (compatible; \
GoogleToolbar 2.0.111-big; Windows XP 5.1)")]

    def get_rank(self, url):
        """
        return google page rank of url, -1 on unknown/error
        """

        # calculate the hash which is required as part of the get
        # request sent to the toolbarqueries url.
        try:
            ch = '6' + str(self._compute_ch_new("info:%s" % (url)))

            query = "http://%s/tbr?%s" % (self._host, urllib.urlencode((
                ("client", "navclient-auto"),
                ("ch", ch),
                ("ie", "UTF-8"),
                ("oe", "UTF-8"),
                ("features", "Rank"),
                ("q", "info:%s" % (url)))))

            response = self._opener.open(query, timeout=self._timeout)
            if response.getcode() == httplib.OK:
                data = response.read()
                # Response body looks like "Rank_1:1:<rank>".
                match = re.match("Rank_\d+:\d+:(\d+)", data)
                if match:
                    rank = int(match.group(1))
                    return int(rank)
        except:
            # Any network/parse failure maps to the "unknown" rank.
            return -1
        return -1

    @classmethod
    def _compute_ch_new(cls, url):
        # Two-pass checksum: hash once, mangle, then hash the packed
        # sequence of 20 derived 32-bit words.
        ch = cls._compute_ch(url)
        ch = ((ch % 0x0d) & 7) | ((ch / 7) << 2);

        return cls._compute_ch(struct.pack("<20L", *(cls._wsub(ch, i * 9) for i in range(20))))

    @classmethod
    def _compute_ch(cls, url):
        # Bob Jenkins-style mixing hash over the raw bytes of `url`,
        # computed with explicit 32-bit wrap-around arithmetic.
        url = struct.unpack("%dB" % (len(url)), url)
        a = 0x9e3779b9
        b = 0x9e3779b9
        c = 0xe6359a60
        k = 0

        length = len(url)

        # Consume the input 12 bytes (three 32-bit little-endian words)
        # at a time.
        while length >= 12:
            a = cls._wadd(a, url[k+0] | (url[k+1] << 8) | (url[k+2] << 16) | (url[k+3] << 24));
            b = cls._wadd(b, url[k+4] | (url[k+5] << 8) | (url[k+6] << 16) | (url[k+7] << 24));
            c = cls._wadd(c, url[k+8] | (url[k+9] << 8) | (url[k+10] << 16) | (url[k+11] << 24));

            a, b, c = cls._mix(a, b, c)

            k += 12
            length -= 12

        c = cls._wadd(c, len(url));

        # Fold the trailing (< 12) bytes into a/b/c; fall-through on size.
        if length > 10: c = cls._wadd(c, url[k+10] << 24)
        if length > 9: c = cls._wadd(c, url[k+9] << 16)
        if length > 8: c = cls._wadd(c, url[k+8] << 8)
        if length > 7: b = cls._wadd(b, url[k+7] << 24)
        if length > 6: b = cls._wadd(b, url[k+6] << 16)
        if length > 5: b = cls._wadd(b, url[k+5] << 8)
        if length > 4: b = cls._wadd(b, url[k+4])
        if length > 3: a = cls._wadd(a, url[k+3] << 24)
        if length > 2: a = cls._wadd(a, url[k+2] << 16)
        if length > 1: a = cls._wadd(a, url[k+1] << 8)
        if length > 0: a = cls._wadd(a, url[k])

        a, b, c = cls._mix(a, b, c);

        # integer is always positive
        return c

    @classmethod
    def _mix(cls, a, b, c):
        # 32-bit avalanche mix; `% 4294967296` emulates uint32 overflow
        # on the left shifts.
        a = cls._wsub(a, b); a = cls._wsub(a, c); a ^= c >> 13;
        b = cls._wsub(b, c); b = cls._wsub(b, a); b ^= (a << 8) % 4294967296;
        c = cls._wsub(c, a); c = cls._wsub(c, b); c ^= b >>13;
        a = cls._wsub(a, b); a = cls._wsub(a, c); a ^= c >> 12;
        b = cls._wsub(b, c); b = cls._wsub(b, a); b ^= (a << 16) % 4294967296;
        c = cls._wsub(c, a); c = cls._wsub(c, b); c ^= b >> 5;
        a = cls._wsub(a, b); a = cls._wsub(a, c); a ^= c >> 3;
        b = cls._wsub(b, c); b = cls._wsub(b, a); b ^= (a << 10) % 4294967296;
        c = cls._wsub(c, a); c = cls._wsub(c, b); c ^= b >> 15;

        return a, b, c

    @staticmethod
    def _wadd(a, b):
        # 32-bit wrap-around addition.
        return (a + b) % 4294967296

    @staticmethod
    def _wsub(a, b):
        # 32-bit wrap-around subtraction.
        return (a - b) % 4294967296
212 |
213 |
class RedirectCount(RankProvider):
    """Count HTTP redirects for a URL.

    taken from http://www.zacwitte.com/resolving-http-redirects-in-python

    get_rank() returns the number of redirections followed (capped at 10),
    or -1 on error.
    """

    def __init__(self, host="", proxy=None, timeout=30):
        super(RedirectCount, self).__init__(host, proxy, timeout)

    def get_rank(self, url):
        """Return count of redirections for *url*, -1 on error."""
        real_url = url
        # BUGFIX: the original pattern '(:i)^https?://' used invalid
        # inline-flag syntax -- '(:i)' is a group matching the literal
        # characters ':i', so an existing scheme was never detected and
        # 'http://x' became 'http://http://x'. '(?i)' is the correct
        # case-insensitive flag.
        if not re.match(r'(?i)^https?://', url):
            real_url = 'http://' + url
        return self.resolve_http_redirect(real_url)

    def resolve_http_redirect(self, url, depth=0):
        """Follow 'Location' headers recursively; return the depth reached
        (capped at 10), or -1 on any error."""
        try:
            if depth > 10:
                return depth
            o = urlparse.urlparse(url, allow_fragments=True)
            conn = httplib.HTTPConnection(o.netloc)
            path = o.path
            if o.query:
                path += '?' + o.query
            conn.request("HEAD", path)
            res = conn.getresponse()
            headers = dict(res.getheaders())

            # Recurse only on a real redirect that is not a self-loop.
            if 'location' in headers and headers['location'] != url:
                return self.resolve_http_redirect(headers['location'], depth + 1)
            else:
                return depth
        except Exception:
            return -1
250 |
class LongUrl(RankProvider):
    """
    get expanded version of URL using longurl.org
    """

    def __init__(self, host="", proxy=None, timeout=30):
        super(LongUrl, self).__init__(host, proxy, timeout)

    def get_rank(self, url):
        """Return the expanded URL if it was shortened, '' otherwise."""
        try:
            endpoint = 'http://api.longurl.org/v2/expand'
            query = urllib.urlencode({'format': 'json', 'url': url})

            reply = requests.get(endpoint + '?' + query)
            payload = reply.json()
            # The API reports the expansion under the 'long-url' key.
            if payload and 'long-url' in payload:
                return payload['long-url']
        except Exception:
            return ''
        return ''
279 |
280 |
281 |
class InPhishTank(RankProvider):
    """
    Searches for URL in phishtank database
    """

    def __init__(self, host="", proxy=None, timeout=30):
        # API key is read from the analyzer section of the Shiva config.
        self.api_key = lamson.server.shivaconf.get('analyzer','phishtank_api_key')
        super(InPhishTank, self).__init__(host, proxy, timeout)

    def get_rank(self, url):
        """Return True if *url* is in the PhishTank database, False otherwise."""
        # Without a URL or an API key there is nothing to check.
        if not url or not self.api_key:
            return False

        try:
            full_url = url if url.startswith('http') else 'http://' + url
            payload = {
                'format': 'json',
                'url': full_url,
                'app_key': self.api_key,
            }

            reply = requests.post('http://checkurl.phishtank.com/checkurl/', data=payload)
            verdict = reply.json()
            if verdict and 'results' in verdict and 'in_database' in verdict['results']:
                return verdict['results']['in_database']
        except Exception:
            return False
        return False
314 |
class GoogleSafeBrowsingAPI(RankProvider):
    """Query the Google Safe Browsing Lookup API for a URL verdict."""

    def __init__(self, host="", proxy=None, timeout=30):
        api_key = lamson.server.shivaconf.get('analyzer','google_safe_browsing_api_key')

        try:
            self.api_key = api_key
            self.client = SafebrowsinglookupClient(key=api_key)
        except Exception:
            # A missing/empty key makes the client constructor raise;
            # degrade to a checker that always answers False.
            self.client = None
        super(GoogleSafeBrowsingAPI, self).__init__(host, proxy, timeout)

    def get_rank(self, url):
        """
        return True if given URL is considered 'phishing','malware' or 'unwanted' by Google Safe Browsing API
        """
        if not url or not self.client:
            return False

        try:
            verdicts = self.client.lookup(url)
            for checked_url, verdict in verdicts.iteritems():
                if re.search('(?i)(phishing|malware|unwanted)', verdict):
                    return True
        except Exception:
            return False
        return False
342 |
343 |
344 |
def get_domain_info(url):
    """Collect reputation information about *url* from every provider.

    Returns a dictionary keyed by provider class name, plus the original
    URL under 'raw_link':

    {
        raw_link:
        AlexaTrafficRank:
        RedirectCount:
        GooglePageRank:
        LongUrl:
        InPhishTank:
        GoogleSafeBrowsingAPI:
    }
    """
    # Providers are queried with the scheme stripped off.
    domain = re.sub('https?://', '', url)
    result = {'raw_link': url}
    providers = (AlexaTrafficRank(), RedirectCount(), GooglePageRank(),
                 LongUrl(), InPhishTank(), GoogleSafeBrowsingAPI())
    for provider in providers:
        result[provider.__class__.__name__] = provider.get_rank(domain)
    return result
365 |
366 |
367 |
--------------------------------------------------------------------------------
/analyzer/core/trishula/google_safe_api.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2015 Julien Sobrier
3 | # All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
6 | # not use this file except in compliance with the License. You may obtain
7 | # a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14 | # License for the specific language governing permissions and limitations
15 | # under the License.
16 | #
17 | # modified by Alexander Bikadorov, 2012 (abiku@cs.tu-berlin.de)
18 |
19 | """ Version 0.2.0
20 |
21 | Google Safe Browsing Lookup library for Python.
22 |
23 | If you need to check less than 10,000 URLs a day against the Google Safe Browsing v2 API (http://code.google.com/apis/safebrowsing/), you can use the Lookup API (http://code.google.com/apis/safebrowsing/lookup_guide.html) as a lighter alternative to the more complex API (http://code.google.com/apis/safebrowsing/developers_guide_v2.html).
24 |
25 | You need to get an API key from Google at http://code.google.com/apis/safebrowsing/key_signup.html """
26 |
27 |
28 | import urllib
29 | import urllib2
30 | import re
31 | import httplib
32 |
33 |
class SafebrowsinglookupClient(object):
    def __init__(self, key='', debug=0, error=0):
        """ Create a new client. You must pass your Google API key (http://code.google.com/apis/safebrowsing/key_signup.html).

        Arguments:
        key: API key.
        debug: Set to 1 to print debug & error output to the standard output. 0 (disabled) by default.
        error: Set to 1 to print error output to the standard output. 0 (disabled) by default.
        """
        self.key = key              # Google API key (required)
        self.debug = debug          # 1 => print debug output
        self.error = error          # 1 => print error output
        self.last_error = ''        # message of the most recent error
        self.version = '0.2'        # library version, reported as appver
        self.api_version = '3.1'    # Safe Browsing Lookup API protocol version

        if self.key == '':
            raise ValueError("Missing API key")


    def lookup(self, *urls):
        """ Lookup a list of URLs against the Google Safe Browsing v2 lists.

        Returns a hash : . The possible values for are: "ok" (no match), "malware", "phishing", "malware,phishing" (match both lists) and "error".

        Arguments:
        urls: List of URLs to lookup. The Lookup API allows only 10,000 URL checks a day. If you need more, use the official Google Safe Browsing v2 API implementation (http://code.google.com/p/google-safe-browsing/downloads/list). Each requests must contain 500 URLs at most. The lookup() method will split the list of URLS in blocks of 500 URLs if needed.
        """
        results = {}
        count = 0
        # Process the URLs in batches of at most 500 per POST request.
        while count * 500 < len(urls):
            inputs = urls[count * 500 : (count + 1) * 500]
            # Request body: first line is the URL count, then one
            # canonicalized URL per line.
            body = len(inputs)

            for url in inputs:
                body = str(body) + "\n" + self.__canonical(str(url))

            self.__debug("BODY:\n" + body + "\n\n")
            url = 'https://sb-ssl.google.com/safebrowsing/api/lookup?client=%s&key=%s&appver=%s&pver=%s' % ('python', self.key, self.version, self.api_version)
            self.__debug("URL: %s" % (url))

            response = ''
            try:
                response = urllib2.urlopen(url, body)
            except Exception, e:
                # urllib2 raises HTTPError for non-2xx statuses; map each
                # documented status code to ok/error results per batch.
                if hasattr(e, 'code') and e.code == httplib.NO_CONTENT: # 204
                    self.__debug("No match\n")
                    results.update( self.__ok(inputs) )

                elif hasattr(e, 'code') and e.code == httplib.BAD_REQUEST: # 400
                    self.__error("Invalid request")
                    results.update( self.__errors(inputs) )

                elif hasattr(e, 'code') and e.code == httplib.UNAUTHORIZED: # 401
                    self.__error("Invalid API key")
                    results.update( self.__errors(inputs) )

                elif hasattr(e, 'code') and e.code == httplib.FORBIDDEN: # 403 (should be 401)
                    self.__error("Invalid API key")
                    results.update( self.__errors(inputs) )

                elif hasattr(e, 'code') and e.code == httplib.SERVICE_UNAVAILABLE: # 503
                    self.__error("Server error, client may have sent too many requests")
                    results.update( self.__errors(inputs) )

                else:
                    self.__error("Unexpected server response")
                    self.__debug(e)
                    results.update( self.__errors(inputs) )
            else:
                # HTTP 200: an empty body means no match, otherwise the
                # body holds one verdict line per submitted URL.
                response_read = response.read()
                if not response_read:
                    self.__debug("No match\n")
                    results.update( self.__ok(inputs) )
                else:
                    self.__debug("At least 1 match\n")
                    results.update( self.__parse(response_read.strip(), inputs) )

            count = count + 1

        return results



    # Private methods

    # Not much is actually done, full URL canonicalization is not required with the Lookup library according to the API documentation
    def __canonical(self, url=''):
        # remove leading/ending white spaces
        url = url.strip()

        # Remove any embedded tabs and CR/LF characters which aren't escaped.
        url = url.replace('\t', '').replace('\r', '').replace('\n', '')

        # make sure whe have a scheme
        scheme = re.compile("https?\:\/\/", re.IGNORECASE)
        if scheme.match(url) is None:
            url = "http://" + url

        return url


    def __parse(self, response, urls):
        """Pair each response line with its request URL; one verdict line
        is expected per submitted URL, in order."""
        lines = response.splitlines()

        if (len(urls) != len(lines)):
            self.__error("Number of URLs in the response does not match the number of URLs in the request");
            self.__debug( str(len(urls)) + " / " + str(len(lines)) )
            self.__debug(response);
            return self.__errors(urls);


        results = { }
        for i in range(0, len(lines)):
            results.update({urls[i] : lines[i]})

        return results


    def __errors(self, urls):
        """Map every URL in *urls* to the 'error' verdict."""
        results = {}
        for url in urls:
            results.update({url: 'error'})

        return results


    def __ok(self, urls):
        """Map every URL in *urls* to the 'ok' (no match) verdict."""
        results = {}
        for url in urls:
            results.update({url: 'ok'})

        return results


    def __debug(self, message=''):
        # Debug output only when debug mode is on.
        if self.debug == 1:
            print message


    def __error(self, message=''):
        # Error output when either debug or error mode is on; the message
        # is always remembered in last_error.
        if self.debug == 1 or self.error == 1:
            print message + "\n"
        self.last_error = message
178 |
--------------------------------------------------------------------------------
/analyzer/core/trishula/learning.py:
--------------------------------------------------------------------------------
1 | """
2 | This module is responsible honeypot learning and email classification
3 |
4 | """
5 |
6 |
7 | import pickle
8 | import logging
9 | import os
10 |
11 | from sklearn import tree
12 | from sklearn.metrics import f1_score
13 |
14 | import lamson.server
15 | import backend_operations
16 | import statistics
17 |
18 | from phishing import check_url_phishing
19 |
20 |
21 | # files used by module
22 | CLASSIFIER_PKL = 'run/classifier.pkl'
23 | LEARNING_LOCK = 'run/learning.lock'
24 |
25 |
26 | #global variables
27 | global_classifier = None
28 | global_shiva_threshold = 0.5
29 | global_sa_threshold = 0.5
30 |
def __init_classifier():
    """
    Initialize the module-level classifier.

    Loads the stored classifier from CLASSIFIER_PKL if it exists,
    otherwise performs a full learning run.  Detection thresholds are
    always (re)loaded from the database.  Does nothing if the
    classifier is already initialized.
    """
    global global_classifier
    global global_shiva_threshold
    global global_sa_threshold

    if global_classifier:
        return

    global_shiva_threshold, global_sa_threshold = backend_operations.get_current_detection_thresholds()
    logging.info("Learning: Loaded thresholds: {0} {1}".format(global_shiva_threshold, global_sa_threshold))

    logging.info("Learning: Trying to load classifier from file.")
    if os.path.exists(CLASSIFIER_PKL):
        # context manager guarantees the file is closed even if
        # pickle.load() raises (previous code leaked the handle then)
        with open(CLASSIFIER_PKL, 'rb') as classifier_file:
            global_classifier = pickle.load(classifier_file)

    if global_classifier:
        logging.info("Learning: Classifier successfully loaded.")
    else:
        logging.info("Learning: Classifier not found, trying to re-learn...")
        learn()
63 |
64 |
65 |
def learn():
    """
    Run the honeypot email classifier learning process.

    Results of learning are stored in the database table
    'learningstate'.  A lock file prevents concurrent learning runs.
    """
    global global_shiva_threshold
    global global_sa_threshold

    # refuse to run twice in parallel
    if not __check_learning_and_lock():
        logging.warn('Learning: attempt to learn honeypot while learning is already in progress. Nothing to do.')
        return

    classifier_status = __learn_classifier()
    spamassassin_status = __learn_spamassassin()

    # recompute and publish the detection thresholds
    global_shiva_threshold, global_sa_threshold = __compute_classifier_decision_tresholds()

    backend_operations.save_learning_report(classifier_status,
                                            spamassassin_status,
                                            global_shiva_threshold,
                                            global_sa_threshold)

    free_learning_lock()
88 |
def __learn_classifier():
    """
    Re-fit the decision-tree classifier from stored rule results.

    If the stored rule results are inconsistent, a deep relearn first
    recomputes everything.  The fitted classifier is published in the
    module global and persisted to CLASSIFIER_PKL.

    Returns True on success (including the empty-database case).
    """
    if not backend_operations.check_stored_rules_results_integrity():
        logging.info('DEEP RELEARN')
        __deep_relearn()

    learning_matrix = statistics.prepare_matrix()

    # see statistics.prepare_matrix(): row 0 holds rule codes, column 0
    # of each data row holds the derived status
    sample_vectors = [row[1:] for row in learning_matrix[1:]]
    result_vector = [row[0] for row in learning_matrix[1:]]

    if not sample_vectors or not result_vector:
        # nothing to do - no mails in database?
        return True

    # create classifier and fit it with samples
    classifier = tree.DecisionTreeClassifier(min_samples_leaf=10, max_depth=8, class_weight='balanced', criterion='gini')
    classifier.fit(sample_vectors, result_vector)

    global global_classifier
    global_classifier = classifier

    # persist classifier to pickle file; "with" closes the handle even
    # if pickle.dump() raises (previous code leaked it then)
    with open(CLASSIFIER_PKL, 'wb') as f:
        pickle.dump(classifier, f, pickle.HIGHEST_PROTOCOL)

    logging.info("Learning: Learning of classifier successfully finished.")
    return True
125 |
126 |
127 |
def __learn_spamassassin():
    """
    Re-learn the spamassassin Bayes filter on captured emails.

    Returns False if an error occurs, True otherwise.

    NOTE: in this context, the spamassassin term 'spam' is equal to
    phishing and 'ham' to regular spam.
    """
    import subprocess, fnmatch, shlex, glob

    logging.info('Learning - re-learning spamassassin.')
    try:
        retval = subprocess.call(shlex.split('spamc -K'))
        if retval != 0:
            logging.error('Learning: spamassassin daemon isn\'t running, exiting')
            # docstring promises False on error; a bare "return"
            # previously produced None here
            return False
    except subprocess.CalledProcessError:
        logging.error('Learning: error occurred during communication with spamassassin daemon.')
        return False

    rawspampath = lamson.server.shivaconf.get('analyzer', 'rawspampath')

    phishing_mail_path = rawspampath + "phishing/"
    phishing_mail_count = len(fnmatch.filter(os.listdir(phishing_mail_path), '*'))

    spam_mail_path = rawspampath + "spam/"
    spam_mail_count = len(fnmatch.filter(os.listdir(spam_mail_path), '*'))

    # NOTE: shlex.split() does NOT expand shell globs, so the previous
    # 'sa-learn --spam <path>*' invocation handed a literal '*' to
    # sa-learn; expand the pattern explicitly instead.
    phishing_learn_cmd = ['sa-learn', '--spam'] + glob.glob(phishing_mail_path + '*')
    spam_learn_cmd = ['sa-learn', '--ham'] + glob.glob(spam_mail_path + '*')

    try:
        logging.info('Learning: dropping old spamassassin database.')
        retval = subprocess.call(shlex.split('sa-learn --clear'))

        logging.info('Learning: learning spamassassin Bayes filter on {} PHISHING emails in {}.'.format(phishing_mail_count, phishing_mail_path))
        retval += subprocess.call(phishing_learn_cmd)

        logging.info('Learning: learning spamassassin Bayes filter on {} SPAM emails in {}.'.format(spam_mail_count, spam_mail_path))
        retval += subprocess.call(spam_learn_cmd)

        if retval == 0:
            logging.info('Learning: spamassassin successfully learned.')
        else:
            logging.error('Learning: error occurred during spamassassin learning process.')

    except subprocess.CalledProcessError as ex:
        logging.error('Learning: error occurred during communication with spamassassin daemon.')
        logging.error(ex)
        return False
    return True
177 |
178 |
def get_spamassassin_bayes_score(mailFields):
    """
    Return the spamassassin Bayes score [0.00, 1.00] of the given mail.

    Both the 'text' and 'html' parts are piped through spamc and the
    maximum Bayes score found is returned; 0 is returned when no score
    can be obtained.
    """
    import subprocess, shlex, re

    # raw strings: "\d", "\s", "\." are regex escapes, not string
    # escapes; compile once instead of on every loop iteration
    bayes_pattern = re.compile(r'BAYES_\d\d.*\n.*score:\s+\d+\.\d+]')
    score_pattern = re.compile(r'\d+\.\d+]')

    result = 0.00

    for currentKey in ('text', 'html'):

        if not mailFields[currentKey]:
            continue

        p = subprocess.Popen(shlex.split('spamc --full'), stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        spamassassin_output = p.communicate(input=mailFields[currentKey])[0]

        match_bayes = bayes_pattern.search(spamassassin_output)
        if match_bayes:
            match_score = score_pattern.search(match_bayes.group(0))
            # strip the trailing ']' before converting to float
            score = float(match_score.group(0)[:-1])
            result = max(result, score)

    return result
203 |
def check_mail(mailFields):
    """
    Compute probabilities and decide whether an email is phishing.

    Returns a dict:
        verdict:     True/False - final phishing decision
        urlPhishing: True/False - URL-based phishing indicator
        shiva_prob:  float - decision-tree classifier probability
        sa_prob:     float - spamassassin Bayes probability
    """
    global global_classifier
    global global_shiva_threshold
    global global_sa_threshold

    __init_classifier()

    mailVector = process_single_record(mailFields)
    logging.critical(mailVector)

    shiva_prob = global_classifier.predict_proba((mailVector,))[0][1]
    sa_prob = get_spamassassin_bayes_score(mailFields)
    url_phishing = check_url_phishing(mailFields)

    # phishing if any detector fires
    is_phishing = (url_phishing
                   or shiva_prob >= global_shiva_threshold
                   or sa_prob >= global_sa_threshold)

    result = {'verdict': is_phishing,
              'urlPhishing': url_phishing,
              'shiva_prob': shiva_prob,
              'sa_prob': sa_prob}
    logging.info('VERDICT: ' + str(result))

    return result
239 |
def process_single_record(mailFields):
    """
    Apply all phishing rules to one email.

    Stores per-rule results in the database and returns the list of
    numeric rule results sorted by rule code (stable feature order for
    the classifier).
    """
    from phishing import rulelist

    used_rules = []
    computed_results = []
    applied = []

    for rule in rulelist.get_rules():
        outcome = rule.apply_rule(mailFields)
        code = rule.get_rule_code()

        applied.append({'code': code, 'result': outcome})
        used_rules.append({'code': code, 'description': rule.get_rule_description()})
        computed_results.append({'spamId': mailFields['s_id'], 'code': rule.code, 'result': outcome})

    # store result of email to database
    backend_operations.store_computed_results(computed_results, used_rules)

    # sort by rule code to guarantee ordering, then keep only the
    # numeric results
    return [entry['result'] for entry in sorted(applied, key=lambda a: a['code'])]
271 |
272 |
def __deep_relearn():
    """
    Drop all computed results in the database and recompute everything.

    Essential in case of adding new detection rules to the honeypot.
    """
    backend_operations.init_deep_relearn()

    # fetch and reprocess stored emails in batches of 10
    processed = 0
    batch = backend_operations.retrieve(10, processed)
    while batch:
        for record in batch:
            process_single_record(record)
            processed += 1
        batch = backend_operations.retrieve(10, processed)
289 |
def __check_learning_and_lock():
    """
    Atomically check whether learning may start, acquiring the lock.

    Creates LEARNING_LOCK and returns True when no learning is in
    progress; returns False (leaving the existing lock untouched)
    otherwise.
    """
    try:
        # O_CREAT | O_EXCL makes check-and-create a single atomic step,
        # closing the exists()/open() race window of the previous
        # implementation
        fd = os.open(LEARNING_LOCK, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
    except OSError:
        # lock file already exists - learning in progress
        return False

    os.close(fd)
    return True
303 |
def __compute_classifier_decision_tresholds():
    """
    Compute optimal thresholds for marking emails as phishing using the
    F1 score.

    A threshold is a value between 0.4 and 0.6.

    Returns a tuple (shiva_threshold, spamassassin_threshold).
    """
    classification_results = backend_operations.get_detection_results_for_thresholds()

    # no reason to shift shiva score when KNN classifier is used
    shiva_thres = .5

    default_result = (shiva_thres, .5,)

    # return defaults if there are no suitable emails
    if not classification_results:
        return default_result

    # expected outcome: explicit user decision (column 3) when present,
    # otherwise the derived status (column 2)
    expected_results = []
    for line in classification_results:
        if line[3] is not None:
            expected_results.append(line[3])
        else:
            expected_results.append(1 if line[2] == 1 else 0)

    best_thres_sa = 0.5
    best_score_sa = 0.

    try:
        # go through possible thresholds and find the best suitable
        # threshold for the spamassassin classifier
        for i in range(40, 60, 1):
            current_thres = i / 100.0

            sa_result = [1 if row[1] > current_thres else 0 for row in classification_results]

            # don't compute f1_score if we have all zeroes or ones
            if all(sa_result) or not any(sa_result):
                continue

            sa_score = f1_score(expected_results, sa_result, average='binary')

            if best_score_sa <= sa_score:
                best_score_sa = sa_score
                best_thres_sa = current_thres

        return (shiva_thres, best_thres_sa,)

    except Exception as e:
        logging.error(e)

    # return default thresholds if error occurs
    return default_result
358 |
359 |
def free_learning_lock():
    """
    Delete the file LEARNING_LOCK if it exists.

    WARNING:
        should be used only during restarting of the honeypot in order
        to recover from an error
    """
    # guard clause: nothing to clean up when the lock is absent
    if not os.path.exists(LEARNING_LOCK):
        return
    os.remove(LEARNING_LOCK)
--------------------------------------------------------------------------------
/analyzer/core/trishula/report.py:
--------------------------------------------------------------------------------
1 | """
2 | This module sends email notifications about phishing emails
3 | """
4 |
5 | import smtplib
6 | from email.mime.multipart import MIMEMultipart
7 | from email.mime.text import MIMEText
8 | from email.mime.message import MIMEMessage
9 | from email.utils import formataddr
10 | from email import message_from_file
11 |
12 | import lamson.server
13 | import backend_operations
14 | from time import strftime
15 |
16 |
17 | from mako.template import Template
18 |
19 | def send_phishing_report(mailFields):
20 |
21 | report_from = lamson.server.shivaconf.get('analyzer', 'phishing_report_from')
22 | report_to = lamson.server.shivaconf.get('analyzer', 'phishing_report_to')
23 | domain_root = lamson.server.shivaconf.get('web', 'address')
24 |
25 | smtphost = lamson.server.shivaconf.get('analyzer', 'relayhost')
26 | smtpport = lamson.server.shivaconf.get('analyzer', 'relayport')
27 |
28 | msg = MIMEMultipart('mixed')
29 | msg['Subject'] = "Automatic phishing detection report"
30 | msg['From'] = formataddr(('SHIVA honeypot', report_from,))
31 | msg['To'] = formataddr(('', report_to,))
32 |
33 |
34 |
35 | if not mailFields['s_id']:
36 | return
37 |
38 | raw_path = lamson.server.shivaconf.get('analyzer', 'rawspampath')
39 | phish_path = raw_path + 'phishing/'
40 |
41 |
42 | phish_file_name = ''
43 | from os import walk
44 | for _, _, filenames in walk(phish_path):
45 | for filename in filenames:
46 | if filename.startswith(mailFields['s_id']):
47 | phish_file_name = filename
48 | break
49 |
50 | if not phish_file_name:
51 | return
52 |
53 |
54 | links = backend_operations.get_permament_url_info_for_email(mailFields['s_id'])
55 |
56 | has_phishtank = any(map(lambda a: a['InPhishTank'],links))
57 | has_googlesba = any(map(lambda a: a['GoogleSafeBrowsingAPI'],links))
58 |
59 | detected = backend_operations.get_last_seen_date(mailFields['s_id'])
60 | if detected:
61 | detected_str = detected.strftime("%Y-%m-%d %H:%M:%S")
62 | else:
63 | detected_str = 'unknown'
64 |
65 | template_str = """
66 | SHIVA honeypot: suspicious email was caught
67 |
68 | Overview:
69 | Timestamp: ${detected_timestamp|h}
70 | Subject: ${phishing_subject}
71 | Sender: ${phishing_from}
72 | Recipient: ${phishing_to}
73 | Link: http://${web_iterface_url}/view_email?email_id=${email_id}
74 |
75 | % if in_phishtank == True:
76 | Links in PhishTank:
77 | % for link_info in links_info:
78 | % if link_info['InPhishTank'] == True:
79 | ${link_info['raw_link']}
80 | %endif
81 | % endfor
82 | % endif
83 |
84 | % if in_googlesba == True:
85 | Dangerous links in Gogole Safe Browsing API:
86 | % for link_info in links_info:
87 | % if link_info['GoogleSafeBrowsingAPI'] == True:
88 | ${link_info['raw_link']}
89 | %endif
90 | % endfor
91 | % endif
92 |
93 |
94 | """
95 | template = Template(template_str, output_encoding='utf-8', encoding_errors='replace')
96 |
97 | text_message = template.render(phishing_to=mailFields['to'],
98 | phishing_from=mailFields['from'],
99 | phishing_subject=mailFields['subject'],
100 | web_iterface_url=domain_root,
101 | email_id=mailFields['s_id'],
102 | in_phishtank=has_phishtank,
103 | in_googlesba=has_googlesba,
104 | links_info=links,
105 | detected_timestamp=detected_str)
106 |
107 |
108 | textpart = MIMEText(text_message, 'plain', 'utf-8')
109 | textpart['Content-Transfer-Encoding: 8bit']
110 |
111 | phish_file = open(phish_path + phish_file_name, 'rb')
112 |
113 | rfc822part = MIMEMessage(message_from_file(phish_file))
114 | phish_file.close()
115 |
116 | rfc822part['Content-Description'] = 'Original Message'
117 | rfc822part['Content-Disposition'] = 'inline'
118 | msg.attach(textpart)
119 | msg.attach(rfc822part)
120 |
121 |
122 | try:
123 | smtpobj = smtplib.SMTP(smtphost, smtpport)
124 | smtpobj.sendmail(report_from, report_to, msg.as_string())
125 | print "\n\t[+] Phishing notification sent successfully"
126 | except smtplib.SMTPException:
127 | print "\n\t[!] Error: unable to send error phishing notification mail via Exim4"
128 |
129 |
--------------------------------------------------------------------------------
/analyzer/core/trishula/statistics.py:
--------------------------------------------------------------------------------
1 | """
2 | This module generates simple statistics
3 | """
4 | import logging
5 |
6 | import matplotlib.pyplot as plot
7 | import numpy as np
8 |
9 |
10 | import backend_operations
11 |
def prepare_matrix():
    """
    Read results of learning from the database and return them as a
    matrix suitable for further processing.

    Method should be called when the database is in a consistent state.

    The produced matrix has format [M + 1, N + 1]:

    M is the count of emails in the database; the first row contains
    rule codes.  The first column of each of the M data rows contains
    the derived status (1 for phishing, 0 for spam).

    Entries [0][0] are constant with no practical meaning.

    Matrix format:

    [ '_derived_result' , code1          , code2          ... codeN          ]
    [ derived_status1   , rule_1_1_result, rule_1_2_result ... rule_1_N_result ]
    [ derived_status2   , rule_2_1_result, rule_2_2_result ... rule_2_N_result ]
    .                   .                .                     .
    [ derived_statusM   , rule_M_1_result, rule_M_2_result ... rule_M_N_result ]
    """
    matrix = []

    # header row (rule codes) is emitted only on the first walkthrough
    first_loop = True

    for email_id in backend_operations.get_email_ids():

        email_result = backend_operations.get_results_of_email(email_id)
        if 'rules' in email_result:

            # sort rules to ensure the same order in all rows of the matrix
            sorted_rules = sorted(email_result['rules'], key=lambda a: a['code'])

            # add first row (codes) into the matrix during first walkthrough
            if first_loop:
                first_row = ['_derived_result']
                first_row.extend([rule['code'] for rule in sorted_rules])

                matrix.append(first_row)
                first_loop = False

            # append data row: derived status followed by rule results
            row = [1] if email_result['derivedStatus'] else [0]
            row.extend([rule['result'] for rule in sorted_rules])
            matrix.append(row)

    # write matrix to a CSV file consumed by the web interface; "with"
    # closes the file even when a write fails (it previously leaked)
    with open('../../../web/learning_output.csv', 'w') as out_file:
        for row in matrix:
            out_file.write(','.join([str(cell) for cell in row]))
            out_file.write('\n')

    return matrix
74 |
def generate_rules_graph(data=None, title='', filename=''):
    """
    Generate a graph of rule matching percentage grouped by email sensor.

    data:     dict of sensor -> per-rule match counts, with special keys
              '_rule_codes' and '_total_<sensor>'
    title:    optional graph title
    filename: optional output file name (stored under web/images/)
    """
    # None default instead of a shared mutable dict (anti-pattern);
    # behavior for an absent argument is unchanged
    if data is None:
        data = {}

    color_list = 'rgbcmyk'
    color_index = 0
    rule_codes = data['_rule_codes']

    for sensor, rule_vals in data.iteritems():
        # bookkeeping keys are not sensors
        if sensor.startswith('_rule') or sensor.startswith('_total'):
            continue
        current_color = color_list[color_index % 7]
        color_index += 1
        sensor_total = data['_total_' + sensor]

        logging.critical(str(rule_vals))

        # x positions shifted by 0.5 so markers sit between the ticks
        plot.plot(np.arange(len(rule_vals)) + 0.5, [(v / float(sensor_total)) * 100 for v in rule_vals],
                  label=sensor + ' ({})'.format(sensor_total),
                  linestyle='-',
                  linewidth=0.8,
                  color=current_color,
                  markerfacecolor=current_color,
                  markersize=12,
                  marker='o',
                  antialiased=True)

    plot.xticks(np.arange(len(rule_codes)) + 0.5, rule_codes)
    plot.legend(loc='upper center', bbox_to_anchor=(0.5, -0.2))

    plot.grid(True)
    plot.xlabel('Rules', fontsize=18)
    plot.ylabel('Percentage of matching rules', fontsize=18)

    final_title = 'Statistics of rules matching by source of email\n'
    if title:
        final_title = title + "\n"
    plot.title(final_title, fontsize=20)

    final_filename = 'rules_graph.png'
    if filename:
        final_filename = filename

    fig = plot.gcf()
    fig.set_size_inches(15, 8)
    plot.savefig('../../../web/images/' + final_filename, bbox_inches='tight')
    plot.close()
125 |
def generate_roc_graph(data=None):
    """
    Generate a ROC curve of detection True positive/False negative rate.

    data: list of (shiva_prob, spamassassin_prob, derived_result)
          tuples; nothing is generated for empty or missing input
    """
    from sklearn import metrics

    # None default instead of a shared mutable list (anti-pattern);
    # the emptiness check below keeps the old behavior
    if not data:
        return

    shiva_score_probs = [row[0] for row in data]
    spamass_score_probs = [row[1] for row in data]
    derived_results = [row[2] for row in data]

    fpr_shiva, tpr_shiva, _ = metrics.roc_curve(derived_results, shiva_score_probs, pos_label=1)
    fpr_spamass, tpr_spamass, _ = metrics.roc_curve(derived_results, spamass_score_probs, pos_label=1)

    roc_auc_shiva = metrics.auc(fpr_shiva, tpr_shiva)
    roc_auc_spamass = metrics.auc(fpr_spamass, tpr_spamass)

    plot.figure()
    plot.plot(fpr_shiva, tpr_shiva, label='ROC curve SHIVA (area = %0.2f)' % roc_auc_shiva)
    plot.plot(fpr_spamass, tpr_spamass, label='ROC curve spamassassin (area = %0.2f)' % roc_auc_spamass)
    plot.plot([0, 1], [0, 1], 'k--')
    plot.xlim([0.0, 1.0])
    plot.ylim([0.0, 1.05])
    plot.xlabel('False Positive Rate')
    plot.ylabel('True Positive Rate')
    plot.title('Shiva honeypot classification ROC')
    plot.legend(loc="lower right")
    plot.savefig('../../../web/images/roc_graph.png', bbox_inches='tight')
    plot.close()
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
--------------------------------------------------------------------------------
/analyzer/core/trishula/web.py:
--------------------------------------------------------------------------------
1 | import cherrypy
2 | import datetime
3 | import threading
4 | import time
5 | import subprocess
6 | import os
7 |
8 |
9 | from mako.template import Template
10 | from mako.lookup import TemplateLookup
11 |
12 | import lamson.server
13 |
14 | import backend_operations
15 | import learning
16 | import statistics
17 |
18 | import logging
19 |
20 |
21 |
class WebServer():
    """CherryPy handler exposing the honeypot web interface."""

    def __init__(self, in_params):
        # in_params: dict prepared by prepare_http_server()
        self.startup_time = in_params['startup_time'] if in_params['startup_time'] else None
        self.attachmentsPath = in_params['attachmentsPath']
        self.rawHtmlPath = in_params['rawHtmlPath']
        self.honypotLogFile = in_params['honeypot_log_file']
        self.template_lookup = TemplateLookup(directories=[in_params['templates_root']])

    # index page
    @cherrypy.expose
    def index(self):
        return self.index_template()

    # view email page
    @cherrypy.expose
    def view_email(self, email_id=''):
        return self.email_detail_template(email_id)

    # go through all emails
    @cherrypy.expose
    def list_emails(self, start=0, count=30):
        return self.list_emails_template(start, count)

    # learning page
    @cherrypy.expose
    def learning(self):
        return self.learning_template(backend_operations.get_learning_overview())

    # logs accessibility
    @cherrypy.expose
    def logs(self):
        return self.log_file_template()

    # help page
    @cherrypy.expose
    def help(self):
        return self.help_template()

    # honeypot manipulation

    @cherrypy.expose
    def delete_email(self, email_id=''):
        backend_operations.delete_spam(email_id)

    @cherrypy.expose
    def mark_as_phishing(self, email_id=''):
        backend_operations.mark_as_phishing(email_id)

    @cherrypy.expose
    def mark_as_spam(self, email_id=''):
        backend_operations.mark_as_spam(email_id)

    @cherrypy.expose
    def relearn(self):
        learning.learn()
        raise cherrypy.HTTPRedirect("/stats")

    @cherrypy.expose
    def stats(self):
        # regenerate statistic graphs, then land on the learning page
        statistics.generate_rules_graph(backend_operations.get_global_results_for_statistics(),
                                        title='Statistics of rules matching by email class',
                                        filename='global_rules_graph.png')
        statistics.generate_rules_graph(backend_operations.get_rule_results_for_statistics(),
                                        title='Statistics of rules matching by source of email',
                                        filename='source_rules_graph.png'
                                        )
        # statistics.generate_roc_graph((backend_operations.get_data_for_roc_curves()))
        raise cherrypy.HTTPRedirect("/learning")

    # templates ====================================================================

    def index_template(self):
        """Render the main page: recent emails, learning reports, uptime."""
        title = 'SHIVA honeypot: mainpage'
        start = 0
        count = 10
        overview_list = backend_operations.get_overview(start, count)
        learning_overview_list = backend_operations.get_learning_overview(5)

        total_mails = backend_operations.get_mail_count()
        today_mails = backend_operations.get_mail_count_for_date(datetime.date.today(), datetime.date.today() + datetime.timedelta(days=1))

        uptime_str = 'unknown'  # fixed user-visible typo (was 'uknown')
        uptime = time.time() - self.startup_time if self.startup_time else 0
        if uptime > 0:
            days, remainder = divmod(uptime, 24 * 60 * 60)
            hours, remainder = divmod(remainder, 60 * 60)
            minutes, _ = divmod(remainder, 60)
            uptime_str = "{:.0f} days {:.0f} hours {:.0f} minutes".format(days, hours, minutes)

        template = Template('<%include file="index.html"/>', lookup=self.template_lookup, output_encoding='utf-8', encoding_errors='replace')
        return template.render(title=title, overview_list=overview_list, start=start, count=count, report_overview=learning_overview_list, uptime=uptime_str, total_mails=total_mails, today_mails=today_mails)

    def overview_template(self, overview_list, title, start=0, count=10):
        """Render a generic email overview listing."""
        template = Template('<%include file="overview.html"/>', lookup=self.template_lookup, output_encoding='utf-8', encoding_errors='replace')
        return template.render(headline=title, title=title, overview_list=overview_list, start=start, count=count)

    def email_detail_template(self, email_id=''):
        """Render the detail page of a single email."""
        title = 'SHIVA honeypot: view email'

        emails = backend_operations.retrieve_by_ids([email_id])

        # display error message and terminate
        if not emails:
            template = Template('<%include file="view_email.html"/>', lookup=self.template_lookup, output_encoding='utf-8', encoding_errors='replace')
            return template.render(title=title)

        mailFields = emails[0]

        if mailFields:
            # store html content to a static file if it doesn't exist yet
            staticHtmlFile = self.rawHtmlPath + '/' + email_id

            if not os.path.exists(staticHtmlFile):
                # "with" closes the file even if encoding fails
                # (previously the handle leaked on write errors)
                with open(staticHtmlFile, 'w') as f:
                    f.write(mailFields['html'].encode('utf8'))
        else:
            staticHtmlFile = ''

        email_result = backend_operations.get_results_of_email(mailFields['s_id'])
        template = Template('<%include file="view_email.html"/>', lookup=self.template_lookup, output_encoding='utf-8', encoding_errors='replace')
        return template.render(title=title, email_result=email_result, mailFields=mailFields, attachmentsPath=self.attachmentsPath, staticHtmlFile=staticHtmlFile)

    def learning_template(self, report_overview=None):
        """Render the learning overview page."""
        # None default instead of a shared mutable list
        if report_overview is None:
            report_overview = []
        template = Template('<%include file="learning.html"/>', lookup=self.template_lookup, output_encoding='utf-8', encoding_errors='replace')
        return template.render(title='SHIVA honeypot: learning', report_overview=report_overview)

    def log_file_template(self):
        """Render the last 100 lines of the honeypot log file."""
        log_lines = []
        try:
            out = subprocess.check_output(['tail', '-n', '100', self.honypotLogFile])
            log_lines = out.splitlines()
        except subprocess.CalledProcessError:
            # best effort: show an empty log view when tail fails
            pass

        template = Template('<%include file="logs.html"/>', lookup=self.template_lookup)
        return template.render(headline="SHIVA honeypot: log file view", title="SHIVA honeypot: log file view", rows=log_lines)

    def list_emails_template(self, start=0, count=30):
        """Render a pageable listing of all captured emails."""
        title = 'SHIVA honeypot: list emails'
        # arguments were previously swapped ("list <start> emails
        # starting from <count>"); count belongs first
        headline_title = 'SHIVA honeypot: list {0} emails starting from {1}'.format(count, start)

        overview_list = backend_operations.get_overview(start, count)
        total = backend_operations.get_mail_count()

        template = Template('<%include file="list_emails.html"/>', lookup=self.template_lookup)
        return template.render(headline=headline_title, title=title, overview_list=overview_list, total=int(total), start=int(start), count=int(count))

    def help_template(self, report_overview=None):
        """Render the static help page (report_overview is unused)."""
        template = Template('<%include file="help.html"/>', lookup=self.template_lookup, output_encoding='utf-8', encoding_errors='replace')
        return template.render(title='SHIVA honeypot: help')
188 |
189 |
def error_page_401(status, message, traceback, version):
    """Custom CherryPy error page shown on failed basic-auth attempts."""
    body = '401 UNAUTHORIZED ACCESS '
    return body
192 |
193 | # configuration ================================================================
194 |
def prepare_http_server():
    """
    Configure and start the CherryPy web interface (blocking call).

    Reads bind address and basic-auth credentials from shiva.conf,
    maps the static web resources, redirects CherryPy logging into the
    analyzer log directory and finally blocks in cherrypy.quickstart().
    """
    # all paths are resolved relative to this module's location in the
    # deployed tree; staticRoot points at the project root
    staticRoot = os.path.dirname(os.path.realpath(__file__)) + "/../../../../../../../"
    attachmentsPath = '/shiva/attachments'
    rawHtmlPath = '/shiva/raw_html'

    # defaults, overridden by the [web] section of shiva.conf below
    web_interface_address = '127.0.0.1'
    web_interface_port = '8080'
    web_bind_config = lamson.server.shivaconf.get('web', 'address')
    auth_login = lamson.server.shivaconf.get('web','username')
    auth_pass = lamson.server.shivaconf.get('web','password')

    # configured value has the form "host:port"
    if web_bind_config:
        web_interface_address, web_interface_port = web_bind_config.split(':')

    # parameters handed to the WebServer handler instance
    in_params = {'startup_time' : time.time(),
                 'attachmentsPath' : attachmentsPath,
                 'rawHtmlPath' : staticRoot + rawHtmlPath,
                 'templates_root' : staticRoot + 'web/templates/'
                 }
    cherrypy.config.update({'server.socket_host': web_interface_address,
                            'server.socket_port': int(web_interface_port),
                            })
    cherrypy.config.update({'error_page.401': error_page_401})

    # single-user basic auth from shiva.conf credentials
    checkpassword = cherrypy.lib.auth_basic.checkpassword_dict({auth_login : auth_pass,})

    conf = {
        '/': {
            'tools.sessions.on': True,
            'tools.staticdir.root': staticRoot,
            'tools.auth_basic.on': True,
            'tools.auth_basic.realm': 'web interface',
            'tools.auth_basic.checkpassword': checkpassword,
        },
        '/static': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': './web/'
        },
        '/attachments': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': '.' + attachmentsPath
        },
        '/raw_html': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': '.' + rawHtmlPath
        },

        '/favicon.ico': {
            'tools.staticfile.on': True,
            'tools.staticfile.filename': staticRoot + 'web/favicon.png'
        }
    }

    # fall back to /tmp/ when the analyzer log directory is missing
    log_dir = os.path.dirname(os.path.realpath(__file__)) + "/../../../../../analyzer/logs/"
    if not os.path.isdir(log_dir):
        logging.warn("Logging directory doesn't exist, using /tmp/")
        log_dir = '/tmp/'

    access_log_path = log_dir + "web_access.log"
    error_log_path = log_dir + "web_error.log"

    # make sure both log files exist before CherryPy opens them
    if not os.path.exists(access_log_path):
        open(access_log_path, 'a').close()

    if not os.path.exists(error_log_path):
        open(error_log_path, 'a').close()

    # silence console output and route CherryPy logs to our files
    cherrypy.log.screen = False
    cherrypy.log.error_log.propagate = False
    cherrypy.log.access_log.propagate = False
    cherrypy.log.error_file = error_log_path
    cherrypy.log.access_file = access_log_path

    # generous response timeout for long-running relearn requests
    cherrypy._cprequest.Response.timeout = 600

    in_params['honeypot_log_file'] = log_dir + 'lamson.log'
    # blocks until server shutdown
    cherrypy.quickstart(WebServer(in_params),'/',conf)
273 |
def main():
    """Start the web server in a new background thread."""
    server_thread = threading.Thread(target=prepare_http_server)
    server_thread.start()
278 |
279 |
280 |
--------------------------------------------------------------------------------
/docs/AUTHORS:
--------------------------------------------------------------------------------
1 | Authors
2 |
3 | Sumit Sharma Lead Developer sumit.iips@gmail.com
4 | Rahul Binjve Developer @RahulBinjve
5 |
--------------------------------------------------------------------------------
/docs/User Manual.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/docs/User Manual.pdf
--------------------------------------------------------------------------------
/helpers/clearlogs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | > lamson.err
3 | > lamson.log
4 | > lamson.out
5 | > logger.log
6 | > web_access.log
7 | > web_error.log
8 | > maindb.log
9 | echo
10 | ls -l
11 |
12 |
--------------------------------------------------------------------------------
/helpers/dbcreate.py:
--------------------------------------------------------------------------------
import os
import sys
import ConfigParser

# Resolve shiva.conf relative to this script's real on-disk location,
# so the script works regardless of the current working directory.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../shiva/shiva.conf"
print "confpath: ", confpath
shivaconf = ConfigParser.ConfigParser()
shivaconf.read(confpath)

# MySQL connection parameters shared by tempdb() and maindb().
HOST = shivaconf.get('database', 'host')
USER = shivaconf.get('database', 'user')
PASSWD = shivaconf.get('database', 'password')
13 |
14 | def tempdb():
15 | try:
16 | os.system("mysql -h " + HOST + " -u " + USER + " --password=" + PASSWD + " < tempdb.sql")
17 | print "Temporary database created."
18 | except Exception, e:
19 | print e
20 | sys.exit(1)
21 |
22 | def maindb():
23 | try:
24 | os.system("mysql -h " + HOST + " -u " + USER + " --password=" + PASSWD + " < maindb.sql")
25 | print "Main database created."
26 | except Exception, e:
27 | print e
28 | sys.exit(1)
29 |
if __name__ == '__main__':
    # Create both databases: the temporary one first, then the main one.
    tempdb()
    maindb()
33 |
--------------------------------------------------------------------------------
/helpers/honeypot.service:
--------------------------------------------------------------------------------
1 | # sample systemd service for honeypot
2 |
3 | [Unit]
4 | Description=Shiva email honeypot with analyzer
5 |
6 | #dhclient is called from /etc/rc.local
7 | #at this point, the interface has an assigned address
8 | After=rc-local.service
9 |
10 | #services required by analyzer
11 | After=spamassassin.service
12 | After=mysql.service
13 |
14 |
15 | [Service]
16 | Type=oneshot
17 | User=SHIVA_USER
18 | ExecStart=INSTALL_PATH/shiva/honeypot.sh start
19 | ExecStop=/etc/init.d/honeypot.sh stop
20 | RemainAfterExit=yes
21 |
22 | [Install]
23 | WantedBy=multi-user.target
24 |
--------------------------------------------------------------------------------
/helpers/honeypot.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # init script for honeypot startup and shutdown
4 | #
5 | # if listen_interface and web_interface are given, ip on interfaces is detected and stored into 'shiva.conf'
6 | #
7 | listen_interface=""
8 | web_interface=""
9 |
10 |
11 | if [ "$UID" == "0" ] || [ "$EUID" == "0" ]
12 | then
13 | printf "\n[!] Do not run shiva as root.\n\n"
14 | exit 1
15 | fi
16 |
17 | base_dir=INSTALL_PATH/shiva
18 | reciever_dir=$base_dir/shivaReceiver/receiver
19 | analyzer_dir=$base_dir/shivaAnalyzer/analyzer
20 |
21 | listen_address=""
22 | web_address=""
23 |
24 |
25 | tmp_address=$(ip address show $listen_interface | grep inet\ | awk '{print $2}' | sed 's/\/.*//g')
26 | if [[ $tmp_address =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
27 | listen_address=$tmp_address
28 | fi
29 |
30 | tmp_address=$(ip address show $web_interface | grep inet\ | awk '{print $2}' | sed 's/\/.*//g')
31 | if [[ $tmp_address =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
32 | web_address=$tmp_address
33 | fi
34 |
35 |
36 | #replace 'listenhost' directive
37 | if [ ! -z $listen_address ]; then
38 | sed -r -i "s/listenhost[[:space:]+]:[[:space:]+][0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/listenhost\ :\ $listen_address/g" $base_dir/shiva.conf
39 | echo "Assigning IP address for honeypot: " $listen_address
40 | fi
41 |
42 | #replace 'web address' directive
43 | if [ ! -z $web_address ]; then
44 | sed -r -i "s/address[[:space:]+]:[[:space:]+][0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/address\ :\ $web_address/g" $base_dir/shiva.conf
45 | echo "Assigning IP address for web interface: "$web_address
46 | fi
47 |
stop() {
    # Stop the lamson receiver: only issue 'lamson stop' if the pidfile
    # exists AND the recorded process is still alive (kill -0 probes it).
    cd $reciever_dir
    source ../bin/activate
    if [ -f run/smtp.pid ] && kill -0 `cat run/smtp.pid`; then
        lamson stop 2> /dev/null
    fi

    # Same shutdown sequence for the analyzer instance (separate virtualenv).
    cd $analyzer_dir
    source ../bin/activate
    if [ -f run/smtp.pid ] && kill -0 `cat run/smtp.pid`; then
        lamson stop 2> /dev/null
    fi
}
61 |
start() {
    # Start the lamson receiver unless its pidfile points to a live process.
    cd $reciever_dir
    source ../bin/activate
    if [ -f run/smtp.pid ] && kill -0 `cat run/smtp.pid`; then
        echo "Reciverer is already running"
    else
        echo "Starting reciever..."
        # -FORCE overrides a stale pidfile left by an unclean shutdown.
        lamson start -FORCE
    fi

    # Same startup sequence for the analyzer instance (separate virtualenv).
    cd $analyzer_dir
    source ../bin/activate
    if [ -f run/smtp.pid ] && kill -0 `cat run/smtp.pid`; then
        echo "Analyzer is already running"
    else
        echo "Starting analyzer..."
        lamson start -FORCE
    fi
}
81 |
82 | case "$1" in
83 | start)
84 | start
85 | ;;
86 |
87 | stop)
88 | stop
89 | ;;
90 |
91 | restart)
92 | stop
93 | start
94 | ;;
95 |
96 | esac
97 |
98 | exit 0
99 |
--------------------------------------------------------------------------------
/helpers/maindb.sql:
--------------------------------------------------------------------------------
1 | -- SQL file for main database.
2 |
3 | SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";
4 | SET time_zone = "+00:00";
5 |
6 |
7 | --
8 | -- Database: `Shiva`
9 | --
10 |
11 | -- --------------------------------------------------------
12 |
13 | -- Creating database and using it
14 |
15 | CREATE DATABASE `Shiva` COLLATE=utf8mb4_unicode_ci;
16 | USE `Shiva`;
17 |
18 | --
19 | -- Table structure for table `attachment`
20 | --
21 |
22 | CREATE TABLE IF NOT EXISTS `attachment` (
23 | `id` int(11) NOT NULL AUTO_INCREMENT,
24 | `date` date NOT NULL,
25 | `md5` char(32) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
26 | `attachment_file_name` varchar(200) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
27 | `attachment_file_path` mediumtext NOT NULL,
28 | `attachment_file_type` varchar(50) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
29 | `spam_id` char(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
30 | PRIMARY KEY (`id`),
31 | KEY `spam_id` (`spam_id`),
32 | KEY `md5` (`md5`),
33 | KEY `attachment_file_name` (`attachment_file_name`)
34 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci AUTO_INCREMENT=1 ;
35 |
36 | -- --------------------------------------------------------
37 |
38 | --
39 | -- Table structure for table `inline`
40 | --
41 |
42 | CREATE TABLE IF NOT EXISTS `inline` (
43 | `id` int(11) NOT NULL AUTO_INCREMENT,
44 | `date` date NOT NULL,
45 | `md5` char(32) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
46 | `inline_file_name` varchar(200) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
47 | `inline_file_path` mediumtext NOT NULL,
48 | `spam_id` char(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
49 | PRIMARY KEY (`id`),
50 | KEY `spam_id` (`spam_id`),
51 | KEY `md5` (`md5`),
52 | KEY `inline_file_name` (`inline_file_name`)
53 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci AUTO_INCREMENT=1 ;
54 |
55 | -- --------------------------------------------------------
56 |
57 | --
58 | -- Table structure for table `ip`
59 | --
60 |
61 | CREATE TABLE IF NOT EXISTS `ip` (
62 | `id` int(11) NOT NULL AUTO_INCREMENT,
63 | `date` date NOT NULL,
64 | `sourceIP` varchar(16) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
65 | PRIMARY KEY (`id`),
66 | KEY `sourceIP` (`sourceIP`),
67 | KEY `date` (`date`)
68 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
69 |
70 | -- --------------------------------------------------------
71 |
72 | --
73 | -- Table structure for table `ip_spam`
74 | --
75 |
76 | CREATE TABLE IF NOT EXISTS `ip_spam` (
77 | `id` int(11) NOT NULL AUTO_INCREMENT,
78 | `ip_id` int(11) NOT NULL,
79 | `spam_id` char(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
80 | PRIMARY KEY (`id`),
81 | KEY `ip_id` (`ip_id`),
82 | KEY `spam_id` (`spam_id`)
83 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
84 |
85 | -- --------------------------------------------------------
86 |
87 | --
88 | -- Table structure for table `links`
89 | --
90 |
91 | CREATE TABLE IF NOT EXISTS `links` (
92 | `id` int(11) NOT NULL AUTO_INCREMENT,
93 | `date` date NOT NULL,
94 | `hyperLink` varchar(1000) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
95 | `spam_id` char(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
96 | PRIMARY KEY (`id`),
97 | KEY `spam_id` (`spam_id`),
98 | KEY `hyperLink` (`hyperLink`),
99 | KEY `date` (`date`)
100 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
101 |
102 |
103 |
104 | -- --------------------------------------------------------
105 |
106 | --
107 | -- Table structure for table `permamentlinkdetails`
108 | --
109 |
110 | CREATE TABLE IF NOT EXISTS `permamentlinkdetails` (
111 | `id` int(11) NOT NULL AUTO_INCREMENT,
112 | `hyperLink` varchar(1000) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
113 | `date` datetime NOT NULL,
114 | `longHyperLink` varchar(1000) CHARACTER SET utf8 COLLATE utf8_unicode_ci,
115 | `redirectCount` int(3) DEFAULT -1,
116 | `googlePageRank` int(3) DEFAULT -1,
117 | `alexaTrafficRank` int(10) DEFAULT -1,
118 | `inPhishTank` BOOL DEFAULT FALSE,
119 | `googleSafeAPI` BOOL DEFAULT FALSE,
120 | PRIMARY KEY (`id`),
121 | KEY `hyperLink` (`hyperLink`)
122 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
123 |
124 |
125 | -- --------------------------------------------------------
126 |
127 | --
128 | -- Table structure for table `learning results`
129 | --
130 |
131 | CREATE TABLE IF NOT EXISTS `learningresults` (
132 | `id` int(11) NOT NULL AUTO_INCREMENT,
133 | `ruleId` int(11) NOT NULL,
134 | `spamId` char(32) NOT NULL,
135 | `result` int(11) NOT NULL COMMENT 'result of the rule',
136 | PRIMARY KEY (`id`)
137 | ) ENGINE=InnoDB AUTO_INCREMENT=1 ;
138 |
139 |
140 | -- --------------------------------------------------------
141 |
142 | -- --------------------------------------------------------
143 |
144 | --
145 | -- Table structure for table `relay`
146 | --
147 |
148 | CREATE TABLE IF NOT EXISTS `relay` (
149 | `id` int(11) NOT NULL AUTO_INCREMENT,
150 | `date` date NOT NULL,
151 | `firstRelayed` datetime NOT NULL COMMENT 'date of first relay',
152 | `lastRelayed` datetime NOT NULL COMMENT 'date of last relay',
153 | `totalRelayed` int(11) NOT NULL DEFAULT '0' COMMENT 'Total Mails Relayed Till Date',
154 | `spam_id` char(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
155 | `sensorID` varchar(50) NOT NULL,
156 | PRIMARY KEY (`id`),
157 | KEY `spam_id` (`spam_id`),
158 | KEY `sensorID` (`sensorID`)
159 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
160 |
161 | -- --------------------------------------------------------
162 |
163 | --
164 | -- Table structure for table `sdate`
165 | --
166 |
167 | CREATE TABLE IF NOT EXISTS `sdate` (
168 | `id` int(11) NOT NULL AUTO_INCREMENT,
169 | `date` date NOT NULL,
170 | `firstSeen` datetime NOT NULL COMMENT 'First Occurance of Spam',
171 | `lastSeen` datetime NOT NULL COMMENT 'Last Occurance of Spam',
172 | `todaysCounter` int(11) NOT NULL,
173 | PRIMARY KEY (`id`),
174 | KEY `firstSeen` (`firstSeen`),
175 | KEY `lastSeen` (`lastSeen`),
176 | KEY `date` (`date`)
177 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
178 |
179 | -- --------------------------------------------------------
180 |
181 | --
182 | -- Table structure for table `sdate_spam`
183 | --
184 |
185 | CREATE TABLE IF NOT EXISTS `sdate_spam` (
186 | `id` int(11) NOT NULL AUTO_INCREMENT,
187 | `spam_id` char(64) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
188 | `date_id` int(11) NOT NULL,
189 | PRIMARY KEY (`id`),
190 | KEY `spam_id` (`spam_id`),
191 | KEY `date_id` (`date_id`)
192 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
193 |
194 | -- --------------------------------------------------------
195 |
196 | --
197 | -- Table structure for table `sensor`
198 | --
199 |
200 | CREATE TABLE IF NOT EXISTS `sensor` (
201 | `id` int(11) NOT NULL AUTO_INCREMENT,
202 | `date` date NOT NULL,
203 | `sensorID` varchar(50) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Shiva sensor id',
204 | PRIMARY KEY (`id`),
205 | KEY `sensorID` (`sensorID`)
206 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
207 |
208 | -- --------------------------------------------------------
209 |
210 | --
211 | -- Table structure for table `sensor_spam`
212 | --
213 |
214 | CREATE TABLE IF NOT EXISTS `sensor_spam` (
215 | `id` int(11) NOT NULL AUTO_INCREMENT,
216 | `sensor_id` int(11) NOT NULL,
217 | `spam_id` char(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
218 | PRIMARY KEY (`id`),
219 | KEY `sensor_id` (`sensor_id`),
220 | KEY `spam_id` (`spam_id`)
221 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
222 |
223 | -- --------------------------------------------------------
224 |
225 | --
226 | -- Table structure for table `spam`
227 | --
228 |
229 | CREATE TABLE IF NOT EXISTS `spam` (
230 | `id` char(32) NOT NULL COMMENT 'Md5 of combination of fields',
231 | `from` varchar(200) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
232 | `subject` varchar(200) CHARACTER SET utf8 COLLATE utf8_unicode_ci DEFAULT NULL,
233 | `to` longtext CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
234 | `textMessage` mediumtext CHARACTER SET utf8 COLLATE utf8_unicode_ci COMMENT 'body of spam in text format',
235 | `htmlMessage` mediumtext CHARACTER SET utf8 COLLATE utf8_unicode_ci COMMENT 'body of spam in html format',
236 | `totalCounter` int(11) NOT NULL COMMENT 'total count of spam till date',
237 | `ssdeep` varchar(120) DEFAULT NULL COMMENT 'SSDeep hash of the mail',
238 | `headers` text NOT NULL COMMENT 'Header of Spam',
239 | `length` int(11) NOT NULL COMMENT 'Length of the spam',
240 | `shivaScore` float DEFAULT -1.0 NOT NULL COMMENT 'computed phishing score',
241 | `spamassassinScore` float DEFAULT -1.0 NOT NULL COMMENT 'spamassassin Bayes phishing score',
242 | `phishingHumanCheck` BOOL COMMENT 'messaged marked as phishing by human',
243 | `derivedPhishingStatus` BOOL DEFAULT NULL COMMENT 'status computed for message: NULL - not assigned, true - phishing, false - spam',
244 | `urlPhishing` BOOL DEFAULT FALSE COMMENT 'indicator whether some form of direct URL phishing was found',
245 | PRIMARY KEY (`id`),
246 | KEY `subject` (`subject`),
247 | KEY `totalCounter` (`totalCounter`),
248 | KEY `headers` (`headers`(767)),
249 | KEY `textMessage` (`textMessage`(255)),
250 | KEY `htmlMessage` (`htmlMessage`(255))
251 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci;
252 |
253 | -- TODO triger on update phishingHumanCheck
254 |
255 | -- --------------------------------------------------------
256 |
257 | --
258 | -- Table structure for table `whitelist`
259 | --
260 |
261 | CREATE TABLE IF NOT EXISTS `whitelist` (
262 | `id` int(11) NOT NULL AUTO_INCREMENT,
263 | `recipients` mediumtext CHARACTER SET utf8 COLLATE utf8_unicode_ci DEFAULT NULL,
264 | PRIMARY KEY (`id`)
265 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci AUTO_INCREMENT=1 ;
266 |
267 | -- --------------------------------------------------------
268 |
269 | --
270 | -- Table structure for table `learning state`
271 | --
272 |
273 | CREATE TABLE IF NOT EXISTS `learningreport` (
274 | `id` int(11) NOT NULL AUTO_INCREMENT,
275 | `learningDate` datetime NOT NULL,
276 | `learningMailCount` int(10) NOT NULL,
277 | `spamassassinStatus` char(10),
278 | `shivaStatus` char(10),
279 | `shiva_threshold` float DEFAULT 0.5 NOT NULL,
280 | `sa_threshold` float DEFAULT 0.5 NOT NULL,
281 | PRIMARY KEY (`id`)
282 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci AUTO_INCREMENT=1 ;
283 |
284 | -- --------------------------------------------------------
285 |
286 |
287 | -- --------------------------------------------------------
288 |
289 | --
290 | -- Table structure for table `rules`
291 | --
292 |
293 | CREATE TABLE IF NOT EXISTS `rules` (
294 | `id` int(11) NOT NULL AUTO_INCREMENT,
295 | `code` char(10) NOT NULL,
296 | `description` mediumtext CHARACTER SET utf8 COLLATE utf8_unicode_ci COMMENT 'description of the rule',
297 | PRIMARY KEY (`id`)
298 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci AUTO_INCREMENT=1 ;
299 |
300 |
301 | -- --------------------------------------------------------
302 |
303 | -- --------------------------------------------------------
304 |
--
-- Table structure for table `learning results`
-- NOTE(review): this is a duplicate of the `learningresults` definition that
-- appears earlier in this file; it is harmless only because of IF NOT EXISTS,
-- but one of the two copies should be removed.
--

CREATE TABLE IF NOT EXISTS `learningresults` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `ruleId` int(11) NOT NULL,
  `spamId` char(32) NOT NULL,
  `result` int(11) NOT NULL COMMENT 'result of the rule',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 ;
316 |
317 |
318 |
319 | -- --------------------------------------------------------
320 |
321 | --
322 | -- views used for statistics computations
323 | --
324 | CREATE OR REPLACE VIEW rules_overview_view AS
325 | SELECT r.code,SUBSTRING_INDEX(se.sensorID,',',1) as `SensorID`,sum(if(lr.result <= 0,0,1)) as result
326 | FROM spam s
327 | INNER JOIN learningresults lr on s.id = lr.spamId
328 | INNER JOIN rules r on r.id = lr.ruleId
329 | INNER JOIN sensor_spam sse on s.id = sse.spam_id
330 | INNER JOIN sensor se on sse.sensor_id = se.id
331 | GROUP BY SUBSTRING_INDEX(se.sensorID,',',1),r.code,r.description;
332 |
333 |
334 | CREATE OR REPLACE VIEW global_overview_view AS
335 | SELECT r.code, if(s.derivedPhishingStatus = 1, 'phishing', 'spam') as `type`,sum(if(lr.result <= 0,0,1)) as result
336 | FROM spam s
337 | INNER JOIN learningresults lr on s.id = lr.spamId
338 | INNER JOIN rules r on r.id = lr.ruleId
339 | GROUP BY s.derivedPhishingStatus,r.code,r.description;
340 |
341 |
342 | -- ---------------------------------------------------------
343 | --
344 | -- view used for rule integrity checking
345 | -- if any line is returned, there is inconsistency in stored rules result
346 | -- and deep relearning must be performed
347 | -- Inconsistency can be caused by adding new rules to the honeypot without a deep relearn
348 | --
349 | CREATE OR REPLACE VIEW ruleresults_integrity_check_view AS
350 | SELECT spamId
351 | FROM learningresults
352 | GROUP BY spamId
353 | HAVING (select count(*) from rules) <> count(ruleId)
354 | OR (select count(*) from rules) <> count(distinct ruleId)
355 | UNION
356 | SELECT id FROM spam WHERE id NOT IN (SELECT spamId FROM learningresults)
357 | UNION
358 | SELECT spamId FROM learningresults WHERE spamId NOT IN (SELECT id FROM spam);
359 |
360 |
361 |
362 | -- ---------------------------------------------------------
363 | --
364 | -- view used for detection thresholds calculation
365 | -- only emails NOT matching the regex (spam|phishing)Import are considered,
366 | -- since there is no point in considering them when computing
367 | -- detection effectiveness
368 | --
369 | CREATE OR REPLACE VIEW email_classification_view AS
370 | SELECT s.shivaScore, s.spamassassinScore, s.derivedPhishingStatus, s.phishingHumanCheck
371 | FROM sensor se
372 | INNER JOIN sensor_spam ss ON ss.sensor_id = se.id
373 | INNER JOIN spam s ON s.id = ss.spam_id
374 | WHERE sensorID NOT regexp '.*(spam|phishing)Import.*';
375 |
--------------------------------------------------------------------------------
/helpers/restart_receiver.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | source ../bin/activate
3 |
4 | # There is a bug at the receiver end, which is yet to be fixed. Probably lamson doesn't break the network connections after they are done.
5 | # Hence, gradually the total number of connections in "established" state reaches ~1020 and system stops accepting connections at TCP port 25.
6 | # Following is a dirty fix to restart "receiver" every xx minutes to clear connections
restart_receiver () {
    # Stop every lamson process tracked under run/ and let established
    # connections drain before attempting a restart.
    lamson stop -ALL run
    sleep 30

    #cd logs
    #bash clearlogs.sh
    #cd ..

    echo -e "\n\t[+] scheduler script stopped SHIVA-receiver"
    echo -en "\t[+] number of established connection before stopping receiver: "
    netstat -natp | grep -i estab | wc -l
    echo -e "\t[+] attempting to restart receiver....."

    # -FORCE overrides a stale pidfile left by an unclean shutdown.
    lamson start -FORCE

    # If the start failed (typically TCP port 25 still engaged), wait and
    # retry by calling ourselves recursively.
    if [ $? != 0 ]; then
        echo -e "\t[-] Error: Most probably TCP port 25 is engaged...sleeping for 30 seconds before reattempting\n"
        sleep 30
        restart_receiver
    fi
}
29 |
30 | restart_receiver
--------------------------------------------------------------------------------
/helpers/setup_exim4.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | #step 1
4 | sed -i -e '32a\daemon_smtp_ports=2500' /etc/exim4/exim4.conf.template
5 |
6 | #step 2
7 | sed -i s/dc_eximconfig_configtype=\'local\'/dc_eximconfig_configtype=\'internet\'/ /etc/exim4/update-exim4.conf.conf
8 |
9 | #step 3
10 | service exim4 restart
11 |
--------------------------------------------------------------------------------
/helpers/shiva.conf:
--------------------------------------------------------------------------------
1 | # Shiva Global Configuration File
2 |
3 |
4 | [global]
5 | # Path for Receiver to dump mails and Analyzer to retrieve spams from
6 | queuepath : somepath
7 |
8 |
9 | [receiver]
10 | # Host to listen on for incoming spams. It could be NATed or Public IP
11 | listenhost : 127.0.0.1
12 |
13 | # Port for running SMTP receiver (integer value only)
14 | listenport : 2525
15 |
16 | # Name of the sensor
17 | sensorname : shiva
18 |
19 | # Boolean value to enable/disable SMTP Authentication. (True/False)
20 | authenabled : False
21 |
22 | # Username(s) for SMTP AUTH. Combination of user1:pass1, user2:pass2 .... userN:passN would work
23 | # e.g. smtpuser : user1,user2,user3,userN (no space, just comma separated)
24 | smtpuser : user
25 |
26 | # Password(s) for SMTP AUTH
27 | # e.g. smtppasswd : pass1,pass2,pass3,passN (no space, just comma separated)
28 | smtppasswd : password
29 |
30 |
31 | [analyzer]
32 | # Enable/disable relaying of spams. Beware! Do not disable relaying and do not relay too much
33 | relay : False
34 |
35 | # Number of times an individual spam is to be relayed (integer)
36 | individualcounter : 30
37 |
38 | # Number of total spams to be relayed in a specific time (integer value only)
39 | globalcounter : 120
40 |
41 | # The port on which exim is listening (integer value only)
42 | relayport : 2500
43 |
44 | # Host on which exim is listening
45 | relayhost : 127.0.0.1
46 |
47 | # Path where distorted spam(s), which could not be analyzed, would be dumped
48 | undeliverable_path : somepath
49 |
50 | # Duration (in minutes) to be passed to shivascheduler (integer value only)
51 | schedulertime : 120
52 |
53 | # Path where raw spam samples would be dumped
54 | rawspampath : somepath
55 |
56 | # Path to dump attachments
57 | attachpath : somepath
58 |
59 | # Path to dump inline attachments
60 | inlinepath : somepath
61 |
62 | # PhishTank api key
63 | phishtank_api_key :
64 |
65 | # Google Safe Browsing API key :
66 | google_safe_browsing_api_key :
67 |
68 | # send phishing reports via email? (True/False)
69 | send_phishing_report : False
70 |
71 | # phishing report sender address (email address)
72 | phishing_report_from : shiva@honeypot
73 |
74 | # phishing report recipient address (email address)
75 | phishing_report_to : someone@somewhere
76 |
77 | [database]
78 | # Store data in local databases (True/False)
79 | localdb : True
80 |
81 | # MySQL host to connect
82 | host : 127.0.0.1
83 |
84 | # MySQL username
85 | user : root
86 |
87 | # MySQL password
88 | password : password
89 |
90 |
91 | [hpfeeds]
92 | # Boolean value to enable/disable hpfeeds (True/False)
93 | enabled : False
94 |
95 | # Hpfeeds host
96 | host : hpfriends.honeycloud.net
97 |
98 | # Hpfeeds port (integer value only)
99 | port : 20000
100 |
101 | # Ident for hpfeeds
102 | ident : wN7TesWe
103 |
104 | # Secret key for hpfeeds
105 | secret : 8koAbA4P0PMhnW9Q
106 |
107 | # Path where raw spam samples published on hpfeeds would be dumped
108 | hpfeedspam : somepath
109 |
110 | # Path where attachments published on hpfeeds would be dumped
111 | hpfeedattach : somepath
112 |
113 | [notification]
114 | # Boolean value to enable/disable notification email to developer/maintainer (True/False)
115 | enabled : False
116 |
117 | # sender email-id (any mail-id shall work; even the bogus ones)
118 | sender : my.spamp0t@anydomain.com
119 |
120 | # recipient email-id (suggested to be of developer / maintainer)
121 | recipient : b0nd.g4h@gmail.com
122 |
123 | [web]
124 | # IP address:port to bind the web interface
125 | address : 127.0.0.1:8080
126 |
127 | # username for web interface
128 | username : shiva
129 |
130 | # password for web interface
131 |
132 | password : password
133 |
--------------------------------------------------------------------------------
/helpers/tempdb.sql:
--------------------------------------------------------------------------------
1 | -- SQL file for temporary database
2 |
3 | SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";
4 | SET time_zone = "+00:00";
5 |
6 |
7 | --
8 | -- Database: `ShivaTemp`
9 | --
10 |
11 | -- --------------------------------------------------------
12 |
13 | -- Creating database and using it
14 |
15 | CREATE DATABASE `ShivaTemp` COLLATE=utf8mb4_unicode_ci;
16 | USE `ShivaTemp`;
17 |
18 | --
19 | -- Table structure for table `attachments`
20 | --
21 |
22 | CREATE TABLE IF NOT EXISTS `attachments` (
23 | `id` int(255) NOT NULL AUTO_INCREMENT,
24 | `spam_id` varchar(32) NOT NULL COMMENT 'MD5 of spam from spam table, foreign key',
25 | `file_name` varchar(100) DEFAULT NULL COMMENT 'Name of the attachment file',
26 | `attach_type` varchar(12) DEFAULT NULL COMMENT 'Could be either inline/attachment',
27 | `attachment_file_path` mediumtext NOT NULL,
28 | `attachmentFileMd5` varchar(32) DEFAULT NULL COMMENT 'MD5 of the attachment',
29 | `date` date NOT NULL,
30 | PRIMARY KEY (`id`)
31 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci AUTO_INCREMENT=1 ;
32 |
33 | -- --------------------------------------------------------
34 |
35 | --
36 | -- Table structure for table `links`
37 | --
38 |
39 | CREATE TABLE IF NOT EXISTS `links` (
40 | `id` int(255) NOT NULL AUTO_INCREMENT,
41 | `spam_id` varchar(32) NOT NULL COMMENT 'MD5 of spam from spam table, foreign key',
42 | `longHyperLink` varchar(255) COMMENT 'Long version of Hyperlink from the spam',
43 | `hyperlink` varchar(1000) DEFAULT NULL COMMENT 'Hyperlink from the spam',
44 | `date` date NOT NULL,
45 | PRIMARY KEY (`id`)
46 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
47 |
48 | -- --------------------------------------------------------
49 |
50 | --
51 | -- Table structure for table `sensors`
52 | --
53 |
54 | CREATE TABLE IF NOT EXISTS `sensors` (
55 | `id` int(255) NOT NULL AUTO_INCREMENT,
56 | `spam_id` varchar(32) NOT NULL COMMENT 'MD5 of spam from spam table, foreign key',
57 | `sensorID` varchar(100) DEFAULT NULL COMMENT 'Sensor where spam was received',
58 | `date` date NOT NULL,
59 | PRIMARY KEY (`id`)
60 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;
61 |
62 | -- --------------------------------------------------------
63 |
64 | --
65 | -- Table structure for table `spam`
66 | --
67 |
68 | CREATE TABLE IF NOT EXISTS `spam` (
69 | `id` varchar(32) NOT NULL COMMENT 'MD5 of the spam',
70 | `ssdeep` varchar(120) DEFAULT NULL COMMENT 'SSDeep hash of the mail',
71 | `to` longtext CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
72 | `from` varchar(200) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL,
73 | `textMessage` mediumtext CHARACTER SET utf8 COLLATE utf8_unicode_ci COMMENT 'body of spam in text format',
74 | `htmlMessage` mediumtext CHARACTER SET utf8 COLLATE utf8_unicode_ci COMMENT 'body of spam in html format',
75 | `subject` varchar(200) CHARACTER SET utf8 COLLATE utf8_unicode_ci DEFAULT NULL,
76 | `headers` text NOT NULL COMMENT 'Header of Spam',
77 | `sourceIP` text CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL COMMENT 'IPs from where mail has been received',
78 | `sensorID` varchar(50) CHARACTER SET utf8 COLLATE utf8_unicode_ci NOT NULL COMMENT 'Shiva sensor id',
79 | `firstSeen` datetime NOT NULL COMMENT 'First Occurance of Spam',
80 | `relayCounter` int(11) NOT NULL DEFAULT '0' COMMENT 'Mails Relayed in an hour',
81 | `relayTime` datetime NOT NULL COMMENT 'date of first relay',
82 | `totalCounter` int(11) NOT NULL COMMENT 'total count of spam till date',
83 | `length` int(11) NOT NULL COMMENT 'Length of the spam',
84 | `shivaScore` float DEFAULT -1.0 NOT NULL COMMENT 'computed phishing score',
85 | `spamassassinScore` float DEFAULT -1.0 NOT NULL COMMENT 'spamassassin Bayes phishing score',
86 | `phishingHumanCheck` BOOL COMMENT 'messaged marked as phishing by human',
87 | `derivedPhishingStatus` BOOL DEFAULT NULL COMMENT 'status computed for message: NULL - not assigned, true - phishing, false - spam',
88 | `urlPhishing` BOOL DEFAULT FALSE COMMENT 'indicator whether some form of direct URL phishing was found',
89 | PRIMARY KEY (`id`)
90 | ) ENGINE=InnoDB DEFAULT COLLATE=utf8mb4_unicode_ci;
91 |
--------------------------------------------------------------------------------
/helpers/update_shiva_packages.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | cp WORK_PATH/analyzer/core/*.py WORK_PATH/shiva/shivaAnalyzer/lib/python2.7/site-packages/lamson
4 |
5 | cp WORK_PATH/analyzer/core/trishula/*.py WORK_PATH/shiva/shivaAnalyzer/lib/python2.7/site-packages/lamson/trishula/
6 |
7 |
--------------------------------------------------------------------------------
/hpfeeds/hpfeeds.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2010-2013 Mark Schloesser %s having ident -> %s at %s" % (channel, ident, datetime.datetime.now())
32 | data = cPickle.loads(str(payload))
33 | queue.put(data)
34 |
def on_error(payload):
    # Broker reported a feed error: announce it, push one None shutdown
    # sentinel per worker so every dumpspam() loop exits, then stop the
    # hpfeeds client.
    print "Error occured."
    for i in range(WORKERS):
        queue.put(None)
    hpc.stop()
40 |
41 | hpc.subscribe(channel)
42 | print "subscribed"
43 | try:
44 | hpc.run(on_message, on_error)
45 | except hpfeeds.FeedException, e:
46 | print e
47 |
def dumpspam():
    """Worker loop: persist pickled records from the shared queue.

    Each record is written to spams/<s_id>.  A None record is the
    shutdown sentinel and terminates the loop.
    """
    while True:
        record = queue.get()
        if record is None:
            break
        serialized = cPickle.dumps(record)
        with open("spams/" + record['s_id'], 'wb') as outfile:
            outfile.write(serialized)
            outfile.write("\n")
59 |
# Spin up the pool of worker threads that persist received records to disk.
for i in range(WORKERS):
    t = threading.Thread(target=dumpspam, args=[])
    t.start()

try:
    receive()
except KeyboardInterrupt, e:
    # Ctrl-C: push one None sentinel per worker so each dumpspam() loop
    # terminates cleanly, then exit.
    for i in range(WORKERS):
        queue.put(None)
    sys.exit(0)
70 |
--------------------------------------------------------------------------------
/hpfeeds/sendfiles.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

import threading
import json
import os
import sys
import ConfigParser
import shutil
import base64

import hpfeeds

# Locate shiva.conf relative to this script's installed location.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../../../../shiva.conf"
shivaconf = ConfigParser.ConfigParser()
shivaconf.read(confpath)

# hpfeeds broker connection parameters.
host = shivaconf.get('hpfeeds', 'host')
port = shivaconf.getint('hpfeeds', 'port')
ident = shivaconf.get('hpfeeds', 'ident')
secret = shivaconf.get('hpfeeds', 'secret')

# Source directories to publish from and destinations files are moved to
# after successful publication.
path = {'raw_spam' : shivaconf.get('analyzer', 'rawspampath'),
        'attach' : shivaconf.get('analyzer', 'attachpath'),
        'hpfeedspam' : shivaconf.get('hpfeeds', 'hpfeedspam'),
        'hpfeedattach' : shivaconf.get('hpfeeds', 'hpfeedattach')}

# hpfeeds channel names keyed by payload type.
channels = {'raw_spam': 'shiva.raw',
            'attachments': 'shiva.attachments'}
# Serializes hpc.publish() calls between the two publisher threads.
lock = threading.Lock()
30 |
def send_raw():
    """Publish every file waiting in the raw-spam directory to hpfeeds.

    File names encode metadata ("<spamid>-...-<ip>-<sensor>"); the spam id
    and source IP are recovered from the name, the file body is wrapped in
    a JSON record and published on the shiva.raw channel, and the file is
    then moved into the hpfeeds archive directory.
    """
    spam_dir = path['raw_spam']
    pending = [entry for entry in os.listdir(spam_dir)
               if os.path.isfile(os.path.join(spam_dir, entry))]

    if not pending:
        print("nothing to send on hpfeeds channel shiva.raw")
        return

    for entry in pending:
        print("sending raw spam on hpfeeds channel shiva.raw")
        pieces = entry.split('-')
        with open(spam_dir + entry) as fp:
            contents = fp.read()

        payload = json.dumps({'s_id': pieces[0], 'spamfile': contents,
                              'ip': pieces[-2], 'name': entry})
        with lock:
            hpc.publish(channels['raw_spam'], payload)
            print("Raw Published")

        shutil.move(spam_dir + entry, path['hpfeedspam'])
53 |
def send_attach():
    """Publish every extracted attachment on the shiva.attachments channel.

    Each file in the attachment directory is base64 encoded, wrapped in a
    JSON record alongside the spam id and attachment name recovered from
    the file name ("<spamid>-...-<name>-..."), published, and then moved
    into the hpfeeds archive directory.
    """
    attach_dir = path['attach']
    pending = [entry for entry in os.listdir(attach_dir)
               if os.path.isfile(os.path.join(attach_dir, entry))]

    if not pending:
        print("nothing to send on hpfeeds channel shiva.attachments")
        return

    for entry in pending:
        print("sending attachment %s on hpfeeds channel shiva.attachments" % entry)
        pieces = entry.split('-')
        with open(attach_dir + entry) as fp:
            encoded = base64.b64encode(fp.read())

        payload = json.dumps({'s_id': pieces[0], 'attachment': encoded,
                              'name': pieces[2]})
        with lock:
            hpc.publish(channels['attachments'], payload)
            print("[+] Attachment Published")

        shutil.move(attach_dir + entry, path['hpfeedattach'])
75 |
def main():
    """Publish pending raw spam and attachments over hpfeeds in parallel.

    Bug fixed: the original called ``Thread(...).run()``, which executes
    the target synchronously in the *current* thread and returns None, so
    the subsequent ``isAlive()`` busy-wait raised AttributeError on None
    (silently swallowed by the bare except).  The threads are now properly
    start()ed and join()ed, so the two senders actually run concurrently.
    """
    try:
        raw_thread = threading.Thread(target=send_raw, args=[])
        attach_thread = threading.Thread(target=send_attach, args=[])
        raw_thread.start()
        attach_thread.start()
    except Exception as e:
        print("[-] shivasendfiles main: Error. %s" % e)
        sys.exit(1)

    try:
        # join() blocks until both workers finish, replacing the original
        # CPU-burning `while isAlive(): pass` spin loop.
        raw_thread.join()
        attach_thread.join()
    except Exception:
        # Preserve the original best-effort behavior: never crash the
        # caller while waiting for the senders to finish.
        pass
89 |
if __name__ == '__main__':
    # hpc is a module-level global consumed by send_raw()/send_attach().
    hpc = hpfeeds.new(host, port, ident, secret)
    main()
93 |
--------------------------------------------------------------------------------
/install.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash -e
2 |
3 | # Installs SHIVA
4 | # Tested on Ubuntu 12.04 & 13.04 and Mint 15
5 |
6 | clear
7 |
# Print the installer's ASCII-art banner.  The heredoc body is emitted
# verbatim to the user -- do not reformat its contents.
banner ()
{
cat << EOF

########################################################################

                SHIVA Installer Script
                      Ver 0.2

                                _/                    _/
                       _/_/_/  _/_/_/    _/  _/      _/_/_/
                    _/_/      _/    _/  _/  _/    _/    _/  _/
                       _/_/  _/    _/  _/  _/    _/   _/    _/
                  _/_/_/    _/    _/  _/    _/    _/_/_/

                          
                          Cheers to:
                       Honeynet Project 
                             &
                       garage4hackers

                    Report bugs to author
########################################################################

EOF
}
34 |
35 |
36 | WORK_PATH=$(pwd)
37 | mkdir -p shiva
38 | INSTALL_PATH=$WORK_PATH/shiva
39 |
# Verify every required system package is installed via dpkg; list what is
# missing and abort so the user can install them before re-running.
prerequisites () {
	printf "\n\n[*] Checking for the prerequisites in system.\n"
	pkgs=("python" "g++" "python-dev" "python-virtualenv" "exim4-daemon-light" "libmysqlclient-dev" "make" "libffi-dev" "libfuzzy-dev" "automake" "autoconf" "libpng12-dev" "libfreetype6-dev" "libxft-dev" "libblas-dev" "liblapack-dev" "gfortran" "spamassassin" "mysql-server" "mysql-client")

	missing_counter=0
	for needed_pkg in "${pkgs[@]}"
	do
		# Quote the package name defensively; dpkg -s is silent, we only
		# care about its exit status.
		if ! dpkg -s "$needed_pkg" > /dev/null 2>&1; then
			printf "\t[-] Missing package: %s\n" "$needed_pkg"
			# POSIX arithmetic expansion; the original $[ ] form is deprecated.
			missing_counter=$((missing_counter + 1))
		else
			printf "\t[+] Required package found: (%s)\n" "$needed_pkg"
		fi
	done

	if ((missing_counter > 0)); then
		# Fixed: the original format string began "\n[\n", printing a stray
		# "[" line before the error banner.
		printf "\n\e[1;31m[!] Error!\e[0m Minimum %d package(s) missing. Install required package(s) (refer to User Manual) and re-run this script.....aborting installation\n\n" "$missing_counter"
		printf "sudo apt-get install and likewise\n"
		exit 1
	fi	
}
61 |
# Copy helper scripts/SQL into the install dir and generate the update and
# init scripts with the real paths/user substituted in.
helpers () {
	printf "\n\n[*] Copying helper files.\n"
	cp -v $WORK_PATH/helpers/dbcreate.py $INSTALL_PATH/
	cp -v $WORK_PATH/helpers/maindb.sql $INSTALL_PATH/
	cp -v $WORK_PATH/helpers/shiva.conf $INSTALL_PATH/
	cp -v $WORK_PATH/helpers/tempdb.sql $INSTALL_PATH/
	cp -v $WORK_PATH/helpers/setup_exim4.sh $INSTALL_PATH/
	
	printf "\n\n[*] Generating update script.\n"
	# Escape / and & so the path can be used safely inside sed 's///'.
	WORK_PATH_ESC=$(echo "$WORK_PATH" | sed -e 's/[\/&]/\\&/g')
	sed "s/WORK_PATH/$WORK_PATH_ESC/g" $WORK_PATH/helpers/update_shiva_packages.sh > $INSTALL_PATH/update_shiva_packages.sh && chmod u+x $INSTALL_PATH/update_shiva_packages.sh
	
	printf "\n\n[*] Generating init script.\n"
	# Bake the invoking (non-root) user and the work path into the systemd
	# unit and init script templates.
	SHIVA_USER=$(whoami)
	sed "s/SHIVA_USER/$SHIVA_USER/g" $WORK_PATH/helpers/honeypot.service | sed "s/INSTALL_PATH/$WORK_PATH_ESC/g"> $INSTALL_PATH/honeypot.service
	sed "s/INSTALL_PATH/$WORK_PATH_ESC/g" $WORK_PATH/helpers/honeypot.sh > $INSTALL_PATH/honeypot.sh
	
	
}
81 |
# Ask the user whether analyzed data should go into a database and flip the
# localdb flag in the generated shiva.conf accordingly.
dbcreate () {
	printf "\nDo you wish to store analyzed data in database?"
	printf "\nYou can opt to have following setups:"
	printf "\n\t[+] Store data in local/remote database, or"
	printf "\n\t[+] Do not store but push all data to hpfeeds, or"
	printf "\n\t[+] Store data in local/remote database and push data to hpfeeds as well\n\n"
	read -p "[Y]es/[N]o... " choice
	case $choice in
		# Yes: enable localdb in shiva.conf and walk the user through
		# the manual database setup steps.
		[Yy]*) sed -i 's/localdb : False/localdb : True/g' $INSTALL_PATH/shiva.conf
			   printf "\n[*] Steps to setup local databases."
			   printf "\n\t[+] Make sure you've 'mysql-client' and 'mysql-server' installed."
			   printf "\n\t[+] Edit the shiva/shiva.conf file and"
			   printf "\n\t    provide neccesary connection parameters in 'database' section."
			   printf "\n\t[+] Execute dbcreate.py in shiva folder as \"python dbcreate.py\"\n"
			   printf "\n\t[+] Refer to User Manual for detailed instructions\n"
			   printf "\n\t[+] For remote database; provide necessary connection parameters in 'database' section\n"
			   read -p "Press enter to continue installation...";;
		
		# No: shiva.conf already ships with localdb : False.
		[Nn]*) printf "\n[*] Setting \"localdb : False\" in shiva.conf.\n";;
		
		# Anything else aborts the installer via die() (defined below;
		# resolved at call time, so the ordering is fine in bash).
		*) die "Wrong choice!"
	esac
}
105 |
# Build the receiver virtualenv: install lamson + deps, generate the lamson
# project, then overlay SHIVA's patched modules and handlers.
receiver () {
	printf "\n\n[*] Setting up SHIVA Receiver!\n"
	
	cd $INSTALL_PATH
	
	printf "\n[*] Creating Virtual Environment: \n"
	virtualenv shivaReceiver
	cd shivaReceiver
	source bin/activate
	
	printf "\n[*] Installing required python modules for receiver:\n"
	easy_install -U distribute
	pip install apscheduler==2.1.2
	pip install docutils
	pip install python-daemon==2.0.2
		
	printf "\n[*] Installing Lamson (receiver) and creating project: \n"
	pip install lamson==1.3.4
	lamson gen -project receiver
	
	printf "\n[*] Copying neccesary files: \n"
	# Overwrite lamson's stock modules with SHIVA's patched versions
	# (adds SMTP auth support, peer tracking, scheduled restarts).
	cp -v $WORK_PATH/receiver/core/encoding.py $INSTALL_PATH/shivaReceiver/lib/python2.7/site-packages/lamson/
	cp -v $WORK_PATH/receiver/core/queue.py $INSTALL_PATH/shivaReceiver/lib/python2.7/site-packages/lamson/
	cp -v $WORK_PATH/receiver/core/server.py $INSTALL_PATH/shivaReceiver/lib/python2.7/site-packages/lamson/
	cp -v $WORK_PATH/receiver/core/smtpd.py $INSTALL_PATH/shivaReceiver/lib/python2.7/site-packages/lamson/
	
	cp -v $WORK_PATH/receiver/config/boot.py $INSTALL_PATH/shivaReceiver/receiver/config/
	cp -v $WORK_PATH/receiver/config/settings.py $INSTALL_PATH/shivaReceiver/receiver/config/
	
	cp -v $WORK_PATH/receiver/handlers/forward.py $INSTALL_PATH/shivaReceiver/receiver/app/handlers
	cp -v $WORK_PATH/receiver/handlers/log.py $INSTALL_PATH/shivaReceiver/receiver/app/handlers/
	cp -v $WORK_PATH/receiver/handlers/queue.py $INSTALL_PATH/shivaReceiver/receiver/app/handlers/
	cp -v $WORK_PATH/receiver/handlers/spampot.py $INSTALL_PATH/shivaReceiver/receiver/app/handlers/
	
	cp -v $WORK_PATH/helpers/clearlogs.sh $INSTALL_PATH/shivaReceiver/receiver/logs/
	cp -v $WORK_PATH/helpers/restart_receiver.sh $INSTALL_PATH/shivaReceiver/receiver/
	
	
	printf "[+] Setting up Shiva Receiver done!\n"
	deactivate
}
147 |
# Build the analyzer virtualenv: scientific stack + lamson, then overlay
# SHIVA's analyzer modules, phishing package, and hpfeeds senders.
analyzer () {
	printf "\n\n[*] Setting up SHIVA Analyzer!"
	
	cd $INSTALL_PATH
	printf "\n[*] Creating Virtual Environment:\n"
	virtualenv shivaAnalyzer
	cd shivaAnalyzer
	source bin/activate
	
	printf "\n[*] Installing required python modules for analyzer:\n"
	# Pinned versions: numpy/scipy/scikit-learn must be installed before
	# the packages that compile against them.
	pip install numpy==1.9.2
	pip install scipy==0.16.0
	pip install scikit-learn==0.17
	pip install matplotlib==1.4.3
	easy_install -U distribute
	pip install cython==0.20.2
	pip install apscheduler==2.1.2
	pip install MySQL-python==1.2.5
	pip install ssdeep==3.1
	pip install docutils==0.12
	pip install pbr==0.9
	pip install python-daemon==2.0.2
	pip install beautifulsoup4==4.4.0
	pip install cherrypy==3.8.0
	pip install requests==2.8.1
	pip install mako==1.0.3
	pip install python-Levenshtein==0.12.0
	
	printf "\n[*] Installing Lamson (analyzer) and creating project:\n"
	pip install lamson==1.3.4
	lamson gen -project analyzer
	
	printf "\n[*] Copying neccesary files:\n"
	cp -v $WORK_PATH/analyzer/core/server.py $INSTALL_PATH/shivaAnalyzer/lib/python2.7/site-packages/lamson/
	cp -v $WORK_PATH/analyzer/core/*.py $INSTALL_PATH/shivaAnalyzer/lib/python2.7/site-packages/lamson/
	
	
	mkdir -p $INSTALL_PATH/shivaAnalyzer/lib/python2.7/site-packages/lamson/shiva_phishing/
	cp -v $WORK_PATH/analyzer/core/shiva_phishing/*.py $INSTALL_PATH/shivaAnalyzer/lib/python2.7/site-packages/lamson/shiva_phishing/
	
	mkdir -p $INSTALL_PATH/shivaAnalyzer/lib/python2.7/site-packages/lamson/hpfeeds/
	cp -rv $WORK_PATH/hpfeeds/sendfiles.py $INSTALL_PATH/shivaAnalyzer/lib/python2.7/site-packages/lamson/hpfeeds/
	cp -rv $WORK_PATH/hpfeeds/hpfeeds.py $INSTALL_PATH/shivaAnalyzer/lib/python2.7/site-packages/lamson/hpfeeds/
	
	cp -v $WORK_PATH/analyzer/config/boot.py $INSTALL_PATH/shivaAnalyzer/analyzer/config/
	cp -v $WORK_PATH/analyzer/config/settings.py $INSTALL_PATH/shivaAnalyzer/analyzer/config/
	
	cp -v $WORK_PATH/helpers/clearlogs.sh $INSTALL_PATH/shivaAnalyzer/analyzer/logs/
	
	# NOTE(review): helpers/generate_stats.sh does not appear in the repo
	# tree -- confirm it exists, otherwise this cp fails under `bash -e`.
	cp -v $WORK_PATH/helpers/generate_stats.sh $INSTALL_PATH/shivaAnalyzer/bin/
	
	printf "\n[+] Setting up Shiva Analyzer done!\n"
	deactivate
	
}
203 |
# Create the runtime directory tree (queue maildir, spam/attachment archives)
# and rewrite the placeholder paths in shiva.conf to point at it.
create_dirs () {
	printf "\n[*] Creating necessary folders and updating configuration files.....\n"
	
	# -p makes this step idempotent: the script runs under `bash -e`, so a
	# plain mkdir on an already-existing directory (e.g. when resuming a
	# failed install) would abort the whole installer.
	mkdir -p $INSTALL_PATH/queue
	mkdir -p $INSTALL_PATH/queue/new
	mkdir -p $INSTALL_PATH/queue/cur
	mkdir -p $INSTALL_PATH/queue/tmp
	mkdir -p $INSTALL_PATH/distorted
	mkdir -p $INSTALL_PATH/attachments
	mkdir -p $INSTALL_PATH/attachments/inlines
	mkdir -p $INSTALL_PATH/attachments/hpfeedattach
	mkdir -p $INSTALL_PATH/rawspams
	mkdir -p $INSTALL_PATH/rawspams/spam
	mkdir -p $INSTALL_PATH/rawspams/phishing
	mkdir -p $INSTALL_PATH/rawspams/hpfeedspam
	mkdir -p $INSTALL_PATH/raw_html
	
	# Escape slashes so the absolute path survives inside sed 's///'.
	ESCAPED_PATH=$(echo $INSTALL_PATH | sed -s 's/\//\\\//g')
	
	# Now changing the paths in shiva.conf
	sed -i "s/queuepath : somepath/queuepath : $ESCAPED_PATH\/queue\//g" $INSTALL_PATH/shiva.conf
	sed -i "s/undeliverable_path : somepath/undeliverable_path : $ESCAPED_PATH\/distorted\//g" $INSTALL_PATH/shiva.conf
	sed -i "s/rawspampath : somepath/rawspampath : $ESCAPED_PATH\/rawspams\//g" $INSTALL_PATH/shiva.conf
	sed -i "s/hpfeedspam : somepath/hpfeedspam : $ESCAPED_PATH\/rawspams\/hpfeedspam\//g" $INSTALL_PATH/shiva.conf
	sed -i "s/attachpath : somepath/attachpath : $ESCAPED_PATH\/attachments\//g" $INSTALL_PATH/shiva.conf
	sed -i "s/inlinepath : somepath/inlinepath : $ESCAPED_PATH\/attachments\/inlines\//g" $INSTALL_PATH/shiva.conf
	sed -i "s/hpfeedattach : somepath/hpfeedattach : $ESCAPED_PATH\/attachments\/hpfeedattach\//g" $INSTALL_PATH/shiva.conf
	printf "\n[+] All done - phew!!!. Refer to User Manual to further customize exim MTA, shiva.conf configuration file and starting honeyp0t\n\n"
	
}
234 |
# Print a red error banner with the given message and abort the installer.
die () {
	# Fixed: pass $1 as a %s argument rather than splicing it into the
	# printf format string, where a message containing % or \ would break.
	printf "\n\e[1;31m[!] Error!\e[0m %s\n" "$1"
	exit 1
}
239 |
# Run the full install pipeline in dependency order.
installation () {
	prerequisites    # verify required system packages
	helpers          # copy helper files, generate init/update scripts
	dbcreate         # ask about database storage, patch shiva.conf
	receiver         # build receiver virtualenv + lamson project
	analyzer         # build analyzer virtualenv + lamson project
	create_dirs      # create runtime dirs and finalize shiva.conf paths
}
248 |
banner
printf "If anything goes wrong, delete newly created directory 'shiva' and start again\n"
read -p "Press enter to continue installation...";
# Refuse to run as root: the honeypot and its virtualenvs are meant to be
# owned and run by an unprivileged user.
if [ "$UID" == "0" ] || [ "$EUID" == "0" ]
  then
    printf "\n[!] Drop your privileges and run as non-root user.....aborting installation\n\n"
    exit 1
fi

installation
259 |
260 |
--------------------------------------------------------------------------------
/receiver/config/boot.py:
--------------------------------------------------------------------------------
from config import settings
from lamson.routing import Router
from lamson.server import Relay, SMTPReceiver
from lamson import view, queue
import logging
import logging.config
import jinja2

# Relative path: lamson is expected to be started from the project root.
logging.config.fileConfig("config/logging.conf")

# the relay host to actually send the final message to
settings.relay = Relay(host=settings.relay_config['host'],
                       port=settings.relay_config['port'], debug=1)

# where to listen for incoming messages
settings.receiver = SMTPReceiver(settings.receiver_config['host'],
                                 settings.receiver_config['port'])

# Wire up routing: load the handlers listed in settings; unroutable mail
# lands in the run/undeliverable maildir.
Router.defaults(**settings.router_defaults)
Router.load(settings.handlers)
Router.RELOAD=True
Router.UNDELIVERABLE_QUEUE=queue.Queue("run/undeliverable")

# Jinja2 template loader for view rendering (app/templates).
view.LOADER = jinja2.Environment(
    loader=jinja2.PackageLoader(settings.template_config['dir'],
                                settings.template_config['module']))
--------------------------------------------------------------------------------
/receiver/config/settings.py:
--------------------------------------------------------------------------------
# This file contains python variables that configure Lamson for email processing.
import logging
import ConfigParser
import os

# shiva.conf lives three levels above this module in the installed layout
# (see install.sh) -- TODO confirm if the deployment layout ever changes.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../shiva.conf"
config = ConfigParser.ConfigParser()
config.read(confpath)
# Address and port the honeypot's SMTP receiver listens on.
host = config.get('receiver', 'listenhost')
port = config.getint('receiver', 'listenport')

relay_config = {'host': 'localhost', 'port': 8825}
receiver_config = {'host': host, 'port': port}
# Handler modules the Router loads; spampot does the actual capture.
handlers = ['app.handlers.spampot']
router_defaults = {'host': '.+'}
template_config = {'dir': 'app', 'module': 'templates'}
--------------------------------------------------------------------------------
/receiver/core/queue.py:
--------------------------------------------------------------------------------
1 | """
2 | Simpler queue management than the regular mailbox.Maildir stuff. You
3 | do get a lot more features from the Python library, so if you need
4 | to do some serious surgery go use that. This works as a good
5 | API for the 90% case of "put mail in, get mail out" queues.
6 | """
7 |
8 | import mailbox
9 | from lamson import mail
10 | import hashlib
11 | import socket
12 | import time
13 | import os
14 | import errno
15 | import logging
16 | import ConfigParser
17 |
# we calculate this once, since the hostname shouldn't change for every
# email we put in a queue
#HASHED_HOSTNAME = hashlib.md5(socket.gethostname()).hexdigest()
#HOSTNAME = socket.gethostname()

# shiva.conf sits five directories above this module's deployed location
# inside the virtualenv (see install.sh) -- TODO confirm depth on deploy.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../../../shiva.conf"
config = ConfigParser.ConfigParser()
config.read(confpath)
# Sensor name is embedded in queued file names to identify this honeypot.
sensorName = config.get('receiver', 'sensorname')
PEER = '' # Shiva - PEER gets its value from server.py module(process_message) when ever a new spam is received
28 |
class SafeMaildir(mailbox.Maildir):
    # Overrides Maildir's tmp-file naming so queued files carry the source
    # IP (from the module-level PEER set by server.py) and this sensor's
    # name instead of the local hostname.
    def _create_tmp(self):
        global PEER
        logging.critical("PEER in queue: %s", PEER) # Shiva - Appending IP address of source to file name
        now = time.time()
        # PEER[0] is the peer address portion of the (host, port) socket
        # pair assigned by process_message() -- TODO confirm it is always
        # set before the first message is queued (module default is '').
        uniq = "%s.M%sP%sQ%s-%s-%s" % (int(now), int(now % 1 * 1e6), os.getpid(),
                                 mailbox.Maildir._count, PEER[0], sensorName)
        path = os.path.join(self._path, 'tmp', uniq)
        try:
            os.stat(path)
        except OSError, e:
            if e.errno == errno.ENOENT:
                # Name is free: claim it via mailbox's careful creator.
                mailbox.Maildir._count += 1
                try:
                    return mailbox._create_carefully(path)
                except OSError, e:
                    if e.errno != errno.EEXIST:
                        raise
            else:
                raise

        # Fall through to here if stat succeeded or open raised EEXIST.
        raise mailbox.ExternalClashError('Name clash prevented file creation: %s' % path)
52 |
53 |
class QueueError(Exception):
    """Error raised for queue failures, keeping the message and raw data."""

    def __init__(self, msg, data):
        super(QueueError, self).__init__(msg)
        self._message = msg
        self.data = data
60 |
61 |
class Queue(object):
    """
    Provides a simplified API for dealing with 'queues' in Lamson.
    It currently just supports maildir queues since those are the
    most robust, but could implement others later.
    """

    def __init__(self, queue_dir, safe=True, pop_limit=0, oversize_dir=None):
        """
        This gives the Maildir queue directory to use, and whether you want
        this Queue to use the SafeMaildir variant which hashes the hostname
        so you can expose it publicly.

        The pop_limit and oversize_queue both set a upper limit on the mail
        you pop out of the queue. The size is checked before any Lamson
        processing is done and is based on the size of the file on disk. The
        purpose is to prevent people from sending 10MB attachments. If a
        message is over the pop_limit then it is placed into the
        oversize_dir (which should be a maildir).

        The oversize protection only works on pop messages off, not
        putting them in, get, or any other call. If you use get you can
        use self.oversize to also check if it's oversize manually.
        """
        self.dir = queue_dir

        if safe:
            self.mbox = SafeMaildir(queue_dir)
        else:
            self.mbox = mailbox.Maildir(queue_dir)

        self.pop_limit = pop_limit

        if oversize_dir:
            # Constructing a Maildir creates the directory structure as a
            # side effect; the instance itself is not kept.
            if not os.path.exists(oversize_dir):
                osmb = mailbox.Maildir(oversize_dir)

            self.oversize_dir = os.path.join(oversize_dir, "new")

            if not os.path.exists(self.oversize_dir):
                os.mkdir(self.oversize_dir)
        else:
            self.oversize_dir = None

    def push(self, message):
        """
        Pushes the message onto the queue. Remember the order is probably
        not maintained. It returns the key that gets created.
        """
        return self.mbox.add(str(message))

    def pop(self):
        """
        Pops a message off the queue, order is not really maintained
        like a stack.

        It returns a (key, message) tuple for that item.
        """
        for key in self.mbox.iterkeys():
            over, over_name = self.oversize(key)

            if over:
                # Oversize messages are moved aside (or deleted) rather
                # than returned; the loop then tries the next key.
                if self.oversize_dir:
                    logging.info("Message key %s over size limit %d, moving to %s.",
                                key, self.pop_limit, self.oversize_dir)
                    os.rename(over_name, os.path.join(self.oversize_dir, key))
                else:
                    logging.info("Message key %s over size limit %d, DELETING (set oversize_dir).",
                                key, self.pop_limit)
                    os.unlink(over_name)
            else:
                # NOTE: the finally clause removes the message from the
                # maildir even when get() raised QueueError, so a failed
                # pop still consumes the message.
                try:
                    msg = self.get(key)
                except QueueError, exc:
                    raise exc
                finally:
                    self.remove(key)
                return key, msg

        return None, None

    def get(self, key):
        """
        Get the specific message referenced by the key. The message is NOT
        removed from the queue.

        Returns None if the key has no file or the message fails to decode.
        """
        msg_file = self.mbox.get_file(key)

        if not msg_file:
            return None

        msg_data = msg_file.read()

        try:
            return mail.MailRequest(self.dir, None, None, msg_data)
        except Exception, exc:
            logging.exception("Failed to decode message: %s" % exc, msg_data)
            return None


    def remove(self, key):
        """Remove the message with the given key from the queue."""
        self.mbox.remove(key)
    
    def count(self):
        """Returns the number of messages in the queue."""
        return len(self.mbox)

    def clear(self):
        """
        Clears out the contents of the entire queue.
        Warning: This could be horribly inefficient since it
        basically pops until the queue is empty.
        """
        # man this is probably a really bad idea
        while self.count() > 0:
            self.pop()
    
    def keys(self):
        """
        Returns the keys in the queue.
        """
        return self.mbox.keys()

    def oversize(self, key):
        # Returns (is_over_limit, file_name); (False, None) when no limit
        # is configured.  Size check is on the on-disk file in new/.
        if self.pop_limit:
            file_name = os.path.join(self.dir, "new", key)
            return os.path.getsize(file_name) > self.pop_limit, file_name
        else:
            return False, None
192 |
193 |
194 |
195 |
--------------------------------------------------------------------------------
/receiver/core/server.py:
--------------------------------------------------------------------------------
1 | """
2 | The majority of the server related things Lamson needs to run, like receivers,
3 | relays, and queue processors.
4 | """
5 |
6 | import smtplib
7 | import smtpd
8 | import asyncore
9 | import threading
10 | import socket
11 | import ConfigParser
12 | import logging
13 | import os
14 | from lamson import queue, mail, routing
15 | import time
16 | import traceback
17 | from lamson.bounce import PRIMARY_STATUS_CODES, SECONDARY_STATUS_CODES, COMBINED_STATUS_CODES
18 | import subprocess
19 |
20 | from apscheduler.scheduler import Scheduler
21 |
def undeliverable_message(raw_message, failure_type):
    """Stash a message that could not be delivered.

    If routing.Router.UNDELIVERABLE_QUEUE is configured, the raw message is
    pushed onto it and the failure is logged with the queue key; otherwise
    nothing happens (the message is effectively dropped).
    """
    if not routing.Router.UNDELIVERABLE_QUEUE:
        return

    key = routing.Router.UNDELIVERABLE_QUEUE.push(raw_message)
    logging.error("Failed to deliver message because of %r, put it in "
                  "undeliverable queue with key %r", failure_type, key)
32 |
class SMTPError(Exception):
    """
    You can raise this error when you want to abort with a SMTP error code to
    the client. This is really only relevant when you're using the
    SMTPReceiver and the client understands the error.

    If you give a message than it'll use that, but it'll also produce a
    consistent error message based on your code. It uses the errors in
    lamson.bounce to produce them.
    """
    def __init__(self, code, message=None):
        self.code = code
        if message:
            self.message = message
        else:
            # No (truthy) message supplied: derive one from the code.
            self.message = self.error_for_code(code)

        Exception.__init__(self, "%d %s" % (self.code, self.message))

    def error_for_code(self, code):
        # A code is expected to be exactly three digits, one per tier.
        first, second, _third = str(code)

        primary = PRIMARY_STATUS_CODES.get(first, "")
        secondary = SECONDARY_STATUS_CODES.get(second, "")
        combined = COMBINED_STATUS_CODES.get(primary + secondary, "")

        return " ".join([primary, secondary, combined]).strip()
57 |
58 |
class Relay(object):
    """
    Used to talk to your "relay server" or smart host, this is probably the most
    important class in the handlers next to the lamson.routing.Router.
    It supports a few simple operations for sending mail, replying, and can
    log the protocol it uses to stderr if you set debug=1 on __init__.
    """
    def __init__(self, host='127.0.0.1', port=25, username=None, password=None,
                 ssl=False, starttls=False, debug=0):
        """
        The hostname and port we're connecting to, and the debug level (default to 0).
        Optional username and password for smtp authentication.
        If ssl is True smtplib.SMTP_SSL will be used.
        If starttls is True (and ssl False), smtp connection will be put in TLS mode.
        It does the hard work of delivering messages to the relay host.
        """
        self.hostname = host
        self.port = port
        self.debug = debug
        self.username = username
        self.password = password
        self.ssl = ssl
        self.starttls = starttls

    def configure_relay(self, hostname):
        """Open and return an SMTP(_SSL) connection to hostname:self.port,
        applying STARTTLS and authentication as configured."""
        if self.ssl:
            relay_host = smtplib.SMTP_SSL(hostname, self.port)
        else:
            relay_host = smtplib.SMTP(hostname, self.port)

        relay_host.set_debuglevel(self.debug)

        if self.starttls:
            relay_host.starttls()
        if self.username and self.password:
            relay_host.login(self.username, self.password)

        assert relay_host, 'Code error, tell Zed.'
        return relay_host

    def deliver(self, message, To=None, From=None):
        """
        Takes a fully formed email message and delivers it to the
        configured relay server.

        You can pass in an alternate To and From, which will be used in the
        SMTP send lines rather than what's in the message.
        """
        recipient = To or message['To']
        sender = From or message['From']

        hostname = self.hostname or self.resolve_relay_host(recipient)

        try:
            relay_host = self.configure_relay(hostname)
        except socket.error:
            # Connection failure is logged and the message silently dropped;
            # callers do not get an exception.
            logging.exception("Failed to connect to host %s:%d" % (hostname, self.port))
            return

        relay_host.sendmail(sender, recipient, str(message))
        relay_host.quit()

    def resolve_relay_host(self, To):
        """Look up the MX host for the recipient's domain, falling back to
        the domain itself when no MX record exists."""
        import DNS
        address, target_host = To.split('@')
        mx_hosts = DNS.mxlookup(target_host)

        if not mx_hosts:
            logging.debug("Domain %r does not have an MX record, using %r instead.", target_host, target_host)
            return target_host
        else:
            logging.debug("Delivering to MX record %r for target %r", mx_hosts[0], target_host)
            return mx_hosts[0][1]


    def __repr__(self):
        """Used in logging and debugging to indicate where this relay goes."""
        # Fixed: the format string had been reduced to "" (likely by HTML
        # stripping of the angle brackets), which made "" % (a, b) raise
        # TypeError.  Restored the original lamson representation.
        return "<Relay to (%s:%d)>" % (self.hostname, self.port)


    def reply(self, original, From, Subject, Body):
        """Calls self.send but with the from and to of the original message reversed."""
        self.send(original['from'], From=From, Subject=Subject, Body=Body)

    def send(self, To, From, Subject, Body):
        """
        Does what it says, sends an email. If you need something more complex
        then look at lamson.mail.MailResponse.
        """
        msg = mail.MailResponse(To=To, From=From, Subject=Subject, Body=Body)
        self.deliver(msg)
150 |
151 |
152 |
class SMTPReceiver(smtpd.SMTPServer):
#class SMTPReceiver(SMTPServer):
    """Receives emails and hands it to the Router for further processing."""

    def __init__(self, host='127.0.0.1', port=8825):
        """
        Initializes to bind on the given port and host/ipaddress. Typically
        in deployment you'd give 0.0.0.0 for "all internet devices" but consult
        your operating system.

        This uses smtpd.SMTPServer in the __init__, which means that you have to
        call this far after you use python-daemonize or else daemonize will
        close the socket.
        """
        self.credentials = None
        self.authenabled = False

        # shiva.conf sits five directories above the deployed module inside
        # the virtualenv (see install.sh) -- TODO confirm depth on deploy.
        confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../../../shiva.conf"
        config = ConfigParser.ConfigParser()
        config.read(confpath)
        self.authenabled = config.getboolean('receiver', 'authenabled')

        if self.authenabled:
            self.credentials = {'username' : config.get('receiver', 'smtpuser'), 'password' : config.get('receiver', 'smtppasswd')}

        self.host = host
        self.port = port
        # NOTE(review): require_authentication/credentials are not kwargs of
        # the stock stdlib smtpd.SMTPServer -- this relies on the patched
        # smtpd.py that install.sh copies into lamson; confirm it is in place.
        smtpd.SMTPServer.__init__(
            self,
            (self.host, self.port),
            None,
            require_authentication=self.authenabled,
            credentials=self.credentials
        )
        #SMTPServer.__init__(self, (self.host, self.port), None)
        
        #-----------------------
        # Periodically restart the receiver process via restart_receiver.sh
        # (see restart() at module level).
        sched = Scheduler()
        logging.info("Scheduling Job.")
        sched.add_interval_job(restart, minutes=90)
        sched.start()
        #-----------------------

    def start(self):
        """
        Kicks everything into gear and starts listening on the port. This
        fires off threads and waits until they are done.
        """

        logging.info("SMTPReceiver started on %s:%d." % (self.host, self.port))
        self.poller = threading.Thread(target=asyncore.loop,
                kwargs={'timeout':0.1, 'use_poll':True})
        self.poller.start()

    def process_message(self, Peer, From, To, Data):
        """
        Called by smtpd.SMTPServer when there's a message received.
        """
        queue.PEER = Peer    # iSpam - Passing Remote socket to variable PEER defined in 'queue.py' module
        
        try:
            logging.debug("Message received from Peer: %r, From: %r, to To %r." % (Peer, From, To))
            routing.Router.deliver(mail.MailRequest(Peer, From, To, Data))
        except SMTPError, err:
            # looks like they want to return an error, so send it out
            return str(err)
            # NOTE(review): the call below is unreachable -- it sits after
            # the return, so SMTPError'd messages never reach the
            # undeliverable queue.  Left as-is pending a deliberate fix.
            undeliverable_message(Data, "Handler raised SMTPError on purpose: %s" % err)
        except:
            logging.exception("Exception while processing message from Peer: %r, From: %r, to To %r." %
                              (Peer, From, To))
            undeliverable_message(Data, "Error in message %r:%r:%r, look in logs." % (Peer, From, To))


    def close(self):
        """Doesn't do anything except log who called this, since nobody should. Ever."""
        logging.error(traceback.format_exc())
230 |
231 |
class QueueReceiver(object):
    """
    Rather than listen on a socket this will watch a queue directory and
    process messages it recieves from that. It works in almost the exact
    same way otherwise.
    """

    def __init__(self, queue_dir, sleep=12, size_limit=0, oversize_dir=None):
        """
        The router should be fully configured and ready to work, the
        queue_dir can be a fully qualified path or relative.
        """
        self.queue = queue.Queue(queue_dir, pop_limit=size_limit,
                                 oversize_dir=oversize_dir)
        self.queue_dir = queue_dir
        self.sleep = sleep

    def start(self, one_shot=False):
        """
        Start simply loops indefinitely sleeping and pulling messages
        off for processing when they are available.

        If you give one_shot=True it will run once rather than do a big
        while loop with a sleep.
        """
        logging.info("In Receiver server.py")
        logging.info("Queue receiver started on queue dir %s" %
                     (self.queue_dir))
        logging.debug("Sleeping for %d seconds..." % self.sleep)

        # NOTE: a second Queue is opened on the same dir (without the
        # size-limit options passed to __init__'s self.queue).
        inq = queue.Queue(self.queue_dir)

        while True:
            keys = inq.keys()

            for key in keys:
                msg = inq.get(key)

                if msg:
                    logging.debug("Pulled message with key: %r off", key)
                    self.process_message(msg)
                    logging.debug("Removed %r key from queue.", key)
	    
	        # messages are removed whether processing succeeded or not
	        inq.remove(key)

            if one_shot: 
                return
            else: 
                time.sleep(self.sleep)

    def process_message(self, msg):
        """
        Exactly the same as SMTPReceiver.process_message but just designed for the queue's
        quirks.
        """

        try:
            Peer = self.queue_dir # this is probably harmless but I should check it
            From = msg['from']
            To = [msg['to']]

            logging.debug("Message received from Peer: %r, From: %r, to To %r." % (Peer, From, To))
            routing.Router.deliver(msg)
        except SMTPError, err:
            # looks like they want to return an error, so send it out
            # (queue receivers cannot actually return SMTP errors)
            logging.exception("Raising SMTPError when running in a QueueReceiver is unsupported.")
            undeliverable_message(msg.original, err.message)
        except:
            logging.exception("Exception while processing message from Peer: "
                              "%r, From: %r, to To %r." % (Peer, From, To))
            undeliverable_message(msg.original, "Router failed to catch exception.")
303 |
def restart():
    """Bounce the receiver by spawning restart_receiver.sh.

    Invoked every 90 minutes by the scheduler set up in
    SMTPReceiver.__init__; failures to spawn are logged, never raised.
    """
    logging.info("Stopping receiver!")
    try:
        subprocess.Popen(["bash", "restart_receiver.sh"])
    except Exception as exc:
        logging.error(exc)
310 |
311 |
312 |
313 |
--------------------------------------------------------------------------------
/receiver/handlers/forward.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | 2 Implements a forwarding handler that will take anything it receives and
4 | 3 forwards it to the relay host. It is intended to use with the
5 | 4 lamson.routing.RoutingBase.UNDELIVERABLE_QUEUE if you want mail that Lamson
6 | 5 doesn't understand to be delivered like normal. The Router will dump
7 | 6 any mail that doesn't match into that queue if you set it, and then you can
8 | 7 load this handler into a special queue receiver to have it forwarded on.
9 | 8
10 | 9 BE VERY CAREFUL WITH THIS. It should only be used in testing scenarios as
11 | 10 it can turn your server into an open relay if you're not careful. You
12 | 11 are probably better off writing your own version of this that knows a list
13 | 12 of allowed hosts your machine answers to and only forwards those.
14 | 13 """
15 |
16 | from lamson.routing import route, stateless
17 | from config import settings
18 | import logging, os
19 | import spampot
20 |
21 | #os.system("echo 'forward-after-import' >> /root/Desktop/LamsonHoneyMail/MyMailServer/test.txt")
22 |
def forward_handler():
    """Register a stateless catch-all route that relays every message.

    See the module docstring: this can turn the server into an open relay,
    so it is meant for testing only.
    """
    @route("(to)@(host)", to=".+", host=".+")
    @stateless
    def START(message, to=None, host=None):
        logging.debug("MESSAGE to %s@%s forwarded to the relay host.", to, host)
        settings.relay.deliver(message)
        # NOTE(review): the visible spampot.py defines neither `cwd` nor
        # `debugFile`; this line would raise AttributeError -- confirm.
        # `with` guarantees the debug file is closed even if write fails.
        with open(spampot.cwd + '/' + spampot.debugFile, 'a') as debug_fd:
            debug_fd.write("[+] >>>>>>> Executed handlers.FORWARD \n")
33 |
--------------------------------------------------------------------------------
/receiver/handlers/log.py:
--------------------------------------------------------------------------------
1 | from lamson.routing import route, stateless
2 | import logging, os
3 | import spampot
4 |
def log_handler():
    """Register a stateless catch-all route that logs each recipient."""
    @route("(to)@(host)", to=".+", host=".+")
    @stateless
    def START(message, to=None, host=None):
        # Lazy %-args so the string is only built when DEBUG is enabled.
        logging.debug("MESSAGE to %s@%s", to, host)
11 |
--------------------------------------------------------------------------------
/receiver/handlers/queue.py:
--------------------------------------------------------------------------------
1 | """
2 | 2 Implements a handler that puts every message it receives into
3 | 3 the run/queue directory. It is intended as a debug tool so you
4 | 4 can inspect messages the server is receiving using mutt or
5 | 5 the lamson queue command.
6 | 6 """
7 |
8 | from lamson.routing import route_like, stateless, nolocking, route
9 | from lamson import queue, handlers
10 | import logging
11 | import os, time
12 | from config import settings # for forward handler
13 | import shutil
14 | import re
15 | import spampot
16 |
17 | def queue_handler():
18 | @route("(to)@(host)", to=".+", host=".+")
19 | @stateless
20 | @nolocking
21 | def START(message, to=None, host=None):
22 | q = queue.Queue(spampot.pathOfQueue)
23 | '''
24 | lamson has been found missing delivering mails to recipients in 'cc' and 'bcc'.
25 | Upto this point, lamson perfectly determines recipients combining 'to' and 'host' variables but always pushes 'message' in queue.
26 | The issue is, 'message' contains just the original 'To' recipent. Hence, though lamson successfully determines 'cc'/'bcc', the 'message' misses that.
27 | Using following dirty regex, 'To' field is replaced with next 'cc'/'bcc' recipient with each iteration.
28 | If, including 'To', there are 4 recipient in 'cc'/'bcc', total 5 mails would be pushed in queue.
29 |
30 | '''
31 | email = "%s@%s" % (to, host)
32 | message = str(message).replace("%", "%%")
33 | new_msg = re.sub(r'(?m)^\To:.*\n?', 'To: %s\n', message, 1) % (email,)
34 | q.push(new_msg)
--------------------------------------------------------------------------------
/receiver/handlers/spampot.py:
--------------------------------------------------------------------------------
1 | # Customised handler
2 | # Atm, only handler that gets called and is loaded in settings.py modules
3 | # It further loads the handlers - log and queue
4 | # queue handler further takes care of calling "forward" handler
5 |
6 | import log, queue, forward # Our modules in app/handlers
7 | import logging, os
8 | import ConfigParser
9 |
# Resolve shiva.conf relative to this file (four directory levels up) so the
# handlers load the same config regardless of the process CWD.
confpath = os.path.dirname(os.path.realpath(__file__)) + "/../../../../shiva.conf"
config = ConfigParser.ConfigParser()
config.read(confpath)
# Host the receiver listens on, read from the [receiver] section.
host = config.get('receiver', 'listenhost')

pathOfQueue = config.get('global', 'queuepath') # Path of queue where new message arrives
pathOfUndelivered = 'run/undeliverable/new/' # Path for undelivered mails
pathOfArchive = 'logs/archive/' # Archive path for all spams received, push them to DB later

# Register the catch-all routes at import time. Only log and queue are wired
# up here; forward is imported but never registered.
# NOTE(review): forward.py reads spampot.cwd / spampot.debugFile, which this
# module does not define -- confirm before ever enabling forwarding.
log.log_handler()
queue.queue_handler()
--------------------------------------------------------------------------------
/web/css/style.css:
--------------------------------------------------------------------------------
thead {
    text-align: center;
}

thead tr {
    background: lightgray;
}

table {
    border-collapse: separate;
    /* NOTE(review): width-only shorthand renders nothing (border-style
       defaults to none); perhaps margin-bottom or border-spacing was
       intended -- confirm before changing. */
    border-bottom: 10px;
}

/* Fixed: was `hoverTable` (a nonexistent element selector); the markup
   uses class="hoverTable", as the `.hoverTable td` rule below shows. */
.hoverTable {
    width: 100%;
    border-collapse: collapse;
}
.hoverTable td {
    padding: 7px;
}

tr.regularRow {
    background: #f2f2f2;
}
tr.regularRow:hover {
    background-color: #ffff99;
}

tr.newRow {
    background: #d7eef4;
}
tr.newRow:hover {
    background-color: #ffff99;
}

span.id_link {
    font-family: "Courier New", Courier, monospace;
}

td {
    vertical-align: top;
}

td.subject_td {
    width: 50em;
}

iframe {
    border-color: black;
    border-width: 3px;
}
--------------------------------------------------------------------------------
/web/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/favicon.png
--------------------------------------------------------------------------------
/web/images/icons/delete.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/images/icons/delete.png
--------------------------------------------------------------------------------
/web/images/icons/icon_minus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/images/icons/icon_minus.png
--------------------------------------------------------------------------------
/web/images/icons/icon_plus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/images/icons/icon_plus.png
--------------------------------------------------------------------------------
/web/images/icons/small_change_none.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/images/icons/small_change_none.png
--------------------------------------------------------------------------------
/web/images/icons/small_change_to_phishing.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/images/icons/small_change_to_phishing.png
--------------------------------------------------------------------------------
/web/images/icons/small_change_to_spam.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/images/icons/small_change_to_spam.png
--------------------------------------------------------------------------------
/web/images/icons/small_marked_by_user.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mertam/shiva/5260de042b804663f0f19712be688c3ebc58c627/web/images/icons/small_marked_by_user.png
--------------------------------------------------------------------------------
/web/js/requests.js:
--------------------------------------------------------------------------------
/**
 * Ask the operator to confirm, then fire a GET request against the honeypot
 * backend and reload the page once the server answers 200.
 *
 * Shared helper deduplicating the three near-identical action functions.
 */
function _confirmAndSend(question, url) {
    var xhttp = new XMLHttpRequest();
    xhttp.onreadystatechange = function() {
        if (xhttp.readyState == 4 && xhttp.status == 200) {
            location.reload();
        }
    };

    if (confirm(question) == true) {
        xhttp.open("GET", url, true);
        xhttp.send();
    }
}

function mark_as_spam(email_id) {
    _confirmAndSend("Mark email as spam?",
                    "mark_as_spam?email_id=" + email_id);
}

function mark_as_phishing(email_id) {
    _confirmAndSend("Mark email as phishing?",
                    "mark_as_phishing?email_id=" + email_id);
}

function delete_email(email_id) {
    // Typo fix: was "Permamently".
    _confirmAndSend("Permanently remove email from honeypot?",
                    "delete_email?email_id=" + email_id);
}
--------------------------------------------------------------------------------
/web/templates/email_not_found.html:
--------------------------------------------------------------------------------
1 | <%include file="parts/header.html" />
2 |
3 | Email not found
4 |
5 | <%include file="parts/footer.html" />
--------------------------------------------------------------------------------
/web/templates/help.html:
--------------------------------------------------------------------------------
1 | <%include file="parts/header.html" />
2 |
3 | SHIVA honeypot: HELP
4 |
5 | How to work with honeypot
6 | On the main page is presented overview of last e-mails caught by honeypot. Two probabilities (Bayes and built-in classifier ) and classification result are also presented.
7 |
You can view details of an email by clicking on its id, delete it from the honeypot
8 | by clicking on the
9 | or mark is as spam/phishing by clicking on or
10 |
11 |
12 |
Sensor
13 | Sensor represents source of email.
14 |
15 |
16 |
17 |
18 | sensor
19 | description
20 |
21 |
22 |
23 |
24 | shiva
25 | email was caught by honeypot
26 |
27 |
28 | .*spamImport.*
29 | email was manually imported to honeypot as spam
30 |
31 |
32 | .*phishingImport.*
33 | email was manually imported to honeypot as phishing
34 |
35 |
36 |
37 |
38 | Phishing probabilities
39 | Honeypot computes two probabilities. Each score represents probability that email is phishing.
40 |
Classifier probability is result of phishing detection technique implemented in honeypot.
41 | Bayes probability is result of Bayes classifier in Spamassassin.
42 | IMPORTANT imported emails (sensors .*spamImport.*,.*phishingImport.*) have always default score -1, they are used as learning data only
43 |
44 |
45 | Learning honeypot
46 | After installation, the honeypot is trained on the provided data samples. This process is described in the provided package documentation.
47 |
When the honeypot catches emails, it's possible to manually re-learn it. It will take caught emails into consideration as well.
48 |
By default, the Bayes classifier boundary for marking email is 0.5. This boundary can be shifted by re-learning to a number in the interval <0.4,0.6>
49 |
50 |
URL phishing
51 | Several techniques for detecting phishing by URL are implemented (blacklists, typosquatting, HTTPS->HTTP, known domain name in other part of URL).
52 | If at least one is found in an email, it's automatically marked as phishing.
53 |
54 |
55 |
56 |
57 |
58 | Examples
59 | Examples of email caught by honeypot
60 |
61 |
62 |
63 |
64 | Id
65 | Last seen
66 | Subject
67 | Classif. prob.
68 | Bayes prob.
69 | URL phishing
70 | SensorID
71 | Status
72 | Actions
73 | Marked by human
74 |
75 |
76 |
77 |
78 |
79 | c541e48ea5f2fae9d3cbc67b8589874e
80 | 2015-11-21 19:55:41
81 | Hello Mate,
82 |
83 |
84 | 0.200
85 | 0.540
86 | NO
87 | shiva
88 | PHISHING
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 | This email was caught by honeypot (it has sensor 'shiva'). Computed scores were 0.2 and 0.54. Email was classified as 'phishing'.
98 |
99 |
100 |
101 |
102 |
103 | Id
104 | Last seen
105 | Subject
106 | Classif. prob.
107 | Bayes prob.
108 | URL phishing
109 | SensorID
110 | Status
111 | Actions
112 | Marked by human
113 |
114 |
115 |
116 |
117 |
118 |
119 | 3d4d6fb1e8488f56b5ddd09ab41d20bc
120 | 2015-11-22 09:07:20
121 | invitation EXCELLENCE IN HORIZON 2020 PROJECT DESIGN AND PROPOSAL
122 | WRITNING
123 |
124 |
125 | 0.600
126 | 0.500
127 | NO
128 | shiva
129 | SPAM
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 | This email was caught by honeypot (it has sensor 'shiva'). Computed scores were 0.6 and 0.5. Email was classified as 'phishing', but operator changed verdict to 'spam'
141 |
142 |
143 |
144 | Examples of emails imported to honeypot
145 |
146 |
147 |
148 | Id
149 | Last seen
150 | Subject
151 | Classif. prob.
152 | Bayes prob.
153 | URL phishing
154 | SensorID
155 | Status
156 | Actions
157 | Marked by human
158 |
159 |
160 |
161 |
162 | 4e66d13f1f4dfa9ec6e349687e83c4ba
163 | 2015-11-22 10:07:52
164 | SunTrust Bank: IMPORTANT FRAUD ALERT
165 |
166 |
167 | -1.000
168 | -1.000
169 | NO
170 | phishingImport_corpus
171 | PHISHING
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 | This email was imported to honeypot (it has sensor 'phishingImport_corpus'). Email was automatically marked as phishing. Since it was imported, the 'Marked by human' mark is shown.
183 |
184 |
185 |
186 |
187 |
188 |
189 | Id
190 | Last seen
191 | Subject
192 | Classif. prob.
193 | Bayes prob.
194 | URL phishing
195 | SensorID
196 | Status
197 | Actions
198 | Marked by human
199 |
200 |
201 |
202 |
203 |
204 | b976320a9d3f34a9c543475237f83ba8
205 | 2015-11-22 10:07:36
206 | First Minute 2015 v Chorvatsku
207 |
208 |
209 | -1.000
210 | -1.000
211 | NO
212 | spamImport_muni_handfilter
213 | SPAM
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 | This email was imported to honeypot (it has sensor 'spamImport_muni_handfilter'). Email was automatically marked as spam. Since it was imported, the 'Marked by human' mark is shown.
226 |
227 |
228 |
229 | Imported email is shown among new ones
230 |
231 |
232 |
233 |
234 |
235 | Id
236 | Last seen
237 | Subject
238 | Classif. prob.
239 | Bayes prob.
240 | URL phishing
241 | SensorID
242 | Status
243 | Actions
244 | Marked by human
245 |
246 |
247 |
248 |
249 |
250 | 6060ca566d4ac4ee1ee9932232face24
251 | 2015-11-30 12:52:22
252 | Re:ISO qualified PCBA contract manufacturer - from China
253 | -1.000
254 | -1.000
255 | NO
256 | spamImport_shiva_handfilter
257 | SPAM
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 | Email from imported data is shown as a new one. This may happen. Honeypot uses fuzzy hashing in order to identify unique emails.
269 | If new email is the same or similar enough to existing one, only timestamp of existing mail is updated. This is the reason why old email is displayed as new.
270 |
271 |
272 | Incoming emails are sometimes malformed or missing the subject/text part. In this case, such a malformed email may be matched to an existing malformed email.
273 |
274 |
275 |
276 | <%include file="parts/footer.html" />
277 |
278 |
--------------------------------------------------------------------------------
/web/templates/index.html:
--------------------------------------------------------------------------------
1 | <%include file="parts/header.html" />
2 |
3 | SHIVA honeypot: mainpage
4 |
5 | Runtime statistics
6 |
7 |
8 |
9 | uptime
10 | ${uptime|h}
11 |
12 |
13 | emails today
14 | ${today_mails|}
15 |
16 |
17 | emails in database
18 | ${total_mails|h}
19 |
20 |
21 |
22 | Overview of last ${count|h} emails
23 |
24 | <%include file="parts/mail_overview.html" />
25 |
26 | <%include file="parts/learning_overview.html" />
27 |
28 | <%include file="parts/footer.html" />
--------------------------------------------------------------------------------
/web/templates/learning.html:
--------------------------------------------------------------------------------
1 | <%include file="parts/header.html" />
2 |
3 | SHIVA honeypot: learning
4 |
5 | <%include file="parts/learning_overview.html"/>
6 |
7 |
8 |
9 |
10 |
11 | <%include file="parts/footer.html"/>
--------------------------------------------------------------------------------
/web/templates/list_emails.html:
--------------------------------------------------------------------------------
1 | <%include file="parts/header.html" />
2 |
3 | SHIVA honeypot: list ${count|h} emails starting from ${start|h}
4 |
5 | <%include file="parts/mail_overview.html"/>
6 |
7 |
8 | Navigate:
9 |
10 | % if start == 0:
11 | <<
12 | <
13 | % else:
14 | <%
15 | actual_start = (start - count) if (start - count) > 0 else 0
16 | %>
17 |
18 | <<
19 | <
20 |
21 | % endif
22 |
23 | % if start + count < total:
24 |
25 | <%
26 | actual_start = start + count
27 | end = total - (total % count)
28 | %>
29 | >
30 | >>
31 |
32 | % else:
33 | >
34 | >>
35 | % endif
36 |
37 | <%include file="parts/footer.html"/>
--------------------------------------------------------------------------------
/web/templates/logs.html:
--------------------------------------------------------------------------------
1 | <%include file="parts/header.html" />
2 |
3 | ${headline|h}
4 |
5 |
6 |
7 | % for log_row in rows:
8 | ${log_row|h}
9 | % endfor
10 |
11 |
12 |
13 |
14 | <%include file="parts/footer.html"/>
--------------------------------------------------------------------------------
/web/templates/parts/footer.html:
--------------------------------------------------------------------------------
1 |
2 |
10 |
11 | <%
12 | import datetime
13 | current_date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
14 | %>
15 | Rendered: ${current_date}
16 |