├── README.txt ├── sandyfiles.sql ├── src ├── __init__.py ├── attribution.py ├── create_html_template.py ├── daemon-example.py ├── daemonn.py ├── execute.py ├── extract_domain.py ├── hachoir.py ├── java_analysis.py ├── java_malware_sig.py ├── msl.py ├── mthread.py ├── mthreadbk.py ├── mysqlstuffs.py ├── proxy │ ├── .svn │ │ ├── entries │ │ ├── format │ │ ├── pristine │ │ │ ├── 98 │ │ │ │ └── 98f88925b98865cda184c1f70daeb37085d70bcd.svn-base │ │ │ └── b6 │ │ │ │ └── b660d0c3899eac087bf45a00a1be1bc292fc15e7.svn-base │ │ └── wc.db │ ├── __init__.py │ ├── build │ │ └── http-parser │ │ │ ├── LICENSE │ │ │ ├── Makefile.ext │ │ │ ├── NOTICE │ │ │ ├── PKG-INFO │ │ │ ├── README.rst │ │ │ ├── THANKS │ │ │ ├── TODO.md │ │ │ ├── build │ │ │ ├── lib.linux-i686-2.7 │ │ │ │ └── http_parser │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── _socketio.py │ │ │ │ │ ├── http.py │ │ │ │ │ ├── pyparser.py │ │ │ │ │ ├── reader.py │ │ │ │ │ └── util.py │ │ │ └── temp.linux-i686-2.7 │ │ │ │ └── http_parser │ │ │ │ └── http_parser.o │ │ │ ├── examples │ │ │ ├── httpparser.py │ │ │ ├── httpparser_from_file.py │ │ │ └── httpstream.py │ │ │ ├── http_parser │ │ │ ├── __init__.py │ │ │ ├── _socketio.py │ │ │ ├── http.py │ │ │ ├── http_parser.c │ │ │ ├── http_parser.gyp │ │ │ ├── http_parser.h │ │ │ ├── parser.c │ │ │ ├── parser.pyx │ │ │ ├── parser.so │ │ │ ├── pyparser.py │ │ │ ├── pyversion_compat.h │ │ │ ├── reader.py │ │ │ └── util.py │ │ │ ├── pip-egg-info │ │ │ └── http_parser.egg-info │ │ │ │ └── PKG-INFO │ │ │ └── setup.py │ ├── http-parser │ │ ├── .travis.yml │ │ ├── LICENSE │ │ ├── MANIFEST.in │ │ ├── Makefile │ │ ├── NOTICE │ │ ├── README.rst │ │ ├── THANKS │ │ ├── TODO.md │ │ ├── build │ │ │ ├── lib.linux-i686-2.7 │ │ │ │ └── http_parser │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── _socketio.py │ │ │ │ │ ├── http.py │ │ │ │ │ ├── parser.so │ │ │ │ │ ├── pyparser.py │ │ │ │ │ ├── reader.py │ │ │ │ │ └── util.py │ │ │ └── temp.linux-i686-2.7 │ │ │ │ └── http_parser │ │ │ │ ├── http_parser.o │ │ │ │ └── parser.o │ │ ├── debian │ │ │ ├── changelog │ │ │ ├── clean │ │ │ ├── compat │ │ │ ├── control │ │ │ ├── copyright │ │ │ ├── python-http-parser.preinst │ │ │ ├── pyversions │ │ │ ├── rules │ │ │ ├── source │ │ │ │ └── format │ │ │ └── watch │ │ ├── dist │ │ │ └── http_parser-0.8.1-py2.7-linux-i686.egg │ │ ├── examples │ │ │ ├── httpparser.py │ │ │ ├── httpparser_from_file.py │ │ │ └── httpstream.py │ │ ├── http_parser.egg-info │ │ │ └── PKG-INFO │ │ ├── http_parser │ │ │ ├── __init__.py │ │ │ ├── _socketio.py │ │ │ ├── http.py │ │ │ ├── http_parser.c │ │ │ ├── http_parser.gyp │ │ │ ├── http_parser.h │ │ │ ├── parser.c │ │ │ ├── parser.pyx │ │ │ ├── parser.so │ │ │ ├── pyparser.py │ │ │ ├── pyversion_compat.h │ │ │ ├── reader.py │ │ │ └── util.py │ │ ├── setup.py │ │ ├── testing │ │ │ └── test_parse_from_stream.py │ │ └── tox.ini │ ├── httpparser.py │ ├── msqlhttp.py │ ├── multiproc.py │ ├── multiproc_ff.py │ ├── multiproc_java.py │ ├── multiproc_links.py │ ├── multiproc_links_ie.py │ ├── parse.py │ ├── proxy.py │ ├── proxy_links.py │ ├── pysph.py │ ├── run.sh │ ├── run_mprocess.py │ ├── run_mprocess_2.py │ ├── run_mprocess_ff.py │ ├── run_mprocess_ff_1.py │ ├── run_mprocess_ie.py │ ├── sele.py │ ├── sele_ff.py │ ├── sele_ie.py │ ├── sele_java.py │ └── yara_scan_links.py ├── readme ├── samples │ ├── __init__.py │ ├── build │ │ └── readme │ ├── dfiles │ │ └── readme │ ├── jfiles │ │ ├── binaries │ │ │ └── readme │ │ ├── decompiled │ │ │ └── readme │ │ └── extracted │ │ │ └── readme │ ├── o │ │ └── readme │ └── run.py ├── 
sched.py ├── timeout.py ├── yara_scan.py └── ziplib_test.py └── yara-ctypes ├── .travis.yml ├── MANIFEST.in ├── README.rst ├── build └── lib.linux-x86_64-2.7 │ └── yara │ ├── __init__.py │ ├── libyara_wrapper.py │ ├── rules.py │ ├── rules │ ├── example │ │ └── packer_rules.yar │ └── hbgary │ │ ├── antidebug.yar │ │ ├── compiler.yar │ │ ├── compression.yar │ │ ├── fingerprint.yar │ │ ├── integerparsing.yar │ │ ├── libs.yar │ │ ├── microsoft.yar │ │ └── sockets.yar │ ├── scan.py │ └── version.py ├── dist └── yara-1.6.1-py2.7.egg ├── distribute-0.6.25-py2.7.egg ├── distribute-0.6.25.tar.gz ├── distribute_setup.py ├── docs ├── Makefile ├── make.bat └── source │ ├── conf.py │ ├── howto │ ├── build.rst │ ├── install.rst │ └── scan.rst │ ├── index.rst │ └── yara │ ├── libyara_wrapper.rst │ ├── rules.rst │ └── scan.rst ├── libs ├── ELF │ ├── 32bit │ │ └── libyara.so │ └── 64bit │ │ └── libyara.so └── WindowsPE │ ├── 32bit │ └── libyara.dll │ └── 64bit │ └── libyara.dll ├── setup.py ├── tests ├── __init__.py ├── broken_rules.yar ├── test_libyara_wrapper.py └── test_rules.py ├── yara.egg-info ├── PKG-INFO └── not-zip-safe └── yara ├── __init__.py ├── libyara_wrapper.py ├── rules.py ├── rules ├── browser │ ├── .exploits.yar.kate-swp │ ├── exploit_kits.yar │ └── exploits.yar ├── clam_av │ ├── clam_av.yar │ └── test.yar ├── comment_crew │ └── comment_crew.yar ├── example │ └── packer_rules.yar ├── hbgary │ ├── antidebug.yar │ ├── compiler.yar │ ├── compression.yar │ ├── fingerprint.yar │ ├── integerparsing.yar │ ├── libs.yar │ ├── microsoft.yar │ └── sockets.yar ├── ie │ └── exploit.yar ├── java │ └── exploit.yar ├── jsclassifier.yar ├── jsclassifier │ └── plugindetect.yar ├── jsunpack │ └── jsunpack.yar ├── urlclassifier.yar └── urlclassifier │ ├── blackhole.yar │ ├── cool.yar │ ├── crimeboss.yar │ ├── critxpack.yar │ ├── fiesta.yar │ ├── g01pack.yar │ ├── impact.yar │ ├── neutrino.yar │ ├── nuclear.yar │ ├── popads.yar │ ├── redkit.yar │ ├── safepack.yar │ ├── sakura.yar │ ├── sofosfo.yar │ ├── styx.yar │ ├── sweetorange.yar │ └── tds.yar ├── scan.py └── version.py /README.txt: -------------------------------------------------------------------------------- 1 | sandy 2 | ===== 3 | 4 | Static and dynamic exploit analysis framework. 5 | 6 | http://www.garage4hackers.com/entry.php?b=2532 7 | 8 | This is the backend code powering [Sandy] at exploit-analysis.com, so there are a lot of hard-coded path names and no centralized config file; it will take some time to clean the code up. The front-end PHP data viewer is not included with this code. 9 | 10 | Requirements: 11 | 12 | Ubuntu/Linux OS 13 | 14 | 15 | To Run: 16 | 17 | 1) Create the database sandyfiles from sandyfiles.sql (example commands below). 18 | 2) Replace all MySQL configuration values with your username, password, and database name. 19 | 3) Run Sandy Java Analysis. 20 | 21 | 22 | Starting Sandy after a restart: 23 | 24 | Make sure the network is up, so that the vSphere server's Ubuntu machine and two Windows machines can communicate with each other.
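Example for steps 1 and 2 above (a minimal sketch, assuming a local MySQL server; the root/password values are the defaults hard-coded in src/mysqlstuffs.py, so substitute your own):

#mysql -u root -p -e "CREATE DATABASE sandyfiles"
#mysql -u root -p sandyfiles < sandyfiles.sql

Then edit the PySQLPool.getNewConnection(username='root', password='password', host='localhost', db='sandyfiles') calls in the sources to use your own username, password, and database name.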
25 | 26 | Make sure DNS is set: 27 | 28 | #cat /etc/resolv.conf 29 | 30 | If no DNS is set, add a DNS server: 31 | 32 | #vi /etc/resolv.conf 33 | 34 | nameserver 8.8.8.8 35 | nameserver 4.4.2.2 36 | 37 | 38 | Starting Sandy Processors: 39 | 40 | cd /var/scan/expscanner/sandbox/sandbox/src/ 41 | 42 | nohup python samples/run.py & 43 | nohup python proxy/run_mprocess_ff.py & 44 | nohup python proxy/run_mprocess_ie.py & 45 | nohup python proxy/run_mprocess.py & 46 | 47 | 48 | Confirm the jobs are running: 49 | 50 | /var/scan/expscanner/sandbox/sandbox/src# jobs -l 51 | [1] 17613 Running nohup python samples/run.py & 52 | [2] 28140 Running nohup python proxy/run_mprocess_ff.py & 53 | [3]- 28143 Running nohup python proxy/run_mprocess_ie.py & 54 | [4]+ 28146 Exit 1 nohup python proxy/run_mprocess.py 55 | 56 | 57 | What each job does: 58 | 59 | #python samples/run.py -> Starts the module for processing JAR files. 60 | 61 | #python proxy/run_mprocess_ff.py -> Starts the job for processing URLs with Firefox [the default]. 62 | 63 | #python proxy/run_mprocess_ie.py -> Starts the job for processing URLs with Internet Explorer [optional]. 64 | 65 | 66 | #python proxy/run_mprocess.py -> Starts the job for dynamic analysis of JAR files. 67 | 68 | 69 | 70 | That should be it. 71 | 72 | 73 | 74 | 75 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2010-2013 Cuckoo Sandbox Developers. 2 | # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org 3 | # See the file 'docs/LICENSE' for copying permission. 4 | -------------------------------------------------------------------------------- /src/attribution.py: -------------------------------------------------------------------------------- 1 | import re,logging 2 | from msl import autoattribinsert,insertattribinsert 3 | 4 | 5 | def getextension(mime_typedb): 6 | mime_string = mime_typedb 7 | if "application/msword" in mime_string: 8 | logging.debug( "File is: doc") 9 | return "doc" 10 | 11 | elif "ms-excel" in mime_string: 12 | logging.debug( "File is: xls") 13 | return "xls" 14 | elif "rtf" in mime_string: 15 | logging.debug( "File is: rtf") 16 | return "rtf" 17 | else: 18 | logging.debug( "File is: unknown") 19 | return "" 20 | 21 | def getfiletype(mime_typedb): 22 | mime_string = mime_typedb 23 | if "doc" in mime_string: 24 | logging.debug( "File is: doc") 25 | return "application/msword" 26 | 27 | elif "xls" in mime_string: 28 | logging.debug( "File is: xls") 29 | return "application/ms-excel" 30 | 31 | elif "rtf" in mime_string: 32 | logging.debug( "File is: rtf") 33 | return "application/rtf" 34 | 35 | else: 36 | logging.debug( "File is: unknown") 37 | return "" 38 | 39 | def autoattrib(metadata,uid,extension): 40 | 41 | #print type(metadata),metadata[1] 42 | re_author =re.compile("Author: (.*?)'") 43 | re_cdata = re.compile("Creation date: (.*?)'") 44 | re_lmod = re.compile("Last modification: (.*?)'") 45 | re_title = re.compile("Title: (.*?)'") 46 | re_mime =re.compile("MIME type: (.*?)'") 47 | smetadata = "'".join(metadata) 48 | #smetadata = smetatdata.encode('utf-8') 49 | #print smetadata 50 | 51 | try: 52 | 53 | #re for getting data from metadata 54 | if re_author.search(smetadata): 55 | author_name = re_author.search(smetadata) 56 | author= author_name.group(1) 57 | else: 58 | author = ""
59 | 60 | 61 | if re_title.search(smetadata): 62 | doc_title = re_title.search(smetadata) 63 | title= doc_title.group(1) 64 | else: 65 | title = "" 66 | if re_cdata.search(smetadata): 67 | creation_date = re_cdata.search(smetadata) 68 | credate = creation_date.group(1) 69 | else: 70 | credate = "" 71 | if re_lmod.search(smetadata): 72 | modification_date = re_lmod.search(smetadata) 73 | moddate = modification_date.group(1) 74 | else: 75 | moddate = "" 76 | if re_mime.search(smetadata): 77 | mime_data = re_mime.search(smetadata) 78 | mime = mime_data.group(1) 79 | 80 | else: 81 | mime = getfiletype(extension) 82 | 83 | #logging.info( "%s \n %s \n %s %s\n",author_name.group(1),doc_title.group(1),creation_date.group(1),modification_date.group(1) ) 84 | #print author_name.group(1),doc_title.group(1),creation_date.group(1),modification_date.group(1) 85 | # Pass the data to insert autoattrib 86 | uid = uid 87 | 88 | attribid = insertattribinsert(author) 89 | print "Am ouside insert" 90 | print uid,author,title,credate,moddate,attribid,mime 91 | autoattribinsert(uid,author,title,credate,moddate,attribid,mime) 92 | 93 | except Exception as e: 94 | print e 95 | 96 | -------------------------------------------------------------------------------- /src/daemon-example.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys, time 4 | from daemonn import Daemon 5 | 6 | class MyDaemon(Daemon): 7 | def run(self): 8 | while True: 9 | time.sleep(1) 10 | 11 | if __name__ == "__main__": 12 | daemon = MyDaemon('/tmp/daemon-example.pid') 13 | if len(sys.argv) == 2: 14 | if 'start' == sys.argv[1]: 15 | daemon.start() 16 | elif 'stop' == sys.argv[1]: 17 | daemon.stop() 18 | elif 'restart' == sys.argv[1]: 19 | daemon.restart() 20 | else: 21 | print "Unknown command" 22 | sys.exit(2) 23 | sys.exit(0) 24 | else: 25 | print "usage: %s start|stop|restart" % sys.argv[0] 26 | sys.exit(2) 27 | 28 | 29 | print "am here" 30 | -------------------------------------------------------------------------------- /src/daemonn.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys, os, time, atexit 4 | from signal import SIGTERM 5 | 6 | class Daemon: 7 | """ 8 | A generic daemon class. 
9 | 10 | Usage: subclass the Daemon class and override the run() method 11 | """ 12 | def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'): 13 | self.stdin = stdin 14 | self.stdout = stdout 15 | self.stderr = stderr 16 | self.pidfile = pidfile 17 | 18 | def daemonize(self): 19 | """ 20 | do the UNIX double-fork magic, see Stevens' "Advanced 21 | Programming in the UNIX Environment" for details (ISBN 0201563177) 22 | http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16 23 | """ 24 | try: 25 | pid = os.fork() 26 | if pid > 0: 27 | # exit first parent 28 | sys.exit(0) 29 | except OSError, e: 30 | sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror)) 31 | sys.exit(1) 32 | 33 | # decouple from parent environment 34 | os.chdir("/") 35 | os.setsid() 36 | os.umask(0) 37 | 38 | # do second fork 39 | try: 40 | pid = os.fork() 41 | if pid > 0: 42 | # exit from second parent 43 | sys.exit(0) 44 | except OSError, e: 45 | sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror)) 46 | sys.exit(1) 47 | 48 | # redirect standard file descriptors 49 | sys.stdout.flush() 50 | sys.stderr.flush() 51 | si = file(self.stdin, 'r') 52 | so = file(self.stdout, 'a+') 53 | se = file(self.stderr, 'a+', 0) 54 | os.dup2(si.fileno(), sys.stdin.fileno()) 55 | os.dup2(so.fileno(), sys.stdout.fileno()) 56 | os.dup2(se.fileno(), sys.stderr.fileno()) 57 | 58 | # write pidfile 59 | atexit.register(self.delpid) 60 | pid = str(os.getpid()) 61 | file(self.pidfile,'w+').write("%s\n" % pid) 62 | 63 | def delpid(self): 64 | os.remove(self.pidfile) 65 | 66 | def start(self): 67 | """ 68 | Start the daemon 69 | """ 70 | # Check for a pidfile to see if the daemon already runs 71 | try: 72 | pf = file(self.pidfile,'r') 73 | pid = int(pf.read().strip()) 74 | pf.close() 75 | except IOError: 76 | pid = None 77 | 78 | if pid: 79 | message = "pidfile %s already exist. Daemon already running?\n" 80 | sys.stderr.write(message % self.pidfile) 81 | sys.exit(1) 82 | 83 | # Start the daemon 84 | self.daemonize() 85 | self.run() 86 | 87 | def stop(self): 88 | """ 89 | Stop the daemon 90 | """ 91 | # Get the pid from the pidfile 92 | try: 93 | pf = file(self.pidfile,'r') 94 | pid = int(pf.read().strip()) 95 | pf.close() 96 | except IOError: 97 | pid = None 98 | 99 | if not pid: 100 | message = "pidfile %s does not exist. Daemon not running?\n" 101 | sys.stderr.write(message % self.pidfile) 102 | return # not an error in a restart 103 | 104 | # Try killing the daemon process 105 | try: 106 | while 1: 107 | os.kill(pid, SIGTERM) 108 | time.sleep(0.1) 109 | except OSError, err: 110 | err = str(err) 111 | if err.find("No such process") > 0: 112 | if os.path.exists(self.pidfile): 113 | os.remove(self.pidfile) 114 | else: 115 | print str(err) 116 | sys.exit(1) 117 | 118 | def restart(self): 119 | """ 120 | Restart the daemon 121 | """ 122 | self.stop() 123 | self.start() 124 | 125 | def run(self): 126 | """ 127 | You should override this method when you subclass Daemon. It will be called after the process has been 128 | daemonized by start() or restart(). 
129 | """ 130 | -------------------------------------------------------------------------------- /src/execute.py: -------------------------------------------------------------------------------- 1 | import shlex, subprocess 2 | 3 | def scanfile(filepath,extension): 4 | 5 | try: 6 | 7 | if "rtf" in extension: 8 | binarypath ="/var/scan/expscanner/RTFScan.exe" 9 | else: 10 | binarypath ="/var/scan/expscanner/scan.exe" 11 | 12 | #subprocess.Popen('wine "Hello world!"', shell=True) 13 | 14 | process= subprocess.Popen(['wine',binarypath,filepath,'scan','brute'], shell=False, stdout=subprocess.PIPE) 15 | output,error = process.communicate() 16 | process.stdout.close() 17 | return output 18 | except Exception: 19 | 20 | pass 21 | 22 | def entropy(filepath,pngfile): 23 | 24 | try: 25 | 26 | #subprocess.Popen('wine "Hello world!"', shell=True) 27 | pngfile = pngfile+".png" 28 | print "I am inside entropy\n" 29 | binarypath ="/var/scan/others/png" 30 | pngoutputdir ="/var/www/sandy/entropy/"+pngfile 31 | filepathlen = len(filepath) 32 | process= subprocess.Popen([binarypath,filepath,pngoutputdir], shell=False, stdout=subprocess.PIPE) 33 | output,error = process.communicate() 34 | process.stdout.close() 35 | print output 36 | return 37 | 38 | except Exception: 39 | pass 40 | 41 | def strings(filepath): 42 | 43 | try: 44 | 45 | #subprocess.Popen('wine "Hello world!"', shell=True) 46 | print "I am inside Strings\n" 47 | binarypath ="/usr/bin/strings" 48 | process= subprocess.Popen([binarypath,filepath], shell=False, stdout=subprocess.PIPE) 49 | output,error = process.communicate() 50 | process.stdout.close() 51 | print output 52 | return output 53 | 54 | except Exception: 55 | pass 56 | 57 | 58 | def dex2jar(filepath): 59 | 60 | try: 61 | 62 | #subprocess.Popen('wine "Hello world!"', shell=True) 63 | print "I am inside dex2jar\n" 64 | binarypath ="/bin/sh" 65 | d2jar ="/var/scan/others/dex2jar/dex2jar.sh" 66 | process= subprocess.Popen([binarypath,d2jar,filepath], shell=False, stdout=subprocess.PIPE) 67 | output,error = process.communicate() 68 | process.stdout.close() 69 | print output 70 | return output 71 | 72 | except Exception: 73 | pass 74 | 75 | def filetype(filepath): 76 | 77 | try: 78 | 79 | #subprocess.Popen('wine "Hello world!"', shell=True) 80 | print "I am inside filetype\n" 81 | binarypath ="/usr/bin/file" 82 | filepathlen = len(filepath) 83 | process= subprocess.Popen([binarypath,filepath], shell=False, stdout=subprocess.PIPE) 84 | output,error = process.communicate() 85 | process.stdout.close() 86 | print output 87 | return output[filepathlen+1:] 88 | except Exception: 89 | pass 90 | 91 | def retrojad(filepath): 92 | 93 | try: 94 | 95 | #subprocess.Popen('wine "Hello world!"', shell=True) 96 | print "I am inside retrojad\n" 97 | binarypath ="/var/scan/others/jadretro" 98 | #jadretro filename.class 99 | process= subprocess.Popen([binarypath,filepath], shell=False, stdout=subprocess.PIPE) 100 | output,error = process.communicate() 101 | process.stdout.close() 102 | print output 103 | #return output[filepathlen+1:] 104 | except Exception: 105 | pass 106 | 107 | def jad(filepath,folder): 108 | 109 | try: 110 | 111 | #subprocess.Popen('wine "Hello world!"', shell=True) 112 | print "I am inside jad\n" 113 | binarypath ="/var/scan/others/jad" 114 | #jad -d converted XXX.class 115 | process= subprocess.Popen([binarypath,"-s",".java","-o","-d",folder,filepath], shell=False, stdout=subprocess.PIPE) 116 | output,error = process.communicate() 117 | process.stdout.close() 118 | print output 119 | 
#return output[filepathlen+1:] 120 | except Exception: 121 | pass 122 | -------------------------------------------------------------------------------- /src/extract_domain.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | sas = open("test.txt","r") 4 | 5 | data = sas.read() 6 | data2 = sas.readlines() 7 | possible_urls = [] 8 | possible_urls = re.findall(r'(https?://[^\s]+)', data) 9 | possible_url2 = re.findall(r'(www[^\s]+)', data) 10 | 11 | possible_ips = re.findall( r'[0-9]+(?:\.[0-9]+){3}', data ) 12 | for pu in possible_url2: 13 | possible_urls.append(pu) 14 | for ip in possible_ips: 15 | possible_urls.append(ip) 16 | 17 | 18 | 19 | 20 | for i in possible_urls: 21 | print i 22 | -------------------------------------------------------------------------------- /src/hachoir.py: -------------------------------------------------------------------------------- 1 | from hachoir_core.error import HachoirError 2 | from hachoir_core.cmd_line import unicodeFilename 3 | from hachoir_parser import createParser 4 | from hachoir_core.tools import makePrintable 5 | from hachoir_metadata import extractMetadata 6 | from hachoir_core.i18n import getTerminalCharset 7 | from sys import argv, stderr, exit 8 | #from timeout import timeout 9 | 10 | 11 | def getmeta(tempfile): 12 | try: 13 | 14 | filename = tempfile 15 | filename, realname = unicodeFilename(filename), filename 16 | parser = createParser(filename, realname) 17 | if not parser: 18 | print >>stderr, "Unable to parse file" 19 | return "error" 20 | try: 21 | metadata = extractMetadata(parser) 22 | except HachoirError, err: 23 | print "Metadata extraction error: %s" % unicode(err) 24 | metadata = None 25 | if not metadata: 26 | print "Unable to extract metadata" 27 | return "error" 28 | 29 | text = metadata.exportPlaintext() 30 | charset = getTerminalCharset() 31 | return text 32 | except Exception: 33 | print "Exception In Processing\n" 34 | pass 35 | 36 | ''' 37 | for line in text: 38 | 39 | print makePrintable(line, charset) 40 | ''' 41 | -------------------------------------------------------------------------------- /src/java_analysis.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Input: Path to folder with .java sourc code 3 | Function returns class name and checks if jar is a midlet or applet 4 | ''' 5 | 6 | import os,re 7 | from msl import update_class 8 | 9 | def read_java_code_domain(dir_read): 10 | print "Directory to scan"+dir_read 11 | possible_urls = [] 12 | os.chdir(dir_read) 13 | for files in os.listdir("."): 14 | #If Applet or midlet found then return values 15 | #Read each file and look for main class 16 | full_path = dir_read+"/"+files 17 | print full_path 18 | 19 | try: 20 | 21 | java_file = open(full_path,"r") 22 | data = java_file.read() 23 | possible_urls = re.findall(r'(https?://[^\s]+)', data) 24 | possible_url2 = re.findall(r'(www[^\s]+)', data) 25 | 26 | possible_ips = re.findall( r'[0-9]+(?:\.[0-9]+){3}', data ) 27 | #Java functions 28 | possible_java = re.findall('^.exec((.*?)^)',data) 29 | 30 | #java strings 31 | possible_strings=re.findall(r'\"(.+?)\"',data) 32 | possible_strings = set(possible_strings) 33 | possible_strings = list(possible_strings) 34 | return_possible_strings = '\n'.join(map(str, possible_strings)) 35 | 36 | 37 | for pu in possible_url2: 38 | 39 | possible_urls.append(pu) 40 | 41 | for ip in possible_ips: 42 | 43 | possible_urls.append(ip) 44 | 45 | for java_functions in possible_java: 46 | 
47 | possible_urls.append(java_functions) 48 | 49 | 50 | 51 | print possible_urls 52 | possible_urls= set(possible_urls) 53 | possible_urls = list(possible_urls) 54 | 55 | 56 | java_file.close() 57 | 58 | 59 | except Exception as e: 60 | print e 61 | pass 62 | return (possible_urls,return_possible_strings) 63 | 64 | 65 | def read_java_code(dir_read): 66 | 67 | print "Directory to scan"+dir_read 68 | afound =0 69 | mfound =0 70 | error =3 71 | class_name = "" 72 | applet = "applet" 73 | midlet = "midlet" 74 | extends = "extends" 75 | app_type = 0 # 1: Applet 2: Midlet 76 | os.chdir(dir_read) 77 | for files in os.listdir("."): 78 | #If Applet or midlet found then return values 79 | #Read each file and look for main class 80 | full_path = dir_read+"/"+files 81 | print full_path 82 | 83 | try: 84 | 85 | java_file = open(full_path,"r") 86 | java_code = java_file.readlines() 87 | if (java_code): 88 | 89 | for line in java_code: 90 | 91 | if re.search(r"\b" + extends + r"\b", line.lower()) and re.search(r"\b" + midlet + r"\b",line.lower()): 92 | #print "found" 93 | print "Midlet Class Found: "+line.split()[2] 94 | print line 95 | class_name=line.split()[2] 96 | mfound =2 97 | return (mfound,afound,class_name) 98 | 99 | elif re.search(r"\b" + extends + r"\b", line.lower()) and re.search(r"\b" + applet + r"\b",line.lower()): 100 | print line 101 | afound =1 102 | class_name=line.split()[2] 103 | print "Applet Class Found: "+line.split()[2] 104 | return (mfound,afound,class_name) 105 | 106 | 107 | 108 | except Exception as e: 109 | print e 110 | pass 111 | 112 | def java_analysis_caller(jar_bin_path,uploaduid): 113 | class_value= read_java_code(jar_bin_path) 114 | print class_value 115 | #return (mfound,afound,class_name) 116 | ##Applet type- 0: Unknown 1: Applet 2: Midlet 117 | if class_value is not None: 118 | 119 | if class_value[0] ==2: 120 | 121 | print "Midlet Found: "+class_value[2] 122 | applet_type = 2 123 | class_name = class_value[2] 124 | #print class_name 125 | 126 | elif class_value[1] ==1: 127 | print "Applet Found: "+class_value[2] 128 | applet_type = 1 129 | class_name = class_value[2] 130 | else: 131 | applet_type =0 132 | class_name = "" 133 | else: 134 | class_name ="" 135 | applet_type =3 136 | 137 | urls =[] 138 | strings = [] 139 | urls,strings = read_java_code_domain(jar_bin_path) 140 | print "The urls:" 141 | print urls 142 | print "The string:" 143 | print strings 144 | print class_name,applet_type,uploaduid,urls 145 | update_class(class_name,applet_type,urls,strings,uploaduid) 146 | if __name__ == "__main__": 147 | 148 | java_analysis_caller("/var/scan/expscanner/sandbox/sandbox/src/samples/jfiles/decompiled/b7a797fe64365a0059e2ca373d7dc073/",uploaduid=2) 149 | #read_java_code_domain() -------------------------------------------------------------------------------- /src/mthreadbk.py: -------------------------------------------------------------------------------- 1 | import time,PySQLPool 2 | from threading import Thread 3 | 4 | def manytasks(sas): 5 | connection = PySQLPool.getNewConnection(username='root', password='password', host='localhost', db='sandyfiles') 6 | 7 | 8 | for i in range(2): 9 | t = Thread(target=checksamples, args=(i,connection,)) 10 | t.start() 11 | 12 | 13 | 14 | def checksamples(i,connection): 15 | 16 | print "At thread %d" % i 17 | query = PySQLPool.getNewQuery(connection) 18 | query.Query('select * from uploads') 19 | threadcount = len(query.record) 20 | print threadcount 21 | 22 | 23 | 24 | 25 |
-------------------------------------------------------------------------------- /src/mysqlstuffs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import PySQLPool 4 | 5 | 6 | if __name__ == "__main__": 7 | connection = PySQLPool.getNewConnection(username='root', password='password', host='localhost', db='sandyfiles') 8 | query = PySQLPool.getNewQuery(connection) 9 | query.QueryOne('SELECT VERSION()') 10 | print query.record 11 | -------------------------------------------------------------------------------- /src/proxy/.svn/entries: -------------------------------------------------------------------------------- 1 | 12 2 | -------------------------------------------------------------------------------- /src/proxy/.svn/format: -------------------------------------------------------------------------------- 1 | 12 2 | -------------------------------------------------------------------------------- /src/proxy/.svn/pristine/98/98f88925b98865cda184c1f70daeb37085d70bcd.svn-base: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/.svn/pristine/98/98f88925b98865cda184c1f70daeb37085d70bcd.svn-base -------------------------------------------------------------------------------- /src/proxy/.svn/pristine/b6/b660d0c3899eac087bf45a00a1be1bc292fc15e7.svn-base: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/.svn/pristine/b6/b660d0c3899eac087bf45a00a1be1bc292fc15e7.svn-base -------------------------------------------------------------------------------- /src/proxy/.svn/wc.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/.svn/wc.db -------------------------------------------------------------------------------- /src/proxy/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2010-2013 Cuckoo Sandbox Developers. 2 | # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org 3 | # See the file 'docs/LICENSE' for copying permission. 4 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/LICENSE: -------------------------------------------------------------------------------- 1 | 2011,2012 (c) Benoît Chesneau 2 | 3 | Permission is hereby granted, free of charge, to any person 4 | obtaining a copy of this software and associated documentation 5 | files (the "Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, 7 | copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the 9 | Software is furnished to do so, subject to the following 10 | conditions: 11 | 12 | The above copyright notice and this permission notice shall be 13 | included in all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 17 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 18 | NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 19 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 20 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/Makefile.ext: -------------------------------------------------------------------------------- 1 | # This file is renamed to "Makefile.ext" in release tarballs so that 2 | # setup.py won't try to run it. If you want setup.py to run "make" 3 | # automatically, rename it back to "Makefile". 4 | 5 | all: http_parser/parser.c 6 | 7 | http_parser/parser.c: http_parser/parser.pyx 8 | cython -o http_parser.parser.c http_parser/parser.pyx 9 | mv http_parser.parser.c http_parser/parser.c 10 | 11 | clean: 12 | rm -f http_parser/parser.c 13 | 14 | 15 | .PHONY: clean all 16 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/README.rst: -------------------------------------------------------------------------------- 1 | http-parser 2 | ----------- 3 | 4 | HTTP request/response parser for Python compatible with Python 2.x 5 | (>=2.6), Python 3 and Pypy. If possible a C parser based on 6 | http-parser_ from Ryan Dahl will be used. 7 | 8 | http-parser is under the MIT license. 9 | 10 | Project url: https://github.com/benoitc/http-parser/ 11 | 12 | .. image:: 13 | https://secure.travis-ci.org/benoitc/http-parser.png?branch=master 14 | :alt: Build Status 15 | :target: https://travis-ci.org/benoitc/http-parser 16 | 17 | Requirements: 18 | ------------- 19 | 20 | - Python 2.6 or sup. Pypy latest version. 21 | - Cython if you need to rebuild the C code (Not needed for Pypy) 22 | 23 | Installation 24 | ------------ 25 | 26 | :: 27 | 28 | $ pip install http-parser 29 | 30 | Or install from source:: 31 | 32 | $ git clone git://github.com/benoitc/http-parser.git 33 | $ cd http-parser && python setup.py install 34 | 35 | 36 | Note: if you get an error on MacOSX try to install with the following 37 | arguments: 38 | 39 | $ env ARCHFLAGS="-arch i386 -arch x86_64" python setup.py install 40 | 41 | Usage 42 | ----- 43 | 44 | http-parser provide you **parser.HttpParser** low-level parser in C that 45 | you can access in your python program and **http.HttpStream** providing 46 | higher-level access to a readable,sequential io.RawIOBase object. 47 | 48 | To help you in your day work, http-parser provides you 3 kind of readers 49 | in the reader module: IterReader to read iterables, StringReader to 50 | reads strings and StringIO objects, SocketReader to read sockets or 51 | objects with the same api (recv_into needed). You can of course use any 52 | io.RawIOBase object. 
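For example, a minimal sketch of parsing an in-memory request with HttpStream and IterReader (hypothetical request data, no socket involved)::

    #!/usr/bin/env python
    from http_parser.http import HttpStream
    from http_parser.reader import IterReader

    # a canned HTTP request, split into arbitrary chunks
    chunks = ["GET /index.html HTTP/1.1\r\n",
              "Host: example.com\r\n",
              "\r\n"]

    # HttpStream pulls data from the reader until the headers are complete
    p = HttpStream(IterReader(chunks))
    print p.headers()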
53 | 54 | Example of HttpStream 55 | +++++++++++++++++++++ 56 | 57 | ex:: 58 | 59 | #!/usr/bin/env python 60 | import socket 61 | 62 | from http_parser.http import HttpStream 63 | from http_parser.reader import SocketReader 64 | 65 | def main(): 66 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 67 | try: 68 | s.connect(('gunicorn.org', 80)) 69 | s.send("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n") 70 | r = SocketReader(s) 71 | p = HttpStream(r) 72 | print p.headers() 73 | print p.body_file().read() 74 | finally: 75 | s.close() 76 | 77 | if __name__ == "__main__": 78 | main() 79 | 80 | Example of HttpParser: 81 | ++++++++++++++++++++++ 82 | 83 | :: 84 | 85 | #!/usr/bin/env python 86 | import socket 87 | 88 | # try to import C parser then fallback in pure python parser. 89 | try: 90 | from http_parser.parser import HttpParser 91 | except ImportError: 92 | from http_parser.pyparser import HttpParser 93 | 94 | 95 | def main(): 96 | 97 | p = HttpParser() 98 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 99 | body = [] 100 | try: 101 | s.connect(('gunicorn.org', 80)) 102 | s.send("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n") 103 | 104 | while True: 105 | data = s.recv(1024) 106 | if not data: 107 | break 108 | 109 | recved = len(data) 110 | nparsed = p.execute(data, recved) 111 | assert nparsed == recved 112 | 113 | if p.is_headers_complete(): 114 | print p.get_headers() 115 | 116 | if p.is_partial_body(): 117 | body.append(p.recv_body()) 118 | 119 | if p.is_message_complete(): 120 | break 121 | 122 | print "".join(body) 123 | 124 | finally: 125 | s.close() 126 | 127 | if __name__ == "__main__": 128 | main() 129 | 130 | 131 | You can find more docs in the code (or use a doc generator). 132 | 133 | 134 | Copyright 135 | --------- 136 | 137 | 2011,2012 (c) Benoît Chesneau 138 | 139 | 140 | .. http-parser_ https://github.com/ry/http-parser 141 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/THANKS: -------------------------------------------------------------------------------- 1 | Benoit Calvez 2 | Brian Rosner 3 | Christian Wyglendowski 4 | Ronny Pfannschmidt 5 | Mike Gilbert 6 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/TODO.md: -------------------------------------------------------------------------------- 1 | - add unittests 2 | - make the speedup in C optionnal 3 | - refactor http_parser: C code should be minimal and all the logic 4 | (environ parsing) should be passed to the python. 5 | - add montgrel parser 6 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/build/lib.linux-i686-2.7/http_parser/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http_parser released under the MIT license. 4 | # See the NOTICE for more information. 
5 | 6 | version_info = (0, 8, 1) 7 | __version__ = ".".join(map(str, version_info)) 8 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/build/lib.linux-i686-2.7/http_parser/_socketio.py: -------------------------------------------------------------------------------- 1 | """ 2 | socketio taken from the python3 stdlib 3 | """ 4 | import io 5 | import sys 6 | from socket import timeout, error, socket 7 | from errno import EINTR, EAGAIN, EWOULDBLOCK 8 | 9 | _blocking_errnos = EAGAIN, EWOULDBLOCK 10 | 11 | 12 | # python2.6 fixes 13 | 14 | def _recv_into_sock_py26(sock, buf): 15 | data = sock.recv(len(buf)) 16 | l = len(data) 17 | buf[:l] = data 18 | return l 19 | 20 | 21 | if sys.version_info < (2, 7, 0, 'final'): 22 | _recv_into_sock = _recv_into_sock_py26 23 | else: 24 | _recv_into_sock = lambda sock, buf: sock.recv_into(buf) 25 | 26 | 27 | class SocketIO(io.RawIOBase): 28 | 29 | """Raw I/O implementation for stream sockets. 30 | 31 | This class supports the makefile() method on sockets. It provides 32 | the raw I/O interface on top of a socket object. 33 | """ 34 | 35 | # One might wonder why not let FileIO do the job instead. There are two 36 | # main reasons why FileIO is not adapted: 37 | # - it wouldn't work under Windows (where you can't used read() and 38 | # write() on a socket handle) 39 | # - it wouldn't work with socket timeouts (FileIO would ignore the 40 | # timeout and consider the socket non-blocking) 41 | 42 | # XXX More docs 43 | 44 | def __init__(self, sock, mode): 45 | if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): 46 | raise ValueError("invalid mode: %r" % mode) 47 | io.RawIOBase.__init__(self) 48 | self._sock = sock 49 | if "b" not in mode: 50 | mode += "b" 51 | self._mode = mode 52 | self._reading = "r" in mode 53 | self._writing = "w" in mode 54 | self._timeout_occurred = False 55 | 56 | def readinto(self, b): 57 | """Read up to len(b) bytes into the writable buffer *b* and return 58 | the number of bytes read. If the socket is non-blocking and no bytes 59 | are available, None is returned. 60 | 61 | If *b* is non-empty, a 0 return value indicates that the connection 62 | was shutdown at the other end. 63 | """ 64 | self._checkClosed() 65 | self._checkReadable() 66 | if self._timeout_occurred: 67 | raise IOError("cannot read from timed out object") 68 | while True: 69 | try: 70 | return _recv_into_sock(self._sock, b) 71 | except timeout: 72 | self._timeout_occurred = True 73 | raise 74 | except error as e: 75 | n = e.args[0] 76 | if n == EINTR: 77 | continue 78 | if n in _blocking_errnos: 79 | return None 80 | raise 81 | 82 | def write(self, b): 83 | """Write the given bytes or bytearray object *b* to the socket 84 | and return the number of bytes written. This can be less than 85 | len(b) if not all data could be written. If the socket is 86 | non-blocking and no bytes could be written None is returned. 87 | """ 88 | self._checkClosed() 89 | self._checkWritable() 90 | try: 91 | return self._sock.send(b) 92 | except error as e: 93 | # XXX what about EINTR? 94 | if e.args[0] in _blocking_errnos: 95 | return None 96 | raise 97 | 98 | def readable(self): 99 | """True if the SocketIO is open for reading. 100 | """ 101 | return self._reading and not self.closed 102 | 103 | def writable(self): 104 | """True if the SocketIO is open for writing. 105 | """ 106 | return self._writing and not self.closed 107 | 108 | def fileno(self): 109 | """Return the file descriptor of the underlying socket. 
110 | """ 111 | self._checkClosed() 112 | return self._sock.fileno() 113 | 114 | @property 115 | def name(self): 116 | if not self.closed: 117 | return self.fileno() 118 | else: 119 | return -1 120 | 121 | @property 122 | def mode(self): 123 | return self._mode 124 | 125 | def close(self): 126 | """Close the SocketIO object. This doesn't close the underlying 127 | socket, except if all references to it have disappeared. 128 | """ 129 | if self.closed: 130 | return 131 | io.RawIOBase.close(self) 132 | self._sock._decref_socketios() 133 | self._sock = None 134 | 135 | def _checkClosed(self, msg=None): 136 | """Internal: raise an ValueError if file is closed 137 | """ 138 | if self.closed: 139 | raise ValueError("I/O operation on closed file." 140 | if msg is None else msg) 141 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/build/lib.linux-i686-2.7/http_parser/reader.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http-parser released under the MIT license. 4 | 5 | # See the NOTICE for more information. 6 | 7 | from io import DEFAULT_BUFFER_SIZE, RawIOBase 8 | 9 | from http_parser.util import StringIO 10 | 11 | 12 | class HttpBodyReader(RawIOBase): 13 | """ Raw implementation to stream http body """ 14 | 15 | def __init__(self, http_stream): 16 | self.http_stream = http_stream 17 | self.eof = False 18 | 19 | def readinto(self, b): 20 | if self.http_stream.parser.is_message_complete() or self.eof: 21 | if self.http_stream.parser.is_partial_body(): 22 | return self.http_stream.parser.recv_body_into(b) 23 | return 0 24 | 25 | self._checkReadable() 26 | try: 27 | self._checkClosed() 28 | except AttributeError: 29 | pass 30 | 31 | while True: 32 | buf = bytearray(DEFAULT_BUFFER_SIZE) 33 | recved = self.http_stream.stream.readinto(buf) 34 | if recved is None: 35 | break 36 | 37 | del buf[recved:] 38 | nparsed = self.http_stream.parser.execute(bytes(buf), recved) 39 | if nparsed != recved: 40 | return None 41 | 42 | if self.http_stream.parser.is_partial_body() or recved == 0: 43 | break 44 | elif self.http_stream.parser.is_message_complete(): 45 | break 46 | 47 | if not self.http_stream.parser.is_partial_body(): 48 | self.eof = True 49 | b = b'' 50 | return len(b'') 51 | 52 | return self.http_stream.parser.recv_body_into(b) 53 | 54 | def readable(self): 55 | return not self.closed or self.http_stream.parser.is_partial_body() 56 | 57 | def close(self): 58 | if self.closed: 59 | return 60 | RawIOBase.close(self) 61 | self.http_stream = None 62 | 63 | class IterReader(RawIOBase): 64 | """ A raw reader implementation for iterable """ 65 | def __init__(self, iterable): 66 | self.iter = iter(iterable) 67 | self._buffer = "" 68 | 69 | def readinto(self, b): 70 | self._checkClosed() 71 | self._checkReadable() 72 | 73 | l = len(b) 74 | try: 75 | chunk = self.iter.next() 76 | self._buffer += chunk 77 | m = min(len(self._buffer), l) 78 | data, self._buffer = self._buffer[:m], self._buffer[m:] 79 | b[0:m] = data 80 | return len(data) 81 | except StopIteration: 82 | del b[0:] 83 | return 0 84 | 85 | def readable(self): 86 | return not self.closed 87 | 88 | def close(self): 89 | if self.closed: 90 | return 91 | RawIOBase.close(self) 92 | self.iter = None 93 | 94 | class StringReader(IterReader): 95 | """ a raw reader for strings or StringIO.StringIO, 96 | cStringIO.StringIO objects """ 97 | 98 | def __init__(self, string): 99 | if isinstance(string, 
types.StringTypes): 100 | iterable = StringIO(string) 101 | else: 102 | iterable = string 103 | IterReader.__init__(self, iterable) 104 | 105 | from http_parser._socketio import SocketIO 106 | 107 | class SocketReader(SocketIO): 108 | def __init__(self, sock): 109 | super(SocketReader, self).__init__(sock, mode='rb') 110 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/build/temp.linux-i686-2.7/http_parser/http_parser.o: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/build/http-parser/build/temp.linux-i686-2.7/http_parser/http_parser.o -------------------------------------------------------------------------------- /src/proxy/build/http-parser/examples/httpparser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import socket 3 | 4 | try: 5 | from http_parser.parser import HttpParser 6 | except ImportError: 7 | from http_parser.pyparser import HttpParser 8 | from http_parser.util import b 9 | 10 | def main(): 11 | 12 | p = HttpParser() 13 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 14 | body = [] 15 | header_done = False 16 | try: 17 | s.connect(('gunicorn.org', 80)) 18 | s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")) 19 | 20 | while True: 21 | data = s.recv(1024) 22 | if not data: 23 | break 24 | 25 | recved = len(data) 26 | nparsed = p.execute(data, recved) 27 | assert nparsed == recved 28 | 29 | if p.is_headers_complete() and not header_done: 30 | print(p.get_headers()) 31 | print(p.get_headers()['content-length']) 32 | header_done = True 33 | 34 | if p.is_partial_body(): 35 | body.append(p.recv_body()) 36 | 37 | if p.is_message_complete(): 38 | break 39 | 40 | 41 | print(b("").join(body)) 42 | 43 | finally: 44 | s.close() 45 | 46 | if __name__ == "__main__": 47 | main() 48 | 49 | 50 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/examples/httpparser_from_file.py: -------------------------------------------------------------------------------- 1 | #coding=utf-8 2 | ''' 3 | Created on 2012-3-24 4 | 5 | @author: fengclient 6 | ''' 7 | from http_parser.pyparser import HttpParser 8 | 9 | if __name__ == '__main__': 10 | rsp = open('d:\\172_response.txt').read() 11 | # if your are reading a text file from windows, u may need manually convert \n to \r\n 12 | # universal newline support: http://docs.python.org/library/functions.html#open 13 | rsp = rsp.replace('\n', '\r\n') 14 | p = HttpParser() 15 | p.execute(rsp, len(rsp)) 16 | print p.get_headers() 17 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/examples/httpstream.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import socket 3 | 4 | from http_parser.http import HttpStream 5 | from http_parser.reader import SocketReader 6 | 7 | from http_parser.util import b 8 | 9 | def main(): 10 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 11 | try: 12 | s.connect(('gunicorn.org', 80)) 13 | s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")) 14 | p = HttpStream(SocketReader(s)) 15 | print(p.headers()) 16 | 17 | print(p.body_file().read()) 18 | finally: 19 | s.close() 20 | 21 | if __name__ == "__main__": 22 | main() 23 | 24 | 25 | 
-------------------------------------------------------------------------------- /src/proxy/build/http-parser/http_parser/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http_parser released under the MIT license. 4 | # See the NOTICE for more information. 5 | 6 | version_info = (0, 8, 1) 7 | __version__ = ".".join(map(str, version_info)) 8 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/http_parser/_socketio.py: -------------------------------------------------------------------------------- 1 | """ 2 | socketio taken from the python3 stdlib 3 | """ 4 | import io 5 | import sys 6 | from socket import timeout, error, socket 7 | from errno import EINTR, EAGAIN, EWOULDBLOCK 8 | 9 | _blocking_errnos = EAGAIN, EWOULDBLOCK 10 | 11 | 12 | # python2.6 fixes 13 | 14 | def _recv_into_sock_py26(sock, buf): 15 | data = sock.recv(len(buf)) 16 | l = len(data) 17 | buf[:l] = data 18 | return l 19 | 20 | 21 | if sys.version_info < (2, 7, 0, 'final'): 22 | _recv_into_sock = _recv_into_sock_py26 23 | else: 24 | _recv_into_sock = lambda sock, buf: sock.recv_into(buf) 25 | 26 | 27 | class SocketIO(io.RawIOBase): 28 | 29 | """Raw I/O implementation for stream sockets. 30 | 31 | This class supports the makefile() method on sockets. It provides 32 | the raw I/O interface on top of a socket object. 33 | """ 34 | 35 | # One might wonder why not let FileIO do the job instead. There are two 36 | # main reasons why FileIO is not adapted: 37 | # - it wouldn't work under Windows (where you can't used read() and 38 | # write() on a socket handle) 39 | # - it wouldn't work with socket timeouts (FileIO would ignore the 40 | # timeout and consider the socket non-blocking) 41 | 42 | # XXX More docs 43 | 44 | def __init__(self, sock, mode): 45 | if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): 46 | raise ValueError("invalid mode: %r" % mode) 47 | io.RawIOBase.__init__(self) 48 | self._sock = sock 49 | if "b" not in mode: 50 | mode += "b" 51 | self._mode = mode 52 | self._reading = "r" in mode 53 | self._writing = "w" in mode 54 | self._timeout_occurred = False 55 | 56 | def readinto(self, b): 57 | """Read up to len(b) bytes into the writable buffer *b* and return 58 | the number of bytes read. If the socket is non-blocking and no bytes 59 | are available, None is returned. 60 | 61 | If *b* is non-empty, a 0 return value indicates that the connection 62 | was shutdown at the other end. 63 | """ 64 | self._checkClosed() 65 | self._checkReadable() 66 | if self._timeout_occurred: 67 | raise IOError("cannot read from timed out object") 68 | while True: 69 | try: 70 | return _recv_into_sock(self._sock, b) 71 | except timeout: 72 | self._timeout_occurred = True 73 | raise 74 | except error as e: 75 | n = e.args[0] 76 | if n == EINTR: 77 | continue 78 | if n in _blocking_errnos: 79 | return None 80 | raise 81 | 82 | def write(self, b): 83 | """Write the given bytes or bytearray object *b* to the socket 84 | and return the number of bytes written. This can be less than 85 | len(b) if not all data could be written. If the socket is 86 | non-blocking and no bytes could be written None is returned. 87 | """ 88 | self._checkClosed() 89 | self._checkWritable() 90 | try: 91 | return self._sock.send(b) 92 | except error as e: 93 | # XXX what about EINTR? 
94 | if e.args[0] in _blocking_errnos: 95 | return None 96 | raise 97 | 98 | def readable(self): 99 | """True if the SocketIO is open for reading. 100 | """ 101 | return self._reading and not self.closed 102 | 103 | def writable(self): 104 | """True if the SocketIO is open for writing. 105 | """ 106 | return self._writing and not self.closed 107 | 108 | def fileno(self): 109 | """Return the file descriptor of the underlying socket. 110 | """ 111 | self._checkClosed() 112 | return self._sock.fileno() 113 | 114 | @property 115 | def name(self): 116 | if not self.closed: 117 | return self.fileno() 118 | else: 119 | return -1 120 | 121 | @property 122 | def mode(self): 123 | return self._mode 124 | 125 | def close(self): 126 | """Close the SocketIO object. This doesn't close the underlying 127 | socket, except if all references to it have disappeared. 128 | """ 129 | if self.closed: 130 | return 131 | io.RawIOBase.close(self) 132 | self._sock._decref_socketios() 133 | self._sock = None 134 | 135 | def _checkClosed(self, msg=None): 136 | """Internal: raise an ValueError if file is closed 137 | """ 138 | if self.closed: 139 | raise ValueError("I/O operation on closed file." 140 | if msg is None else msg) 141 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/http_parser/http_parser.gyp: -------------------------------------------------------------------------------- 1 | # This file is used with the GYP meta build system. 2 | # http://code.google.com/p/gyp/ 3 | # To build try this: 4 | # svn co http://gyp.googlecode.com/svn/trunk gyp 5 | # ./gyp/gyp -f make --depth=`pwd` http_parser.gyp 6 | # ./out/Debug/test 7 | { 8 | 'target_defaults': { 9 | 'default_configuration': 'Debug', 10 | 'configurations': { 11 | # TODO: hoist these out and put them somewhere common, because 12 | # RuntimeLibrary MUST MATCH across the entire project 13 | 'Debug': { 14 | 'defines': [ 'DEBUG', '_DEBUG' ], 15 | 'msvs_settings': { 16 | 'VCCLCompilerTool': { 17 | 'RuntimeLibrary': 1, # static debug 18 | }, 19 | }, 20 | }, 21 | 'Release': { 22 | 'defines': [ 'NDEBUG' ], 23 | 'msvs_settings': { 24 | 'VCCLCompilerTool': { 25 | 'RuntimeLibrary': 0, # static release 26 | }, 27 | }, 28 | } 29 | }, 30 | 'msvs_settings': { 31 | 'VCCLCompilerTool': { 32 | }, 33 | 'VCLibrarianTool': { 34 | }, 35 | 'VCLinkerTool': { 36 | 'GenerateDebugInformation': 'true', 37 | }, 38 | }, 39 | 'conditions': [ 40 | ['OS == "win"', { 41 | 'defines': [ 42 | 'WIN32' 43 | ], 44 | }] 45 | ], 46 | }, 47 | 48 | 'targets': [ 49 | { 50 | 'target_name': 'http_parser', 51 | 'type': 'static_library', 52 | 'include_dirs': [ '.' ], 53 | 'direct_dependent_settings': { 54 | 'include_dirs': [ '.' ], 55 | }, 56 | 'defines': [ 'HTTP_PARSER_STRICT=0' ], 57 | 'sources': [ './http_parser.c', ], 58 | 'conditions': [ 59 | ['OS=="win"', { 60 | 'msvs_settings': { 61 | 'VCCLCompilerTool': { 62 | # Compile as C++. http_parser.c is actually C99, but C++ is 63 | # close enough in this case. 
64 | 'CompileAs': 2, 65 | }, 66 | }, 67 | }] 68 | ], 69 | }, 70 | 71 | { 72 | 'target_name': 'test', 73 | 'type': 'executable', 74 | 'dependencies': [ 'http_parser' ], 75 | 'sources': [ 'test.c' ] 76 | } 77 | ] 78 | } 79 | 80 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/http_parser/parser.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/build/http-parser/http_parser/parser.so -------------------------------------------------------------------------------- /src/proxy/build/http-parser/http_parser/pyversion_compat.h: -------------------------------------------------------------------------------- 1 | #include "Python.h" 2 | 3 | #if PY_VERSION_HEX < 0x02070000 4 | #if PY_VERSION_HEX < 0x02060000 5 | #define PyObject_CheckBuffer(object) (0) 6 | 7 | #define PyObject_GetBuffer(obj, view, flags) (PyErr_SetString(PyExc_NotImplementedError, \ 8 | "new buffer interface is not available"), -1) 9 | #define PyBuffer_FillInfo(view, obj, buf, len, readonly, flags) (PyErr_SetString(PyExc_NotImplementedError, \ 10 | "new buffer interface is not available"), -1) 11 | #define PyBuffer_Release(obj) (PyErr_SetString(PyExc_NotImplementedError, \ 12 | "new buffer interface is not available"), -1) 13 | // Bytes->String 14 | #define PyBytes_FromStringAndSize PyString_FromStringAndSize 15 | #define PyBytes_FromString PyString_FromString 16 | #define PyBytes_AsString PyString_AsString 17 | #define PyBytes_Size PyString_Size 18 | #endif 19 | 20 | #define PyMemoryView_FromBuffer(info) (PyErr_SetString(PyExc_NotImplementedError, \ 21 | "new buffer interface is not available"), (PyObject *)NULL) 22 | #define PyMemoryView_FromObject(object) (PyErr_SetString(PyExc_NotImplementedError, \ 23 | "new buffer interface is not available"), (PyObject *)NULL) 24 | #endif 25 | 26 | #if PY_VERSION_HEX >= 0x03000000 27 | // for buffers 28 | #define Py_END_OF_BUFFER ((Py_ssize_t) 0) 29 | 30 | #define PyObject_CheckReadBuffer(object) (0) 31 | 32 | #define PyBuffer_FromMemory(ptr, s) (PyErr_SetString(PyExc_NotImplementedError, \ 33 | "old buffer interface is not available"), (PyObject *)NULL) 34 | #define PyBuffer_FromReadWriteMemory(ptr, s) (PyErr_SetString(PyExc_NotImplementedError, \ 35 | "old buffer interface is not available"), (PyObject *)NULL) 36 | #define PyBuffer_FromObject(object, offset, size) (PyErr_SetString(PyExc_NotImplementedError, \ 37 | "old buffer interface is not available"), (PyObject *)NULL) 38 | #define PyBuffer_FromReadWriteObject(object, offset, size) (PyErr_SetString(PyExc_NotImplementedError, \ 39 | "old buffer interface is not available"), (PyObject *)NULL) 40 | 41 | #endif 42 | 43 | -------------------------------------------------------------------------------- /src/proxy/build/http-parser/http_parser/reader.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http-parser released under the MIT license. 4 | 5 | # See the NOTICE for more information. 
6 | 7 | from io import DEFAULT_BUFFER_SIZE, RawIOBase 8 | 9 | from http_parser.util import StringIO 10 | 11 | 12 | class HttpBodyReader(RawIOBase): 13 | """ Raw implementation to stream http body """ 14 | 15 | def __init__(self, http_stream): 16 | self.http_stream = http_stream 17 | self.eof = False 18 | 19 | def readinto(self, b): 20 | if self.http_stream.parser.is_message_complete() or self.eof: 21 | if self.http_stream.parser.is_partial_body(): 22 | return self.http_stream.parser.recv_body_into(b) 23 | return 0 24 | 25 | self._checkReadable() 26 | try: 27 | self._checkClosed() 28 | except AttributeError: 29 | pass 30 | 31 | while True: 32 | buf = bytearray(DEFAULT_BUFFER_SIZE) 33 | recved = self.http_stream.stream.readinto(buf) 34 | if recved is None: 35 | break 36 | 37 | del buf[recved:] 38 | nparsed = self.http_stream.parser.execute(bytes(buf), recved) 39 | if nparsed != recved: 40 | return None 41 | 42 | if self.http_stream.parser.is_partial_body() or recved == 0: 43 | break 44 | elif self.http_stream.parser.is_message_complete(): 45 | break 46 | 47 | if not self.http_stream.parser.is_partial_body(): 48 | self.eof = True 49 | b = b'' 50 | return len(b'') 51 | 52 | return self.http_stream.parser.recv_body_into(b) 53 | 54 | def readable(self): 55 | return not self.closed or self.http_stream.parser.is_partial_body() 56 | 57 | def close(self): 58 | if self.closed: 59 | return 60 | RawIOBase.close(self) 61 | self.http_stream = None 62 | 63 | class IterReader(RawIOBase): 64 | """ A raw reader implementation for iterable """ 65 | def __init__(self, iterable): 66 | self.iter = iter(iterable) 67 | self._buffer = "" 68 | 69 | def readinto(self, b): 70 | self._checkClosed() 71 | self._checkReadable() 72 | 73 | l = len(b) 74 | try: 75 | chunk = self.iter.next() 76 | self._buffer += chunk 77 | m = min(len(self._buffer), l) 78 | data, self._buffer = self._buffer[:m], self._buffer[m:] 79 | b[0:m] = data 80 | return len(data) 81 | except StopIteration: 82 | del b[0:] 83 | return 0 84 | 85 | def readable(self): 86 | return not self.closed 87 | 88 | def close(self): 89 | if self.closed: 90 | return 91 | RawIOBase.close(self) 92 | self.iter = None 93 | 94 | class StringReader(IterReader): 95 | """ a raw reader for strings or StringIO.StringIO, 96 | cStringIO.StringIO objects """ 97 | 98 | def __init__(self, string): 99 | if isinstance(string, types.StringTypes): 100 | iterable = StringIO(string) 101 | else: 102 | iterable = string 103 | IterReader.__init__(self, iterable) 104 | 105 | from http_parser._socketio import SocketIO 106 | 107 | class SocketReader(SocketIO): 108 | def __init__(self, sock): 109 | super(SocketReader, self).__init__(sock, mode='rb') 110 | -------------------------------------------------------------------------------- /src/proxy/http-parser/.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - 2.6 5 | - 2.7 6 | - 3.2 7 | - 3.3 8 | - pypy 9 | 10 | install: 11 | - pip install -r requirements_dev.txt --use-mirrors 12 | - python setup.py install 13 | 14 | script: py.test testing/ 15 | 16 | branches: 17 | only: 18 | - master 19 | -------------------------------------------------------------------------------- /src/proxy/http-parser/LICENSE: -------------------------------------------------------------------------------- 1 | 2011,2012 (c) Benoît Chesneau 2 | 3 | Permission is hereby granted, free of charge, to any person 4 | obtaining a copy of this software and associated documentation 5 | files (the 
"Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, 7 | copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the 9 | Software is furnished to do so, subject to the following 10 | conditions: 11 | 12 | The above copyright notice and this permission notice shall be 13 | included in all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 17 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 18 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 19 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 20 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /src/proxy/http-parser/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include .gitignore 2 | include LICENSE 3 | include NOTICE 4 | include README.rst 5 | include THANKS 6 | include TODO.md 7 | include Makefile.ext 8 | recursive-include http_parser * 9 | recursive-include examples * 10 | -------------------------------------------------------------------------------- /src/proxy/http-parser/Makefile: -------------------------------------------------------------------------------- 1 | # This file is renamed to "Makefile.ext" in release tarballs so that 2 | # setup.py won't try to run it. If you want setup.py to run "make" 3 | # automatically, rename it back to "Makefile". 4 | 5 | all: http_parser/parser.c 6 | 7 | http_parser/parser.c: http_parser/parser.pyx 8 | cython -o http_parser.parser.c http_parser/parser.pyx 9 | mv http_parser.parser.c http_parser/parser.c 10 | 11 | clean: 12 | rm -f http_parser/parser.c 13 | 14 | 15 | .PHONY: clean all 16 | -------------------------------------------------------------------------------- /src/proxy/http-parser/README.rst: -------------------------------------------------------------------------------- 1 | http-parser 2 | ----------- 3 | 4 | HTTP request/response parser for Python compatible with Python 2.x 5 | (>=2.6), Python 3 and Pypy. If possible a C parser based on 6 | http-parser_ from Ryan Dahl will be used. 7 | 8 | http-parser is under the MIT license. 9 | 10 | Project url: https://github.com/benoitc/http-parser/ 11 | 12 | .. image:: 13 | https://secure.travis-ci.org/benoitc/http-parser.png?branch=master 14 | :alt: Build Status 15 | :target: https://travis-ci.org/benoitc/http-parser 16 | 17 | Requirements: 18 | ------------- 19 | 20 | - Python 2.6 or sup. Pypy latest version. 
21 | - Cython if you need to rebuild the C code (Not needed for Pypy) 22 | 23 | Installation 24 | ------------ 25 | 26 | :: 27 | 28 | $ pip install http-parser 29 | 30 | Or install from source:: 31 | 32 | $ git clone git://github.com/benoitc/http-parser.git 33 | $ cd http-parser && python setup.py install 34 | 35 | 36 | Note: if you get an error on MacOSX try to install with the following 37 | arguments: 38 | 39 | $ env ARCHFLAGS="-arch i386 -arch x86_64" python setup.py install 40 | 41 | Usage 42 | ----- 43 | 44 | http-parser provide you **parser.HttpParser** low-level parser in C that 45 | you can access in your python program and **http.HttpStream** providing 46 | higher-level access to a readable,sequential io.RawIOBase object. 47 | 48 | To help you in your day work, http-parser provides you 3 kind of readers 49 | in the reader module: IterReader to read iterables, StringReader to 50 | reads strings and StringIO objects, SocketReader to read sockets or 51 | objects with the same api (recv_into needed). You can of course use any 52 | io.RawIOBase object. 53 | 54 | Example of HttpStream 55 | +++++++++++++++++++++ 56 | 57 | ex:: 58 | 59 | #!/usr/bin/env python 60 | import socket 61 | 62 | from http_parser.http import HttpStream 63 | from http_parser.reader import SocketReader 64 | 65 | def main(): 66 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 67 | try: 68 | s.connect(('gunicorn.org', 80)) 69 | s.send("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n") 70 | r = SocketReader(s) 71 | p = HttpStream(r) 72 | print p.headers() 73 | print p.body_file().read() 74 | finally: 75 | s.close() 76 | 77 | if __name__ == "__main__": 78 | main() 79 | 80 | Example of HttpParser: 81 | ++++++++++++++++++++++ 82 | 83 | :: 84 | 85 | #!/usr/bin/env python 86 | import socket 87 | 88 | # try to import C parser then fallback in pure python parser. 89 | try: 90 | from http_parser.parser import HttpParser 91 | except ImportError: 92 | from http_parser.pyparser import HttpParser 93 | 94 | 95 | def main(): 96 | 97 | p = HttpParser() 98 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 99 | body = [] 100 | try: 101 | s.connect(('gunicorn.org', 80)) 102 | s.send("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n") 103 | 104 | while True: 105 | data = s.recv(1024) 106 | if not data: 107 | break 108 | 109 | recved = len(data) 110 | nparsed = p.execute(data, recved) 111 | assert nparsed == recved 112 | 113 | if p.is_headers_complete(): 114 | print p.get_headers() 115 | 116 | if p.is_partial_body(): 117 | body.append(p.recv_body()) 118 | 119 | if p.is_message_complete(): 120 | break 121 | 122 | print "".join(body) 123 | 124 | finally: 125 | s.close() 126 | 127 | if __name__ == "__main__": 128 | main() 129 | 130 | 131 | You can find more docs in the code (or use a doc generator). 132 | 133 | 134 | Copyright 135 | --------- 136 | 137 | 2011,2012 (c) Benoît Chesneau 138 | 139 | 140 | .. 
http-parser_ https://github.com/ry/http-parser 141 | -------------------------------------------------------------------------------- /src/proxy/http-parser/THANKS: -------------------------------------------------------------------------------- 1 | Benoit Calvez 2 | Brian Rosner 3 | Christian Wyglendowski 4 | Ronny Pfannschmidt 5 | Mike Gilbert 6 | -------------------------------------------------------------------------------- /src/proxy/http-parser/TODO.md: -------------------------------------------------------------------------------- 1 | - add unittests 2 | - make the speedup in C optionnal 3 | - refactor http_parser: C code should be minimal and all the logic 4 | (environ parsing) should be passed to the python. 5 | - add montgrel parser 6 | -------------------------------------------------------------------------------- /src/proxy/http-parser/build/lib.linux-i686-2.7/http_parser/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http_parser released under the MIT license. 4 | # See the NOTICE for more information. 5 | 6 | version_info = (0, 8, 1) 7 | __version__ = ".".join(map(str, version_info)) 8 | -------------------------------------------------------------------------------- /src/proxy/http-parser/build/lib.linux-i686-2.7/http_parser/_socketio.py: -------------------------------------------------------------------------------- 1 | """ 2 | socketio taken from the python3 stdlib 3 | """ 4 | import io 5 | import sys 6 | from socket import timeout, error, socket 7 | from errno import EINTR, EAGAIN, EWOULDBLOCK 8 | 9 | _blocking_errnos = EAGAIN, EWOULDBLOCK 10 | 11 | 12 | # python2.6 fixes 13 | 14 | def _recv_into_sock_py26(sock, buf): 15 | data = sock.recv(len(buf)) 16 | l = len(data) 17 | buf[:l] = data 18 | return l 19 | 20 | 21 | if sys.version_info < (2, 7, 0, 'final'): 22 | _recv_into_sock = _recv_into_sock_py26 23 | else: 24 | _recv_into_sock = lambda sock, buf: sock.recv_into(buf) 25 | 26 | 27 | class SocketIO(io.RawIOBase): 28 | 29 | """Raw I/O implementation for stream sockets. 30 | 31 | This class supports the makefile() method on sockets. It provides 32 | the raw I/O interface on top of a socket object. 33 | """ 34 | 35 | # One might wonder why not let FileIO do the job instead. There are two 36 | # main reasons why FileIO is not adapted: 37 | # - it wouldn't work under Windows (where you can't used read() and 38 | # write() on a socket handle) 39 | # - it wouldn't work with socket timeouts (FileIO would ignore the 40 | # timeout and consider the socket non-blocking) 41 | 42 | # XXX More docs 43 | 44 | def __init__(self, sock, mode): 45 | if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): 46 | raise ValueError("invalid mode: %r" % mode) 47 | io.RawIOBase.__init__(self) 48 | self._sock = sock 49 | if "b" not in mode: 50 | mode += "b" 51 | self._mode = mode 52 | self._reading = "r" in mode 53 | self._writing = "w" in mode 54 | self._timeout_occurred = False 55 | 56 | def readinto(self, b): 57 | """Read up to len(b) bytes into the writable buffer *b* and return 58 | the number of bytes read. If the socket is non-blocking and no bytes 59 | are available, None is returned. 60 | 61 | If *b* is non-empty, a 0 return value indicates that the connection 62 | was shutdown at the other end. 
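EINTR is retried internally; a socket timeout is re-raised and marks the object so that later reads raise IOError.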
63 | """ 64 | self._checkClosed() 65 | self._checkReadable() 66 | if self._timeout_occurred: 67 | raise IOError("cannot read from timed out object") 68 | while True: 69 | try: 70 | return _recv_into_sock(self._sock, b) 71 | except timeout: 72 | self._timeout_occurred = True 73 | raise 74 | except error as e: 75 | n = e.args[0] 76 | if n == EINTR: 77 | continue 78 | if n in _blocking_errnos: 79 | return None 80 | raise 81 | 82 | def write(self, b): 83 | """Write the given bytes or bytearray object *b* to the socket 84 | and return the number of bytes written. This can be less than 85 | len(b) if not all data could be written. If the socket is 86 | non-blocking and no bytes could be written None is returned. 87 | """ 88 | self._checkClosed() 89 | self._checkWritable() 90 | try: 91 | return self._sock.send(b) 92 | except error as e: 93 | # XXX what about EINTR? 94 | if e.args[0] in _blocking_errnos: 95 | return None 96 | raise 97 | 98 | def readable(self): 99 | """True if the SocketIO is open for reading. 100 | """ 101 | return self._reading and not self.closed 102 | 103 | def writable(self): 104 | """True if the SocketIO is open for writing. 105 | """ 106 | return self._writing and not self.closed 107 | 108 | def fileno(self): 109 | """Return the file descriptor of the underlying socket. 110 | """ 111 | self._checkClosed() 112 | return self._sock.fileno() 113 | 114 | @property 115 | def name(self): 116 | if not self.closed: 117 | return self.fileno() 118 | else: 119 | return -1 120 | 121 | @property 122 | def mode(self): 123 | return self._mode 124 | 125 | def close(self): 126 | """Close the SocketIO object. This doesn't close the underlying 127 | socket, except if all references to it have disappeared. 128 | """ 129 | if self.closed: 130 | return 131 | io.RawIOBase.close(self) 132 | self._sock._decref_socketios() 133 | self._sock = None 134 | 135 | def _checkClosed(self, msg=None): 136 | """Internal: raise an ValueError if file is closed 137 | """ 138 | if self.closed: 139 | raise ValueError("I/O operation on closed file." 140 | if msg is None else msg) 141 | -------------------------------------------------------------------------------- /src/proxy/http-parser/build/lib.linux-i686-2.7/http_parser/parser.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/http-parser/build/lib.linux-i686-2.7/http_parser/parser.so -------------------------------------------------------------------------------- /src/proxy/http-parser/build/lib.linux-i686-2.7/http_parser/reader.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http-parser released under the MIT license. 4 | 5 | # See the NOTICE for more information. 
6 | 7 | from io import DEFAULT_BUFFER_SIZE, RawIOBase 8 | 9 | from http_parser.util import StringIO 10 | 11 | 12 | class HttpBodyReader(RawIOBase): 13 | """ Raw implementation to stream http body """ 14 | 15 | def __init__(self, http_stream): 16 | self.http_stream = http_stream 17 | self.eof = False 18 | 19 | def readinto(self, b): 20 | if self.http_stream.parser.is_message_complete() or self.eof: 21 | if self.http_stream.parser.is_partial_body(): 22 | return self.http_stream.parser.recv_body_into(b) 23 | return 0 24 | 25 | self._checkReadable() 26 | try: 27 | self._checkClosed() 28 | except AttributeError: 29 | pass 30 | 31 | while True: 32 | buf = bytearray(DEFAULT_BUFFER_SIZE) 33 | recved = self.http_stream.stream.readinto(buf) 34 | if recved is None: 35 | break 36 | 37 | del buf[recved:] 38 | nparsed = self.http_stream.parser.execute(bytes(buf), recved) 39 | if nparsed != recved: 40 | return None 41 | 42 | if self.http_stream.parser.is_partial_body() or recved == 0: 43 | break 44 | elif self.http_stream.parser.is_message_complete(): 45 | break 46 | 47 | if not self.http_stream.parser.is_partial_body(): 48 | self.eof = True 49 | b = b'' 50 | return len(b'') 51 | 52 | return self.http_stream.parser.recv_body_into(b) 53 | 54 | def readable(self): 55 | return not self.closed or self.http_stream.parser.is_partial_body() 56 | 57 | def close(self): 58 | if self.closed: 59 | return 60 | RawIOBase.close(self) 61 | self.http_stream = None 62 | 63 | class IterReader(RawIOBase): 64 | """ A raw reader implementation for iterable """ 65 | def __init__(self, iterable): 66 | self.iter = iter(iterable) 67 | self._buffer = "" 68 | 69 | def readinto(self, b): 70 | self._checkClosed() 71 | self._checkReadable() 72 | 73 | l = len(b) 74 | try: 75 | chunk = self.iter.next() 76 | self._buffer += chunk 77 | m = min(len(self._buffer), l) 78 | data, self._buffer = self._buffer[:m], self._buffer[m:] 79 | b[0:m] = data 80 | return len(data) 81 | except StopIteration: 82 | del b[0:] 83 | return 0 84 | 85 | def readable(self): 86 | return not self.closed 87 | 88 | def close(self): 89 | if self.closed: 90 | return 91 | RawIOBase.close(self) 92 | self.iter = None 93 | 94 | class StringReader(IterReader): 95 | """ a raw reader for strings or StringIO.StringIO, 96 | cStringIO.StringIO objects """ 97 | 98 | def __init__(self, string): 99 | if isinstance(string, types.StringTypes): 100 | iterable = StringIO(string) 101 | else: 102 | iterable = string 103 | IterReader.__init__(self, iterable) 104 | 105 | from http_parser._socketio import SocketIO 106 | 107 | class SocketReader(SocketIO): 108 | def __init__(self, sock): 109 | super(SocketReader, self).__init__(sock, mode='rb') 110 | -------------------------------------------------------------------------------- /src/proxy/http-parser/build/temp.linux-i686-2.7/http_parser/http_parser.o: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/http-parser/build/temp.linux-i686-2.7/http_parser/http_parser.o -------------------------------------------------------------------------------- /src/proxy/http-parser/build/temp.linux-i686-2.7/http_parser/parser.o: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/http-parser/build/temp.linux-i686-2.7/http_parser/parser.o 
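The reader classes dumped above are the glue between a byte source and HttpStream: HttpBodyReader streams the message body, while IterReader, StringReader and SocketReader adapt iterables, strings and sockets to io.RawIOBase. As a minimal sketch (Python 2, like the rest of this tree), a response already captured as raw chunks (for example by the sandy proxy) could be replayed through IterReader and parsed without a live socket; the chunk list below is made up for illustration::

    from http_parser.http import HttpStream
    from http_parser.reader import IterReader

    # A raw HTTP response split into arbitrary chunks (hypothetical sample data).
    chunks = [
        "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n",
        "Content-Length: 5\r\n\r\nhello",
    ]

    # IterReader turns any iterable of byte strings into a RawIOBase reader,
    # which is all HttpStream needs to drive the underlying parser.
    stream = HttpStream(IterReader(chunks))
    print(stream.headers())           # parsed headers
    print(stream.body_file().read())  # body streamed via HttpBodyReader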
-------------------------------------------------------------------------------- /src/proxy/http-parser/debian/changelog: -------------------------------------------------------------------------------- 1 | python-http-parser (0.6.0-1) unstable; urgency=low 2 | 3 | * bump version. 4 | 5 | -- Benoit Chesneau Mon, 20 Jun 2011 17:20:00 +0100 6 | 7 | python-http-parser (0.5.4-1) unstable; urgency=low 8 | 9 | * bump version. 10 | 11 | -- Benoit Chesneau Mon, 20 Jun 2011 15:52:00 +0100 12 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/clean: -------------------------------------------------------------------------------- 1 | http-parser.egg-info/* 2 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/compat: -------------------------------------------------------------------------------- 1 | 7 2 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/control: -------------------------------------------------------------------------------- 1 | Source: python-http-parser 2 | Section: python 3 | Priority: optional 4 | Maintainer: Benoit Chesneau 5 | Build-Depends: debhelper (>= 7), python-support, python-setuptools 6 | Standards-Version: 3.9.0.0 7 | Homepage: http://github.com/benoitc/http-parser 8 | 9 | Package: python-http-parser 10 | Architecture: all 11 | Depends: ${python:Depends}, ${shlibs:Depends}, ${misc:Depends} 12 | Provides: ${python:Provides} 13 | Description: Python http request/response parser 14 | HTTP request/response parser for Python in C under MIT License, based on 15 | http-parser from Ryan Dahl. 16 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/copyright: -------------------------------------------------------------------------------- 1 | 2011 (c) Benoît Chesneau 2 | 3 | Permission is hereby granted, free of charge, to any person 4 | obtaining a copy of this software and associated documentation 5 | files (the "Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, 7 | copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the 9 | Software is furnished to do so, subject to the following 10 | conditions: 11 | 12 | The above copyright notice and this permission notice shall be 13 | included in all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 17 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 18 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 19 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 20 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 23 | 24 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/python-http-parser.preinst: -------------------------------------------------------------------------------- 1 | #! /bin/sh 2 | 3 | set -e 4 | 5 | # This was added by stdeb to workaround Debian #479852. In a nutshell, 6 | # pycentral does not remove normally remove its symlinks on an 7 | # upgrade. 
Since we're using python-support, however, those symlinks 8 | # will be broken. This tells python-central to clean up any symlinks. 9 | if [ -e /var/lib/dpkg/info/python-http-parser.list ] && which pycentral >/dev/null 2>&1 10 | then 11 | pycentral pkgremove python-http-parser 12 | fi 13 | 14 | #DEBHELPER# 15 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/pyversions: -------------------------------------------------------------------------------- 1 | 2.5- 2 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | # -*- makefile -*- 3 | # Sample debian/rules that uses debhelper. 4 | # This file was originally written by Joey Hess and Craig Small. 5 | # As a special exception, when this file is copied by dh-make into a 6 | # dh-make output file, you may use that output file without restriction. 7 | # This special exception was added by Craig Small in version 0.37 of dh-make. 8 | 9 | # Uncomment this to turn on verbose mode. 10 | # export DH_VERBOSE=1 11 | 12 | %: 13 | dh $@ 14 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/source/format: -------------------------------------------------------------------------------- 1 | 3.0 (native) 2 | -------------------------------------------------------------------------------- /src/proxy/http-parser/debian/watch: -------------------------------------------------------------------------------- 1 | # Example watch control file for uscan 2 | # Rename this file to "watch" and then you can run the "uscan" command 3 | # to check for upstream updates and more. 
4 | # See uscan(1) for format 5 | 6 | # Compulsory line, this is a version 3 file 7 | version=3 8 | 9 | # Uncomment to examine a Webpage 10 | # 11 | #http://www.example.com/downloads.php python-couchdbkit-(.*)\.tar\.gz 12 | opts=dversionmangle=s/\+dfsg$// \ 13 | http://pypi.python.org/packages/source/c/http-parser/http-parser-(.*).tar.gz 14 | # http://github.com/benoitc/couchdbkit/downloads/ /benoitc/couchdbkit/tarball/([0-9].*) 15 | 16 | # Uncomment to examine a Webserver directory 17 | #http://www.example.com/pub/python-couchdbkit-(.*)\.tar\.gz 18 | 19 | # Uncommment to examine a FTP server 20 | #ftp://ftp.example.com/pub/python-couchdbkit-(.*)\.tar\.gz debian uupdate 21 | 22 | # Uncomment to find new files on sourceforge, for devscripts >= 2.9 23 | # http://sf.net/python-couchdbkit/python-couchdbkit-(.*)\.tar\.gz 24 | 25 | # Uncomment to find new files on GooglePages 26 | # http://example.googlepages.com/foo.html python-couchdbkit-(.*)\.tar\.gz 27 | -------------------------------------------------------------------------------- /src/proxy/http-parser/dist/http_parser-0.8.1-py2.7-linux-i686.egg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/http-parser/dist/http_parser-0.8.1-py2.7-linux-i686.egg -------------------------------------------------------------------------------- /src/proxy/http-parser/examples/httpparser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import socket 3 | 4 | try: 5 | from http_parser.parser import HttpParser 6 | except ImportError: 7 | from http_parser.pyparser import HttpParser 8 | from http_parser.util import b 9 | 10 | def main(): 11 | 12 | p = HttpParser() 13 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 14 | body = [] 15 | header_done = False 16 | try: 17 | s.connect(('gunicorn.org', 80)) 18 | s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")) 19 | 20 | while True: 21 | data = s.recv(1024) 22 | if not data: 23 | break 24 | 25 | recved = len(data) 26 | nparsed = p.execute(data, recved) 27 | assert nparsed == recved 28 | 29 | if p.is_headers_complete() and not header_done: 30 | print(p.get_headers()) 31 | print(p.get_headers()['content-length']) 32 | header_done = True 33 | 34 | if p.is_partial_body(): 35 | body.append(p.recv_body()) 36 | 37 | if p.is_message_complete(): 38 | break 39 | 40 | 41 | print(b("").join(body)) 42 | 43 | finally: 44 | s.close() 45 | 46 | if __name__ == "__main__": 47 | main() 48 | 49 | 50 | -------------------------------------------------------------------------------- /src/proxy/http-parser/examples/httpparser_from_file.py: -------------------------------------------------------------------------------- 1 | #coding=utf-8 2 | ''' 3 | Created on 2012-3-24 4 | 5 | @author: fengclient 6 | ''' 7 | from http_parser.pyparser import HttpParser 8 | 9 | if __name__ == '__main__': 10 | rsp = open('d:\\172_response.txt').read() 11 | # if your are reading a text file from windows, u may need manually convert \n to \r\n 12 | # universal newline support: http://docs.python.org/library/functions.html#open 13 | rsp = rsp.replace('\n', '\r\n') 14 | p = HttpParser() 15 | p.execute(rsp, len(rsp)) 16 | print p.get_headers() 17 | -------------------------------------------------------------------------------- /src/proxy/http-parser/examples/httpstream.py: -------------------------------------------------------------------------------- 1 
| #!/usr/bin/env python 2 | import socket 3 | 4 | from http_parser.http import HttpStream 5 | from http_parser.reader import SocketReader 6 | 7 | from http_parser.util import b 8 | 9 | def main(): 10 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 11 | try: 12 | s.connect(('gunicorn.org', 80)) 13 | s.send(b("GET / HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")) 14 | p = HttpStream(SocketReader(s)) 15 | print(p.headers()) 16 | 17 | print(p.body_file().read()) 18 | finally: 19 | s.close() 20 | 21 | if __name__ == "__main__": 22 | main() 23 | 24 | 25 | -------------------------------------------------------------------------------- /src/proxy/http-parser/http_parser/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http_parser released under the MIT license. 4 | # See the NOTICE for more information. 5 | 6 | version_info = (0, 8, 1) 7 | __version__ = ".".join(map(str, version_info)) 8 | -------------------------------------------------------------------------------- /src/proxy/http-parser/http_parser/_socketio.py: -------------------------------------------------------------------------------- 1 | """ 2 | socketio taken from the python3 stdlib 3 | """ 4 | import io 5 | import sys 6 | from socket import timeout, error, socket 7 | from errno import EINTR, EAGAIN, EWOULDBLOCK 8 | 9 | _blocking_errnos = EAGAIN, EWOULDBLOCK 10 | 11 | 12 | # python2.6 fixes 13 | 14 | def _recv_into_sock_py26(sock, buf): 15 | data = sock.recv(len(buf)) 16 | l = len(data) 17 | buf[:l] = data 18 | return l 19 | 20 | 21 | if sys.version_info < (2, 7, 0, 'final'): 22 | _recv_into_sock = _recv_into_sock_py26 23 | else: 24 | _recv_into_sock = lambda sock, buf: sock.recv_into(buf) 25 | 26 | 27 | class SocketIO(io.RawIOBase): 28 | 29 | """Raw I/O implementation for stream sockets. 30 | 31 | This class supports the makefile() method on sockets. It provides 32 | the raw I/O interface on top of a socket object. 33 | """ 34 | 35 | # One might wonder why not let FileIO do the job instead. There are two 36 | # main reasons why FileIO is not adapted: 37 | # - it wouldn't work under Windows (where you can't used read() and 38 | # write() on a socket handle) 39 | # - it wouldn't work with socket timeouts (FileIO would ignore the 40 | # timeout and consider the socket non-blocking) 41 | 42 | # XXX More docs 43 | 44 | def __init__(self, sock, mode): 45 | if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): 46 | raise ValueError("invalid mode: %r" % mode) 47 | io.RawIOBase.__init__(self) 48 | self._sock = sock 49 | if "b" not in mode: 50 | mode += "b" 51 | self._mode = mode 52 | self._reading = "r" in mode 53 | self._writing = "w" in mode 54 | self._timeout_occurred = False 55 | 56 | def readinto(self, b): 57 | """Read up to len(b) bytes into the writable buffer *b* and return 58 | the number of bytes read. If the socket is non-blocking and no bytes 59 | are available, None is returned. 60 | 61 | If *b* is non-empty, a 0 return value indicates that the connection 62 | was shutdown at the other end. 
63 | """ 64 | self._checkClosed() 65 | self._checkReadable() 66 | if self._timeout_occurred: 67 | raise IOError("cannot read from timed out object") 68 | while True: 69 | try: 70 | return _recv_into_sock(self._sock, b) 71 | except timeout: 72 | self._timeout_occurred = True 73 | raise 74 | except error as e: 75 | n = e.args[0] 76 | if n == EINTR: 77 | continue 78 | if n in _blocking_errnos: 79 | return None 80 | raise 81 | 82 | def write(self, b): 83 | """Write the given bytes or bytearray object *b* to the socket 84 | and return the number of bytes written. This can be less than 85 | len(b) if not all data could be written. If the socket is 86 | non-blocking and no bytes could be written None is returned. 87 | """ 88 | self._checkClosed() 89 | self._checkWritable() 90 | try: 91 | return self._sock.send(b) 92 | except error as e: 93 | # XXX what about EINTR? 94 | if e.args[0] in _blocking_errnos: 95 | return None 96 | raise 97 | 98 | def readable(self): 99 | """True if the SocketIO is open for reading. 100 | """ 101 | return self._reading and not self.closed 102 | 103 | def writable(self): 104 | """True if the SocketIO is open for writing. 105 | """ 106 | return self._writing and not self.closed 107 | 108 | def fileno(self): 109 | """Return the file descriptor of the underlying socket. 110 | """ 111 | self._checkClosed() 112 | return self._sock.fileno() 113 | 114 | @property 115 | def name(self): 116 | if not self.closed: 117 | return self.fileno() 118 | else: 119 | return -1 120 | 121 | @property 122 | def mode(self): 123 | return self._mode 124 | 125 | def close(self): 126 | """Close the SocketIO object. This doesn't close the underlying 127 | socket, except if all references to it have disappeared. 128 | """ 129 | if self.closed: 130 | return 131 | io.RawIOBase.close(self) 132 | self._sock._decref_socketios() 133 | self._sock = None 134 | 135 | def _checkClosed(self, msg=None): 136 | """Internal: raise an ValueError if file is closed 137 | """ 138 | if self.closed: 139 | raise ValueError("I/O operation on closed file." 140 | if msg is None else msg) 141 | -------------------------------------------------------------------------------- /src/proxy/http-parser/http_parser/http_parser.gyp: -------------------------------------------------------------------------------- 1 | # This file is used with the GYP meta build system. 2 | # http://code.google.com/p/gyp/ 3 | # To build try this: 4 | # svn co http://gyp.googlecode.com/svn/trunk gyp 5 | # ./gyp/gyp -f make --depth=`pwd` http_parser.gyp 6 | # ./out/Debug/test 7 | { 8 | 'target_defaults': { 9 | 'default_configuration': 'Debug', 10 | 'configurations': { 11 | # TODO: hoist these out and put them somewhere common, because 12 | # RuntimeLibrary MUST MATCH across the entire project 13 | 'Debug': { 14 | 'defines': [ 'DEBUG', '_DEBUG' ], 15 | 'msvs_settings': { 16 | 'VCCLCompilerTool': { 17 | 'RuntimeLibrary': 1, # static debug 18 | }, 19 | }, 20 | }, 21 | 'Release': { 22 | 'defines': [ 'NDEBUG' ], 23 | 'msvs_settings': { 24 | 'VCCLCompilerTool': { 25 | 'RuntimeLibrary': 0, # static release 26 | }, 27 | }, 28 | } 29 | }, 30 | 'msvs_settings': { 31 | 'VCCLCompilerTool': { 32 | }, 33 | 'VCLibrarianTool': { 34 | }, 35 | 'VCLinkerTool': { 36 | 'GenerateDebugInformation': 'true', 37 | }, 38 | }, 39 | 'conditions': [ 40 | ['OS == "win"', { 41 | 'defines': [ 42 | 'WIN32' 43 | ], 44 | }] 45 | ], 46 | }, 47 | 48 | 'targets': [ 49 | { 50 | 'target_name': 'http_parser', 51 | 'type': 'static_library', 52 | 'include_dirs': [ '.' 
], 53 | 'direct_dependent_settings': { 54 | 'include_dirs': [ '.' ], 55 | }, 56 | 'defines': [ 'HTTP_PARSER_STRICT=0' ], 57 | 'sources': [ './http_parser.c', ], 58 | 'conditions': [ 59 | ['OS=="win"', { 60 | 'msvs_settings': { 61 | 'VCCLCompilerTool': { 62 | # Compile as C++. http_parser.c is actually C99, but C++ is 63 | # close enough in this case. 64 | 'CompileAs': 2, 65 | }, 66 | }, 67 | }] 68 | ], 69 | }, 70 | 71 | { 72 | 'target_name': 'test', 73 | 'type': 'executable', 74 | 'dependencies': [ 'http_parser' ], 75 | 'sources': [ 'test.c' ] 76 | } 77 | ] 78 | } 79 | 80 | -------------------------------------------------------------------------------- /src/proxy/http-parser/http_parser/parser.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/http-parser/http_parser/parser.so -------------------------------------------------------------------------------- /src/proxy/http-parser/http_parser/pyversion_compat.h: -------------------------------------------------------------------------------- 1 | #include "Python.h" 2 | 3 | #if PY_VERSION_HEX < 0x02070000 4 | #if PY_VERSION_HEX < 0x02060000 5 | #define PyObject_CheckBuffer(object) (0) 6 | 7 | #define PyObject_GetBuffer(obj, view, flags) (PyErr_SetString(PyExc_NotImplementedError, \ 8 | "new buffer interface is not available"), -1) 9 | #define PyBuffer_FillInfo(view, obj, buf, len, readonly, flags) (PyErr_SetString(PyExc_NotImplementedError, \ 10 | "new buffer interface is not available"), -1) 11 | #define PyBuffer_Release(obj) (PyErr_SetString(PyExc_NotImplementedError, \ 12 | "new buffer interface is not available"), -1) 13 | // Bytes->String 14 | #define PyBytes_FromStringAndSize PyString_FromStringAndSize 15 | #define PyBytes_FromString PyString_FromString 16 | #define PyBytes_AsString PyString_AsString 17 | #define PyBytes_Size PyString_Size 18 | #endif 19 | 20 | #define PyMemoryView_FromBuffer(info) (PyErr_SetString(PyExc_NotImplementedError, \ 21 | "new buffer interface is not available"), (PyObject *)NULL) 22 | #define PyMemoryView_FromObject(object) (PyErr_SetString(PyExc_NotImplementedError, \ 23 | "new buffer interface is not available"), (PyObject *)NULL) 24 | #endif 25 | 26 | #if PY_VERSION_HEX >= 0x03000000 27 | // for buffers 28 | #define Py_END_OF_BUFFER ((Py_ssize_t) 0) 29 | 30 | #define PyObject_CheckReadBuffer(object) (0) 31 | 32 | #define PyBuffer_FromMemory(ptr, s) (PyErr_SetString(PyExc_NotImplementedError, \ 33 | "old buffer interface is not available"), (PyObject *)NULL) 34 | #define PyBuffer_FromReadWriteMemory(ptr, s) (PyErr_SetString(PyExc_NotImplementedError, \ 35 | "old buffer interface is not available"), (PyObject *)NULL) 36 | #define PyBuffer_FromObject(object, offset, size) (PyErr_SetString(PyExc_NotImplementedError, \ 37 | "old buffer interface is not available"), (PyObject *)NULL) 38 | #define PyBuffer_FromReadWriteObject(object, offset, size) (PyErr_SetString(PyExc_NotImplementedError, \ 39 | "old buffer interface is not available"), (PyObject *)NULL) 40 | 41 | #endif 42 | 43 | -------------------------------------------------------------------------------- /src/proxy/http-parser/http_parser/reader.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 - 2 | # 3 | # This file is part of http-parser released under the MIT license. 4 | 5 | # See the NOTICE for more information. 
6 | 7 | from io import DEFAULT_BUFFER_SIZE, RawIOBase 8 | 9 | from http_parser.util import StringIO 10 | 11 | 12 | class HttpBodyReader(RawIOBase): 13 | """ Raw implementation to stream http body """ 14 | 15 | def __init__(self, http_stream): 16 | self.http_stream = http_stream 17 | self.eof = False 18 | 19 | def readinto(self, b): 20 | if self.http_stream.parser.is_message_complete() or self.eof: 21 | if self.http_stream.parser.is_partial_body(): 22 | return self.http_stream.parser.recv_body_into(b) 23 | return 0 24 | 25 | self._checkReadable() 26 | try: 27 | self._checkClosed() 28 | except AttributeError: 29 | pass 30 | 31 | while True: 32 | buf = bytearray(DEFAULT_BUFFER_SIZE) 33 | recved = self.http_stream.stream.readinto(buf) 34 | if recved is None: 35 | break 36 | 37 | del buf[recved:] 38 | nparsed = self.http_stream.parser.execute(bytes(buf), recved) 39 | if nparsed != recved: 40 | return None 41 | 42 | if self.http_stream.parser.is_partial_body() or recved == 0: 43 | break 44 | elif self.http_stream.parser.is_message_complete(): 45 | break 46 | 47 | if not self.http_stream.parser.is_partial_body(): 48 | self.eof = True 49 | b = b'' 50 | return len(b'') 51 | 52 | return self.http_stream.parser.recv_body_into(b) 53 | 54 | def readable(self): 55 | return not self.closed or self.http_stream.parser.is_partial_body() 56 | 57 | def close(self): 58 | if self.closed: 59 | return 60 | RawIOBase.close(self) 61 | self.http_stream = None 62 | 63 | class IterReader(RawIOBase): 64 | """ A raw reader implementation for iterable """ 65 | def __init__(self, iterable): 66 | self.iter = iter(iterable) 67 | self._buffer = "" 68 | 69 | def readinto(self, b): 70 | self._checkClosed() 71 | self._checkReadable() 72 | 73 | l = len(b) 74 | try: 75 | chunk = self.iter.next() 76 | self._buffer += chunk 77 | m = min(len(self._buffer), l) 78 | data, self._buffer = self._buffer[:m], self._buffer[m:] 79 | b[0:m] = data 80 | return len(data) 81 | except StopIteration: 82 | del b[0:] 83 | return 0 84 | 85 | def readable(self): 86 | return not self.closed 87 | 88 | def close(self): 89 | if self.closed: 90 | return 91 | RawIOBase.close(self) 92 | self.iter = None 93 | 94 | class StringReader(IterReader): 95 | """ a raw reader for strings or StringIO.StringIO, 96 | cStringIO.StringIO objects """ 97 | 98 | def __init__(self, string): 99 | if isinstance(string, types.StringTypes): 100 | iterable = StringIO(string) 101 | else: 102 | iterable = string 103 | IterReader.__init__(self, iterable) 104 | 105 | from http_parser._socketio import SocketIO 106 | 107 | class SocketReader(SocketIO): 108 | def __init__(self, sock): 109 | super(SocketReader, self).__init__(sock, mode='rb') 110 | -------------------------------------------------------------------------------- /src/proxy/http-parser/testing/test_parse_from_stream.py: -------------------------------------------------------------------------------- 1 | from errno import EINTR, EAGAIN, EWOULDBLOCK 2 | 3 | import os 4 | import socket 5 | 6 | import pytest 7 | from http_parser.http import HttpStream 8 | from http_parser.reader import SocketReader 9 | 10 | class FakeInputSocket(object): 11 | def __init__(self, events): 12 | self.events = events 13 | 14 | def recv(self, *ignored): 15 | try: 16 | event = self.events.pop(0) 17 | except IndexError: 18 | return b'' 19 | else: 20 | if isinstance(event, Exception): 21 | raise event 22 | else: 23 | return event 24 | 25 | def recv_into(self, buf): 26 | data = self.recv() 27 | l = len(data) 28 | assert l <= len(buf) 29 | buf[0:l] = data 
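# Mimics socket.recv_into(): the queued event is copied into the caller's buffer and the byte count is returned.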
30 | return l 31 | 32 | 33 | complete_request = b'GET /test HTTP/1.1\r\nContent-Type: text\r\n\r\n' 34 | 35 | def tostream(input): 36 | sock = FakeInputSocket(input) 37 | reader = SocketReader(sock) 38 | return HttpStream(reader) 39 | 40 | 41 | def test_parse_headers(): 42 | stream = tostream([complete_request]) 43 | assert stream.headers() 44 | 45 | 46 | def test_ioerror_on_noblocking(): 47 | stream = tostream([ 48 | b'GET /test HTTP/1.1\r\n', 49 | socket.error(EAGAIN, 'eagain'), 50 | b'Content-Type: text\r\n\r\n', 51 | ]) 52 | pytest.raises(IOError, stream.headers) 53 | 54 | 55 | def test_parse_with_timeout_raises(): 56 | stream = tostream([ 57 | b'GET /test HTTP/1.1\r\n', 58 | socket.timeout(EAGAIN, 'timeout'), 59 | b'Content-Type: text\r\n\r\n', 60 | ]) 61 | ex = pytest.raises(socket.timeout, stream.headers) 62 | print(ex.getrepr(style='short')) 63 | 64 | 65 | def test_parse_from_real_socket(): 66 | # would fail on python2.6 before the recv_into hack 67 | sock, sink = socket.socketpair() 68 | sink.send(complete_request) 69 | reader = SocketReader(sock) 70 | stream = HttpStream(reader) 71 | assert stream.headers() 72 | -------------------------------------------------------------------------------- /src/proxy/http-parser/tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py26,py27,py32,py33 3 | 4 | [testenv] 5 | deps = 6 | pytest 7 | pytest-cov 8 | pytest-cache 9 | commands= 10 | py.test [] 11 | -------------------------------------------------------------------------------- /src/proxy/httpparser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import socket 3 | 4 | try: 5 | from http_parser.parser import HttpParser 6 | except ImportError: 7 | from http_parser.pyparser import HttpParser 8 | from http_parser.util import b 9 | 10 | def main(): 11 | 12 | p = HttpParser() 13 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 14 | body = [] 15 | header_done = False 16 | try: 17 | s.connect(('install2.optimum-installer.com', 80)) 18 | s.send(b("GET /o/PDFCreator/Express_Installer.exe.exe HTTP/1.1\r\nHost: gunicorn.org\r\n\r\n")) 19 | 20 | while True: 21 | data = s.recv(1024) 22 | if not data: 23 | break 24 | 25 | recved = len(data) 26 | nparsed = p.execute(data, recved) 27 | assert nparsed == recved 28 | 29 | if p.is_headers_complete() and not header_done: 30 | print(p.get_headers()) 31 | print(p.get_headers()['content-length']) 32 | header_done = True 33 | 34 | if p.is_partial_body(): 35 | body.append(p.recv_body()) 36 | print p.recv_body() 37 | print "BDy++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 38 | 39 | if p.is_message_complete(): 40 | break 41 | 42 | body = b("").join(body) 43 | 44 | print "Writing file\n" 45 | data_write = open("mal.exe","wb") 46 | data_write.write(body) 47 | data_write.close() 48 | 49 | print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" 50 | 51 | finally: 52 | s.close() 53 | 54 | if __name__ == "__main__": 55 | main() 56 | 57 | 58 | -------------------------------------------------------------------------------- /src/proxy/multiproc.py: -------------------------------------------------------------------------------- 1 | from multiprocessing import Process 2 | import os 3 | import time 4 | from random import randint 5 | from proxy import server 6 | from sele import selenium 7 | from msqlhttp import done 8 | 9 | def start_selenium(port,url,uid): 10 | 
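# Runs as its own Process: the remote browser is pointed at the local proxy port, so everything it fetches for this job is captured and tied back to uid.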
print "Starting Selenium" 11 | selenium(port,url,uid) 12 | 13 | def start_server(rand_port,uid): 14 | print "Starting Server " 15 | server(rand_port,uid) 16 | 17 | def mprocess(i,uid,url): 18 | try: 19 | 20 | uid=uid 21 | i =i 22 | print " Uid is " + str(uid) +"id is :" +str(i) 23 | # We generate a random port and pass it on to selenium and Proxy 24 | rand_port = randint(8000,9000) 25 | procs = [] 26 | #Append process to a list 27 | procs.append(Process(target=start_server,args=(rand_port,uid,))) 28 | procs.append(Process(target=start_selenium,args=(rand_port,url,uid,))) 29 | #python lambda http://www.secnetix.de/olli/Python/lambda_functions.hawk 30 | map(lambda x: x.start(), procs) 31 | map(lambda x: x.join(70), procs) 32 | #Lets let the entire url execute for 60 seconds 33 | #time.sleep(80) 34 | print " Updating status to DB"+str(uid) 35 | done(i) 36 | print "Terminating Proxy|Selenium\n" 37 | 38 | map(lambda x: x.terminate(), procs) 39 | 40 | except Exception as e: 41 | print e 42 | pass 43 | 44 | if __name__ == '__main__': 45 | mprocess() -------------------------------------------------------------------------------- /src/proxy/multiproc_ff.py: -------------------------------------------------------------------------------- 1 | from multiprocessing import Process 2 | import os 3 | import time 4 | from random import randint 5 | from proxy_links import server 6 | from sele_ff import selenium 7 | from msqlhttp import done_links 8 | 9 | def start_selenium(port,url,uid,local_ip,remote_ip): 10 | print "Starting Selenium" 11 | selenium(port,url,uid,local_ip,remote_ip) 12 | 13 | def start_server(rand_port,uid,local_ip): 14 | print "Starting Server "+str(local_ip) 15 | server(rand_port,uid,local_ip) 16 | 17 | def mprocess(i,uid,url): 18 | 19 | try: 20 | 21 | local_ip ='192.168.6.10' 22 | remote_ip ='192.168.6.11' 23 | 24 | uid=i 25 | i =i 26 | print " Uid is " + str(uid) +"id is :" +str(i) 27 | # We generate a random port and pass it on to selenium and Proxy 28 | rand_port = randint(6000,7000) 29 | procs = [] 30 | #Append process to a list 31 | procs.append(Process(target=start_server,args=(rand_port,uid,local_ip,))) 32 | procs.append(Process(target=start_selenium,args=(rand_port,url,uid,local_ip,remote_ip))) 33 | #python lambda http://www.secnetix.de/olli/Python/lambda_functions.hawk 34 | map(lambda x: x.start(), procs) 35 | map(lambda x: x.join(70), procs) 36 | #Lets let the entire url execute for 60 seconds 37 | #time.sleep(80) 38 | print " Updating status to DB"+str(uid) 39 | done_links(i) 40 | print "Terminating Proxy|Selenium\n" 41 | 42 | map(lambda x: x.terminate(), procs) 43 | 44 | except Exception as e: 45 | print e 46 | pass 47 | 48 | if __name__ == '__main__': 49 | mprocess('1','1','http://www.gogole.com') -------------------------------------------------------------------------------- /src/proxy/multiproc_java.py: -------------------------------------------------------------------------------- 1 | from multiprocessing import Process 2 | import os 3 | import time 4 | from random import randint 5 | from proxy import server 6 | from sele_java import selenium 7 | from msqlhttp import done 8 | 9 | def start_selenium(port,url,uid): 10 | print "Starting Selenium" 11 | selenium(port,url,uid) 12 | 13 | def start_server(rand_port,uid): 14 | print "Starting Server " 15 | server(rand_port,uid) 16 | 17 | def mprocess(i,uid,url): 18 | try: 19 | 20 | uid=uid 21 | i =i 22 | print " Uid is " + str(uid) +"id is :" +str(i) 23 | # We generate a random port and pass it on to selenium and Proxy 24 | 
rand_port = randint(8000,9000) 25 | procs = [] 26 | #Append process to a list 27 | procs.append(Process(target=start_server,args=(rand_port,uid,))) 28 | procs.append(Process(target=start_selenium,args=(rand_port,url,uid,))) 29 | #python lambda http://www.secnetix.de/olli/Python/lambda_functions.hawk 30 | map(lambda x: x.start(), procs) 31 | map(lambda x: x.join(70), procs) 32 | #Lets let the entire url execute for 60 seconds 33 | #time.sleep(80) 34 | print " Updating status to DB"+str(uid) 35 | done(i) 36 | print "Terminating Proxy|Selenium\n" 37 | 38 | map(lambda x: x.terminate(), procs) 39 | 40 | except Exception as e: 41 | print e 42 | pass 43 | 44 | if __name__ == '__main__': 45 | mprocess() -------------------------------------------------------------------------------- /src/proxy/multiproc_links.py: -------------------------------------------------------------------------------- 1 | from multiprocessing import Process 2 | import os 3 | import time 4 | from random import randint 5 | from proxy import server 6 | from sele_ie import selenium 7 | from msqlhttp import done 8 | 9 | def start_selenium(port,url,uid): 10 | print "Starting Selenium" 11 | selenium(port,url,uid) 12 | 13 | def start_server(rand_port,uid): 14 | print "Starting Server " 15 | server(rand_port,uid) 16 | 17 | def mprocess(i,url): 18 | try: 19 | 20 | uid=i 21 | i =i 22 | print " Uid is " + str(uid) +" id is :" +str(i) 23 | # We generate a random port and pass it on to selenium and Proxy 24 | rand_port = randint(8000,9000) 25 | procs = [] 26 | #Append process to a list 27 | procs.append(Process(target=start_server,args=(rand_port,uid,))) 28 | procs.append(Process(target=start_selenium,args=(rand_port,url,uid,))) 29 | #python lambda http://www.secnetix.de/olli/Python/lambda_functions.hawk 30 | map(lambda x: x.start(), procs) 31 | map(lambda x: x.join(70), procs) 32 | #Lets let the entire url execute for 60 seconds 33 | #time.sleep(80) 34 | print " Updating status to DB"+str(uid) 35 | done(i) 36 | print "Terminating Proxy|Selenium\n" 37 | 38 | map(lambda x: x.terminate(), procs) 39 | 40 | except Exception as e: 41 | print e 42 | pass 43 | 44 | if __name__ == '__main__': 45 | mprocess() -------------------------------------------------------------------------------- /src/proxy/multiproc_links_ie.py: -------------------------------------------------------------------------------- 1 | from multiprocessing import Process 2 | import os 3 | import time 4 | from random import randint 5 | from proxy_links import server 6 | from sele_ie import selenium 7 | from msqlhttp import done_links 8 | 9 | def start_selenium(port,url,uid,remoteip,localip): 10 | print "Starting Selenium" 11 | selenium(port,url,uid,remoteip,localip) 12 | 13 | def start_server(rand_port,uid,localip): 14 | print "Starting Server " 15 | server(rand_port,uid,localip) 16 | 17 | def mprocess(i,url): 18 | try: 19 | 20 | localip="192.168.6.10" 21 | remoteip="192.168.6.12" 22 | uid=i 23 | i =i 24 | print " Uid is " + str(uid) +" id is :" +str(i) 25 | # We generate a random port and pass it on to selenium and Proxy 26 | rand_port = randint(8000,9000) 27 | procs = [] 28 | #Append process to a list 29 | procs.append(Process(target=start_server,args=(rand_port,uid,localip,))) 30 | procs.append(Process(target=start_selenium,args=(rand_port,url,uid,remoteip,localip,))) 31 | #python lambda http://www.secnetix.de/olli/Python/lambda_functions.hawk 32 | map(lambda x: x.start(), procs) 33 | map(lambda x: x.join(80), procs) 34 | #Lets let the entire url execute for 60 seconds 35 | 
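# join(80) above already bounds how long mprocess waits on the proxy/browser pair before terminating them, so the extra sleep stays commented out.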
#time.sleep(80) 36 | print " Updating status to DB"+str(uid) 37 | done_links(i) 38 | #done(i) 39 | print "Terminating Proxy|Selenium\n" 40 | 41 | map(lambda x: x.terminate(), procs) 42 | 43 | except Exception as e: 44 | print e 45 | pass 46 | 47 | if __name__ == '__main__': 48 | mprocess() -------------------------------------------------------------------------------- /src/proxy/parse.py: -------------------------------------------------------------------------------- 1 | def parsing (content_length, content): 2 | 3 | #Check if content length is empty 4 | if content_length is not None or content_length != 0 : 5 | print "Content-length is :"+str(content_length) 6 | print "Body Length is :" + str(len(self.body_file)) 7 | #body_file_type = ms.buffer(self.body_file[:400]) 8 | body_file_type ="buhaha" 9 | print "File type is "+body_file_type 10 | try: 11 | 12 | c_url = self.url.split(" ", 2)[1] 13 | if c_url != "": 14 | 15 | c_file = c_url.split('/')[-1] 16 | c_file = "/var/scan/expscanner/sandbox/sandbox/src/samples/dfiles/" +str(c_file) 17 | else: 18 | 19 | c_url="nothing" 20 | c_file = "/var/scan/expscanner/sandbox/sandbox/src/samples/dfiles/nothing" 21 | 22 | 23 | except Exception as e: 24 | print e 25 | pass 26 | ''' 27 | if body_file_type and "executable" in body_file_type: 28 | 29 | print "Found Executable Writing Executable to disk" 30 | try: 31 | 32 | #print(b("").join(self.body)) 33 | print "Writing file\n" 34 | data_write = open(c_file,"wb") 35 | data_write.write(self.body_file) 36 | data_write.close() 37 | 38 | except Exception as e: 39 | print e 40 | pass 41 | 42 | ''' 43 | 44 | else: 45 | content_length =0 46 | break 47 | -------------------------------------------------------------------------------- /src/proxy/proxy.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/proxy.py -------------------------------------------------------------------------------- /src/proxy/proxy_links.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/proxy/proxy_links.py -------------------------------------------------------------------------------- /src/proxy/pysph.py: -------------------------------------------------------------------------------- 1 | from pysphere import VIServer 2 | 3 | def revert(datastore): 4 | #vmsphear username and password 5 | server = VIServer() 6 | server.connect("101.91.1.21", "root", "Vsphear_root_password") 7 | 8 | vm1 = server.get_vm_by_path(datastore) 9 | print "Cuurrent Status",vm1.get_status() 10 | print "Reverting VM:", datastore 11 | vm1.revert_to_snapshot() 12 | print "Vm reverted" 13 | 14 | 15 | revert("[datastore1] WindowsXPRahul/WindowsXPRahul.vmx") -------------------------------------------------------------------------------- /src/proxy/run.sh: -------------------------------------------------------------------------------- 1 | python multiproc.py & 2 | python multiproc.py & 3 | python multiproc.py 4 | echo "exit" 5 | exit -------------------------------------------------------------------------------- /src/proxy/run_mprocess.py: -------------------------------------------------------------------------------- 1 | import sched, time,sys 2 | #append the path of mthread 3 | sys.path.append('/var/scan/expscanner/sandbox/sandbox/src/') 4 | import multiproc_java 5 | import MySQLdb as mdb 6 | from pysph import 
revert 7 | 8 | 9 | s = sched.scheduler(time.time, time.sleep) 10 | def main(sc): 11 | print "Main()" 12 | con = mdb.connect('localhost', 'root', 'password', 'sandyfiles') 13 | cur = con.cursor() 14 | cur.execute("select id,uid,url from urls uploads where sucess='0' limit 1") 15 | bindatas = cur.fetchall() 16 | querange= len(bindatas) 17 | if querange > 0: 18 | 19 | print "passing",bindatas[0][0],bindatas[0][1],bindatas[0][2] 20 | multiproc_java.mprocess(bindatas[0][0],bindatas[0][1],bindatas[0][2]) #id,uid,url 21 | #revert Snapshot 22 | revert("[datastore1] WindowsXPRahul/WindowsXPRahul.vmx") 23 | 24 | 25 | sc.enter(3, 1, main, (sc,)) 26 | s.enter(3, 1, main, (s,)) 27 | s.run() 28 | 29 | 30 | -------------------------------------------------------------------------------- /src/proxy/run_mprocess_2.py: -------------------------------------------------------------------------------- 1 | import sched, time,sys 2 | #append the path of mthread 3 | sys.path.append('/var/scan/expscanner/sandbox/sandbox/src/') 4 | import multiproc 5 | import MySQLdb as mdb 6 | 7 | s = sched.scheduler(time.time, time.sleep) 8 | def main(sc): 9 | print "Main()" 10 | con = mdb.connect('localhost', 'root', 'password', 'sandyfiles') 11 | cur = con.cursor() 12 | cur.execute("select id,uid,url from urls uploads where sucess='0' limit 1") 13 | bindatas = cur.fetchall() 14 | querange= len(bindatas) 15 | if querange > 0: 16 | 17 | print "passing",bindatas[0][0],bindatas[0][1],bindatas[0][2] 18 | multiproc.mprocess(bindatas[0][0],bindatas[0][1],bindatas[0][2]) #id,uid,url 19 | 20 | 21 | sc.enter(3, 1, main, (sc,)) 22 | s.enter(3, 1, main, (s,)) 23 | s.run() 24 | 25 | 26 | -------------------------------------------------------------------------------- /src/proxy/run_mprocess_ff.py: -------------------------------------------------------------------------------- 1 | import sched, time,sys 2 | #append the path of mthread 3 | sys.path.append('/var/scan/expscanner/sandbox/sandbox/src/') 4 | import multiproc_ff 5 | import MySQLdb as mdb 6 | from msqlhttp import in_use 7 | from pysph import revert 8 | 9 | s = sched.scheduler(time.time, time.sleep) 10 | def main(sc): 11 | print "Main()" 12 | con = mdb.connect('localhost', 'root', 'password', 'sandyfiles') 13 | cur = con.cursor() 14 | cur.execute("select id,uid,url from links where browser='1' and sucess='0' limit 1") 15 | bindatas = cur.fetchall() 16 | querange= len(bindatas) 17 | if querange > 0: 18 | 19 | print "passing",bindatas[0][0],bindatas[0][1],bindatas[0][2] 20 | in_use(bindatas[0][0]) 21 | multiproc_ff.mprocess(bindatas[0][0],bindatas[0][1],bindatas[0][2]) #id,uid,url 22 | #revert Snapshot 23 | revert("[datastore1] WindowsXPRahul/WindowsXPRahul.vmx") 24 | 25 | 26 | 27 | sc.enter(3, 1, main, (sc,)) 28 | s.enter(3, 1, main, (s,)) 29 | s.run() 30 | 31 | 32 | -------------------------------------------------------------------------------- /src/proxy/run_mprocess_ff_1.py: -------------------------------------------------------------------------------- 1 | import sched, time,sys 2 | #append the path of mthread 3 | sys.path.append('/var/scan/expscanner/sandbox/sandbox/src/') 4 | import multiproc_ff 5 | import MySQLdb as mdb 6 | from msqlhttp import in_use 7 | 8 | s = sched.scheduler(time.time, time.sleep) 9 | def main(sc): 10 | print "Main()" 11 | con = mdb.connect('localhost', 'root', 'password', 'sandyfiles') 12 | cur = con.cursor() 13 | cur.execute("select id,uid,url from links where browser='1' and sucess='0' limit 1") 14 | bindatas = cur.fetchall() 15 | querange= 
len(bindatas) 16 | if querange > 0: 17 | 18 | print "passing",bindatas[0][0],bindatas[0][1],bindatas[0][2] 19 | in_use(bindatas[0][0]) 20 | multiproc_ff.mprocess(bindatas[0][0],bindatas[0][1],bindatas[0][2]) #id,uid,url 21 | 22 | 23 | sc.enter(6, 1, main, (sc,)) 24 | s.enter(6, 1, main, (s,)) 25 | s.run() 26 | 27 | 28 | -------------------------------------------------------------------------------- /src/proxy/run_mprocess_ie.py: -------------------------------------------------------------------------------- 1 | import sched, time,sys 2 | #append the path of mthread 3 | sys.path.append('/var/scan/expscanner/sandbox/sandbox/src/') 4 | import multiproc_links_ie 5 | import MySQLdb as mdb 6 | from pysph import revert 7 | 8 | s = sched.scheduler(time.time, time.sleep) 9 | def main(sc): 10 | print "Main()" 11 | con = mdb.connect('localhost', 'root', 'password', 'sandyfiles') 12 | cur = con.cursor() 13 | cur.execute("select id,url from links where sucess='0' and browser='2' limit 1") 14 | bindatas = cur.fetchall() 15 | querange= len(bindatas) 16 | if querange > 0: 17 | 18 | print "passing",bindatas[0][0],bindatas[0][1] 19 | multiproc_links_ie.mprocess(bindatas[0][0],bindatas[0][1]) #id,url 20 | revert("[datastore1] WindowsXPRahul2/WindowsXPRahul.vmx") 21 | 22 | 23 | sc.enter(3, 1, main, (sc,)) 24 | s.enter(3, 1, main, (s,)) 25 | s.run() 26 | 27 | 28 | -------------------------------------------------------------------------------- /src/proxy/sele.py: -------------------------------------------------------------------------------- 1 | from selenium import webdriver 2 | from selenium.webdriver.common.keys import Keys 3 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities 4 | from selenium.webdriver.common.proxy import * 5 | import time 6 | 7 | 8 | def selenium (port,url,uid): 9 | server_ip ="" 10 | client_ip="" 11 | uid =uid 12 | myProxy = "192.168.6.10:"+str(port) 13 | print "Fetchning Url:"+str(url) 14 | proxy = Proxy({ 15 | 'proxyType': ProxyType.MANUAL, 16 | 'httpProxy': myProxy, 17 | 'ftpProxy': myProxy, 18 | 'sslProxy': myProxy, 19 | 'noProxy': '' # set this value as desired 20 | }) 21 | 22 | caps = webdriver.DesiredCapabilities.FIREFOX 23 | proxy.add_to_capabilities(caps) 24 | fp = webdriver.FirefoxProfile() 25 | fp.set_preference("browser.download.manager.showWhenStarting", False); 26 | fp.set_preference("browser.helperApps.neverAsk.saveToDisk", "application/octet-stream"); 27 | fp.update_preferences() 28 | 29 | 30 | driver = webdriver.Remote( 31 | command_executor='http://192.168.6.12:4444/wd/hub', 32 | desired_capabilities=caps,browser_profile=fp) 33 | print "\nCalling Browser and Url\n" 34 | try: 35 | 36 | driver.get(url) 37 | #Wait another 15 seconds more after request is compleated 38 | #time.sleep(30) 39 | except Exception as e: 40 | print e 41 | pass 42 | 43 | try: 44 | 45 | s_filename = "/var/www/sandy/screenshots/"+str(uid)+".png" 46 | driver.get_screenshot_as_file(s_filename) 47 | except Exception as e: 48 | print e 49 | pass 50 | try: 51 | 52 | time.sleep(20) 53 | driver.close() 54 | print "Browser Closed" 55 | except Exception as e: 56 | print e 57 | pass 58 | 59 | if __name__ == '__main__': 60 | selenium() -------------------------------------------------------------------------------- /src/proxy/sele_ff.py: -------------------------------------------------------------------------------- 1 | from selenium import webdriver 2 | from selenium.webdriver.common.keys import Keys 3 | from selenium.webdriver.common.desired_capabilities import 
DesiredCapabilities 4 | from selenium.webdriver.common.proxy import * 5 | import time 6 | from msqlhttp import insert_html 7 | import base64 8 | from yara_scan_links import yara_match 9 | import sys; 10 | sys.setdefaultencoding("utf8") 11 | 12 | 13 | def selenium (port,url,uid,local_ip,remote_ip): 14 | local_ip=local_ip 15 | remote_ip =remote_ip 16 | #remote_ip ='10.91.152' 17 | 18 | server_ip ="" 19 | client_ip="" 20 | uid =uid 21 | myProxy = str(local_ip)+":"+str(port) 22 | print "Fetchning Url:"+str(url) 23 | proxy = Proxy({ 24 | 'proxyType': ProxyType.MANUAL, 25 | 'httpProxy': myProxy, 26 | 'ftpProxy': myProxy, 27 | 'sslProxy': myProxy, 28 | 'noProxy': '' # set this value as desired 29 | }) 30 | 31 | caps = webdriver.DesiredCapabilities.FIREFOX 32 | proxy.add_to_capabilities(caps) 33 | fp = webdriver.FirefoxProfile() 34 | fp.set_preference("browser.download.manager.showWhenStarting", False); 35 | fp.set_preference("browser.helperApps.neverAsk.saveToDisk", "application/octet-stream"); 36 | fp.update_preferences() 37 | 38 | 39 | driver = webdriver.Remote( 40 | command_executor='http://'+remote_ip+':4444/wd/hub', 41 | desired_capabilities=caps,browser_profile=fp) 42 | print "\nCalling Browser and Url\n" 43 | 44 | 45 | 46 | try: 47 | 48 | driver.get(url) 49 | #Wait another 15 seconds more after request is compleated 50 | print 51 | time.sleep(40) 52 | except Exception as e: 53 | print e 54 | pass 55 | 56 | try: 57 | 58 | print " Getting Source\n" 59 | html_source_js = driver.page_source 60 | print "Scanning for signature\n" 61 | html_source_js = html_source_js.encode('utf-8') 62 | yara_results ="" 63 | yara_results = yara_match(html_source_js) 64 | yara_results = "
Dom_Scan"+str(yara_results) 65 | print yara_results 66 | 67 | print "Updating Source to Db\n" 68 | 69 | insert_html(uid,html_source_js,yara_results) 70 | 71 | except Exception as e: 72 | print e 73 | pass 74 | 75 | try: 76 | 77 | print "Getting Screen Shot\n" 78 | s_filename = "/var/www/sandy/screenshots_links/"+str(uid)+".png" 79 | driver.get_screenshot_as_file(s_filename) 80 | time.sleep(5) 81 | driver.close() 82 | print "Browser Closed" 83 | except Exception as e: 84 | print e 85 | pass 86 | 87 | if __name__ == '__main__': 88 | selenium() -------------------------------------------------------------------------------- /src/proxy/sele_ie.py: -------------------------------------------------------------------------------- 1 | from selenium import webdriver 2 | from selenium.webdriver.common.keys import Keys 3 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities 4 | from selenium.webdriver.common.proxy import * 5 | import time 6 | from msqlhttp import insert_html,binary_found 7 | import base64 8 | from yara_scan_links import yara_match 9 | import sys; 10 | sys.setdefaultencoding("utf8") 11 | 12 | 13 | def selenium (port,url,uid,remote_ip,local_ip): 14 | 15 | uid =uid 16 | myProxy = str(local_ip)+":"+str(port) 17 | print "Fetchning Url:"+str(url) 18 | webdriver.DesiredCapabilities.INTERNETEXPLORER['proxy'] = { 19 | "httpProxy":myProxy, 20 | "ftpProxy":myProxy, 21 | "sslProxy":myProxy, 22 | "noProxy":None, 23 | "proxyType":"MANUAL", 24 | "class":"org.openqa.selenium.Proxy", 25 | "autodetect":False } 26 | 27 | 28 | 29 | driver = webdriver.Remote("http://"+str(remote_ip)+":4444/wd/hub", webdriver.DesiredCapabilities.INTERNETEXPLORER) 30 | print "\nCalling Browser and Url\n" 31 | try: 32 | 33 | driver.get(url) 34 | #Wait another 15 seconds more after request is compleated 35 | time.sleep(40) 36 | except Exception as e: 37 | print e 38 | pass 39 | 40 | try: 41 | 42 | print " Getting Source\n" 43 | html_source_js = driver.page_source 44 | print "Scanning for signature\n" 45 | html_source_js = html_source_js.encode('utf-8') 46 | yara_results ="" 47 | yara_results = yara_match(html_source_js) 48 | yara_results = "
Dom_Scan"+str(yara_results) 49 | print yara_results 50 | 51 | print "Updating Source to Db\n" 52 | 53 | insert_html(uid,html_source_js,yara_results) 54 | 55 | except Exception as e: 56 | 57 | if "save" in str(e): 58 | print "Binary Found, updating status" 59 | binary_found(uid) 60 | print "Exception in Source:"+str(e) 61 | pass 62 | 63 | try: 64 | 65 | print " Getting Screenshot\n" 66 | s_filename = "/var/www/sandy/screenshots_links/"+str(uid)+".png" 67 | driver.get_screenshot_as_file(s_filename) 68 | driver.close() 69 | print "Browser Closed" 70 | except Exception as e: 71 | print "Exception in Screenshot"+str(e) 72 | pass 73 | 74 | if __name__ == '__main__': 75 | selenium() -------------------------------------------------------------------------------- /src/proxy/sele_java.py: -------------------------------------------------------------------------------- 1 | from selenium import webdriver 2 | from selenium.webdriver.common.keys import Keys 3 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities 4 | from selenium.webdriver.common.proxy import * 5 | import time 6 | 7 | 8 | def selenium (port,url,uid): 9 | server_ip ="" 10 | client_ip="" 11 | uid =uid 12 | myProxy = "192.168.6.10:"+str(port) 13 | print "Fetchning Url:"+str(url) 14 | proxy = Proxy({ 15 | 'proxyType': ProxyType.MANUAL, 16 | 'httpProxy': myProxy, 17 | 'ftpProxy': myProxy, 18 | 'sslProxy': myProxy, 19 | 'noProxy': '' # set this value as desired 20 | }) 21 | 22 | caps = webdriver.DesiredCapabilities.FIREFOX 23 | proxy.add_to_capabilities(caps) 24 | fp = webdriver.FirefoxProfile() 25 | fp.set_preference("browser.download.manager.showWhenStarting", False); 26 | fp.set_preference("browser.helperApps.neverAsk.saveToDisk", "application/octet-stream"); 27 | fp.update_preferences() 28 | 29 | 30 | driver = webdriver.Remote( 31 | command_executor='http://192.168.6.13:4444/wd/hub', 32 | desired_capabilities=caps,browser_profile=fp) 33 | print "\nCalling Browser and Url\n" 34 | try: 35 | 36 | driver.get(url) 37 | #Wait another 15 seconds more after request is compleated 38 | time.sleep(20) 39 | except Exception as e: 40 | print e 41 | pass 42 | 43 | try: 44 | 45 | s_filename = "/var/www/sandy/screenshots/"+str(uid)+".png" 46 | driver.get_screenshot_as_file(s_filename) 47 | except Exception as e: 48 | print e 49 | pass 50 | try: 51 | 52 | time.sleep(20) 53 | driver.close() 54 | print "Browser Closed" 55 | except Exception as e: 56 | print e 57 | pass 58 | 59 | if __name__ == '__main__': 60 | selenium() -------------------------------------------------------------------------------- /src/proxy/yara_scan_links.py: -------------------------------------------------------------------------------- 1 | import yara 2 | 3 | matches = dict() 4 | rules = yara.compile(filepaths={ 5 | 6 | "Exploits":"/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/browser/exploits.yar", 7 | "Exploit-kits":"/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/browser/exploit_kits.yar", 8 | "Java_Script":"/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/jsunpack/jsunpack.yar", 9 | "Exploits-ie":"/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/ie/exploit.yar", 10 | "Exploits-js":"/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/jsclassifier.yar", 11 | "Exploits-url":"/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/urlclassifier.yar" 12 | }) 13 | 14 | 15 | 16 | def yara_match(scan_file): 17 | yara_rules =[] 18 | matches = rules.match(data=scan_file) 19 | ##matches = str(matches) 
20 | keys = matches.keys() 21 | for keys in matches: 22 | each_value = matches[keys] 23 | for each in each_value: 24 | match = keys+":"+each["rule"] 25 | #print match 26 | yara_rules.append(match) 27 | 28 | return yara_rules 29 | 30 | if __name__ == "__main__": 31 | fo = open("test1.txt", "r+") 32 | scan_file = fo.read(); 33 | match = yara_match(scan_file) 34 | print match 35 | -------------------------------------------------------------------------------- /src/readme: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/src/readme -------------------------------------------------------------------------------- /src/samples/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2010-2013 Cuckoo Sandbox Developers. 2 | # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org 3 | # See the file 'docs/LICENSE' for copying permission. 4 | -------------------------------------------------------------------------------- /src/samples/build/readme: -------------------------------------------------------------------------------- 1 | Extracted Sample goes here -------------------------------------------------------------------------------- /src/samples/dfiles/readme: -------------------------------------------------------------------------------- 1 | Extracted Sample goes here -------------------------------------------------------------------------------- /src/samples/jfiles/binaries/readme: -------------------------------------------------------------------------------- 1 | Extracted Sample goes here -------------------------------------------------------------------------------- /src/samples/jfiles/decompiled/readme: -------------------------------------------------------------------------------- 1 | Extracted Sample goes here -------------------------------------------------------------------------------- /src/samples/jfiles/extracted/readme: -------------------------------------------------------------------------------- 1 | Extracted Sample goes here -------------------------------------------------------------------------------- /src/samples/o/readme: -------------------------------------------------------------------------------- 1 | Extracted Sample goes here -------------------------------------------------------------------------------- /src/samples/run.py: -------------------------------------------------------------------------------- 1 | import sched, time,sys 2 | #append the path of mthread 3 | sys.path.append('/var/scan/expscanner/sandbox/sandbox/src/') 4 | import mthread 5 | from timeout import timeout 6 | import create_html_template 7 | 8 | s = sched.scheduler(time.time, time.sleep) 9 | 10 | @timeout(30) 11 | def main(sc): 12 | print "Main()" 13 | mthread.manytasks(sas="sas") 14 | create_html_template.create_html_d(sas='Urlgen') 15 | 16 | sc.enter(2, 1, main, (sc,)) 17 | s.enter(2, 1, main, (s,)) 18 | s.run() 19 | 20 | 21 | -------------------------------------------------------------------------------- /src/timeout.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Time out code from 3 | 4 | http://stackoverflow.com/questions/2281850/timeout-function-if-it-takes-too-long-to-finish 5 | ''' 6 | from functools import wraps 7 | import errno 8 | import os 9 | import signal 10 | 11 | class TimeoutError(Exception): 12 | pass 13 | 14 | def timeout(seconds=10, 
error_message=os.strerror(errno.ETIME)): 15 | def decorator(func): 16 | def _handle_timeout(signum, frame): 17 | raise TimeoutError(error_message) 18 | 19 | def wrapper(*args, **kwargs): 20 | signal.signal(signal.SIGALRM, _handle_timeout) 21 | signal.alarm(seconds) 22 | try: 23 | result = func(*args, **kwargs) 24 | finally: 25 | signal.alarm(0) 26 | return result 27 | 28 | return wraps(func)(wrapper) 29 | 30 | return decorator -------------------------------------------------------------------------------- /src/yara_scan.py: -------------------------------------------------------------------------------- 1 | import yara 2 | 3 | rules = yara.compile(filepaths={ 4 | 5 | 'JavaExploit':'/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/java/exploit.yar', 6 | 'InternetExplorer':'/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/ie/exploit.yar', 7 | 'ClamAv':'/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/clam_av/clam_av.yar', 8 | 'test':'/var/scan/expscanner/sandbox/sandbox/yara-ctypes/yara/rules/clam_av/test.yar' 9 | }) 10 | 11 | 12 | 13 | def yara_match(scan_file): 14 | 15 | matches = rules.match(data=scan_file) 16 | return matches 17 | 18 | 19 | 20 | if __name__ == "__main__": 21 | fo = open("test1.txt", "r+") 22 | scan_file = fo.read(); 23 | matches ="" 24 | matches=yara_match(scan_file) 25 | print matches 26 | -------------------------------------------------------------------------------- /yara-ctypes/.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - 2.6 4 | - 2.7 5 | - 3.2 6 | - "pypy" 7 | install: 8 | - python setup.py install 9 | script: python setup.py test 10 | notifications: 11 | email: mjdorma+travis-ci@gmail.com 12 | -------------------------------------------------------------------------------- /yara-ctypes/MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.rst 2 | include distribute_setup.py 3 | recursive-include tests *.py 4 | recursive-include tests *.yar 5 | recursive-include yara/rules *.yar 6 | recursive-include libs *.so 7 | recursive-include libs *.dll 8 | -------------------------------------------------------------------------------- /yara-ctypes/README.rst: -------------------------------------------------------------------------------- 1 | Introduction to yara-ctypes-python 2 | ********************************** 3 | 4 | What is yara-ctypes: 5 | 6 | * A powerful python wrapper for `yara-project's libyara v1.6`_. 7 | * Supports thread safe matching of YARA rules. 8 | * namespace management to allow easy loading of multiple YARA rules into a 9 | single libyara context. 10 | * Comes with a scan module which exposes a user CLI and demonstrates a pattern 11 | for executing match jobs across a thread pool. 12 | 13 | 14 | Why: 15 | 16 | * ctypes releases the GIL on system function calls... Run your PC to its 17 | true potential. 18 | * No more building the PyC extension... 19 | * I found a few bugs and memory leaks and wanted to make my life simple. 20 | 21 | 22 | As a reference and guide to yara-ctypes see: `yara-ctypes documentation`_ 23 | 24 | 25 | For additional tips / tricks with this wrapper feel free to post a question at 26 | the github `yara-ctypes/issues`_ page. 27 | 28 | 29 | Project hosting provided by `github.com`_. 
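As a quick illustration of the wrapper's basic flow, here is a minimal sketch modelled on this repository's ``src/yara_scan.py``; the rule path and sample filename below are placeholders rather than files shipped with the package::

    import yara

    # Compile one or more rule files into a single context.
    # Each dict key becomes the namespace reported in the match results.
    rules = yara.compile(filepaths={
        'Example': '/path/to/rules/example.yar',  # placeholder path
    })

    # Match the compiled rules against an in-memory buffer.
    with open('sample.bin', 'rb') as fo:          # placeholder sample
        matches = rules.match(data=fo.read())
    print matches
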
30 | 31 | 32 | [mjdorma+yara-ctypes@gmail.com] 33 | 34 | 35 | Install and run 36 | =============== 37 | 38 | Simply run the following:: 39 | 40 | > python setup.py install 41 | > python setup.py test 42 | > python -m yara.scan -h 43 | 44 | or `PyPi`_:: 45 | 46 | > pip install yara 47 | > python -m yara.scan -h 48 | 49 | 50 | .. note:: 51 | 52 | If the package does not contain a pre-compiled libyara library for your 53 | platform you will need to build and install it. See `notes on building`_. 54 | 55 | 56 | Compatability 57 | ============= 58 | 59 | *yara-ctypes* is implemented to be compatible with Python 2.6+ and Python 3.x. 60 | It has been tested against the following Python implementations: 61 | 62 | Ubuntu 12.04: 63 | 64 | + CPython 2.7 (32bit, 64bit) 65 | + CPython 3.2 (32bit, 64bit) 66 | 67 | Ubuntu 11.10 |build_status|: 68 | 69 | + CPython 2.6 (32bit) 70 | + CPython 2.7 (32bit) 71 | + CPython 3.2 (32bit) 72 | + PyPy 1.9.0 (32bit) 73 | 74 | Windows 7: 75 | 76 | + CPython 2.6 (32bit, 64bit) 77 | + CPython 3.2 (32bit, 64bit) 78 | 79 | 80 | Continuous integration testing is provided by `Travis CI `_. 81 | 82 | 83 | Issues 84 | ====== 85 | 86 | Source code for *yara-ctypes* is hosted on `GitHub `_. 87 | Please file `bug reports `_ 88 | with GitHub's issues system. 89 | 90 | 91 | Change log 92 | ========== 93 | 94 | version 1.6.0 (01/09/2012) 95 | 96 | * Initial release 97 | 98 | version 1.6.1 (06/09/2012) 99 | 100 | * Support for 64bit Windows 101 | * Bug fixes 102 | * Added documentation 103 | 104 | 105 | 106 | .. _github.com: https://github.com/mjdorma/yara-ctypes 107 | .. _PyPi: http://pypi.python.org/pypi/yara 108 | .. _yara-ctypes/issues: https://github.com/mjdorma/yara-ctypes/issues 109 | .. _notes on building: http://packages.python.org/yara/howto/build.html 110 | .. _yara-ctypes documentation: http://packages.python.org/yara/ 111 | .. _yara-project's libyara v1.6: http://code.google.com/p/yara-project 112 | .. 
|build_status| image:: https://secure.travis-ci.org/mjdorma/yara-ctypes.png?branch=master 113 | :target: http://travis-ci.org/#!/mjorma/yara-ctypes 114 | -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/__init__.py: -------------------------------------------------------------------------------- 1 | """Compile YARA rules to test against files or strings 2 | 3 | [mjdorma@gmail.com] 4 | """ 5 | 6 | from yara.version import __version__ 7 | from yara.rules import compile 8 | from yara.rules import load_rules 9 | from yara.rules import Rules 10 | from yara.rules import YARA_RULES_ROOT 11 | from yara.rules import CALLBACK_CONTINUE 12 | from yara.rules import CALLBACK_ABORT 13 | from yara.scan import Scanner 14 | -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/antidebug.yar: -------------------------------------------------------------------------------- 1 | rule DebuggerCheck__API : AntiDebug DebuggerCheck { 2 | meta: 3 | weight = 1 4 | strings: 5 | $ ="IsDebuggerPresent" 6 | condition: 7 | any of them 8 | } 9 | 10 | rule DebuggerCheck__PEB : AntiDebug DebuggerCheck { 11 | meta: 12 | weight = 1 13 | strings: 14 | $ ="IsDebugged" 15 | condition: 16 | any of them 17 | } 18 | 19 | rule DebuggerCheck__GlobalFlags : AntiDebug DebuggerCheck { 20 | meta: 21 | weight = 1 22 | strings: 23 | $ ="NtGlobalFlags" 24 | condition: 25 | any of them 26 | } 27 | 28 | rule DebuggerCheck__QueryInfo : AntiDebug DebuggerCheck { 29 | meta: 30 | weight = 1 31 | strings: 32 | $ ="QueryInformationProcess" 33 | condition: 34 | any of them 35 | } 36 | 37 | rule DebuggerCheck__RemoteAPI : AntiDebug DebuggerCheck { 38 | meta: 39 | weight = 1 40 | strings: 41 | $ ="CheckRemoteDebuggerPresent" 42 | condition: 43 | any of them 44 | } 45 | 46 | /////////////////////////////////////////////////////////////////////////////// 47 | rule DebuggerHiding__Thread : AntiDebug DebuggerHiding { 48 | meta: 49 | weight = 1 50 | strings: 51 | $ ="SetInformationThread" 52 | condition: 53 | any of them 54 | } 55 | 56 | rule DebuggerHiding__Active : AntiDebug DebuggerHiding { 57 | meta: 58 | weight = 1 59 | strings: 60 | $ ="DebugActiveProcess" 61 | condition: 62 | any of them 63 | } 64 | 65 | rule DebuggerTiming__PerformanceCounter : AntiDebug DebuggerTiming { 66 | meta: 67 | weight = 1 68 | strings: 69 | $ ="QueryPerformanceCounter" 70 | condition: 71 | any of them 72 | } 73 | 74 | rule DebuggerTiming__Ticks : AntiDebug DebuggerTiming { 75 | meta: 76 | weight = 1 77 | strings: 78 | $ ="GetTickCount" 79 | condition: 80 | any of them 81 | } 82 | 83 | rule DebuggerOutput__String : AntiDebug DebuggerOutput { 84 | meta: 85 | weight = 1 86 | strings: 87 | $ ="OutputDebugString" 88 | condition: 89 | any of them 90 | } 91 | 92 | /////////////////////////////////////////////////////////////////////////////// 93 | rule DebuggerException__UnhandledFilter : AntiDebug DebuggerException { 94 | meta: 95 | weight = 1 96 | strings: 97 | $ ="SetUnhandledExceptionFilter" 98 | condition: 99 | any of them 100 | } 101 | 102 | rule DebuggerException__ConsoleCtrl : AntiDebug DebuggerException { 103 | meta: 104 | weight = 1 105 | strings: 106 | $ ="GenerateConsoleCtrlEvent" 107 | condition: 108 | any of them 109 | } 110 | 111 | rule DebuggerException__SetConsoleCtrl : AntiDebug DebuggerException { 112 | meta: 113 | weight = 1 114 | strings: 115 | $ ="SetConsoleCtrlHandler" 116 | condition: 117 | any of 
them 118 | } 119 | 120 | /////////////////////////////////////////////////////////////////////////////// 121 | rule ThreadControl__Context : AntiDebug ThreadControl { 122 | meta: 123 | weight = 1 124 | strings: 125 | $ ="SetThreadContext" 126 | condition: 127 | any of them 128 | } 129 | 130 | rule DebuggerCheck__DrWatson : AntiDebug DebuggerCheck { 131 | meta: 132 | weight = 1 133 | strings: 134 | $ ="__invoke__watson" 135 | condition: 136 | any of them 137 | } 138 | 139 | rule SEH__v3 : AntiDebug SEH { 140 | meta: 141 | weight = 1 142 | strings: 143 | $ = "____except__handler3" 144 | $ = "____local__unwind3" 145 | condition: 146 | any of them 147 | } 148 | 149 | rule SEH__v4 : AntiDebug SEH { 150 | // VS 8.0+ 151 | meta: 152 | weight = 1 153 | strings: 154 | $ = "____except__handler4" 155 | $ = "____local__unwind4" 156 | $ = "__XcptFilter" 157 | condition: 158 | any of them 159 | } 160 | 161 | rule SEH__vba : AntiDebug SEH { 162 | meta: 163 | weight = 1 164 | strings: 165 | $ = "vbaExceptHandler" 166 | condition: 167 | any of them 168 | } 169 | 170 | rule SEH__vectored : AntiDebug SEH { 171 | meta: 172 | weight = 1 173 | strings: 174 | $ = "AddVectoredExceptionHandler" 175 | $ = "RemoveVectoredExceptionHandler" 176 | condition: 177 | any of them 178 | } 179 | 180 | /////////////////////////////////////////////////////////////////////////////// 181 | // Patterns 182 | rule DebuggerPattern__RDTSC : AntiDebug DebuggerPattern { 183 | meta: 184 | weight = 1 185 | strings: 186 | $ = {0F 31} 187 | condition: 188 | any of them 189 | } 190 | 191 | rule DebuggerPattern__CPUID : AntiDebug DebuggerPattern { 192 | meta: 193 | weight = 1 194 | strings: 195 | $ = {0F A2} 196 | condition: 197 | any of them 198 | } 199 | 200 | rule DebuggerPattern__SEH_Saves : AntiDebug DebuggerPattern { 201 | meta: 202 | weight = 1 203 | strings: 204 | $ = {64 ff 35 00 00 00 00} 205 | condition: 206 | any of them 207 | } 208 | 209 | rule DebuggerPattern__SEH_Inits : AntiDebug DebuggerPattern { 210 | meta: 211 | weight = 1 212 | strings: 213 | $ = {64 89 25 00 00 00 00} 214 | condition: 215 | any of them 216 | } -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/compiler.yar: -------------------------------------------------------------------------------- 1 | rule RTTI__enabled : Compiler RTTI { 2 | meta: 3 | weight = 1 4 | strings: 5 | $ ="run-time check failure #" nocase 6 | condition: 7 | any of them 8 | } 9 | 10 | rule CompilerVersion__Microsoft_Visual_Basic_5_0 : Compiler CompilerVersion { 11 | meta: 12 | weight = 1 13 | strings: 14 | $ ="msvbvm50" 15 | condition: 16 | any of them 17 | } 18 | 19 | rule CompilerVersion__Microsoft_Visual_Basic_6_0 : Compiler CompilerVersion { 20 | meta: 21 | weight = 1 22 | strings: 23 | $ ="msvbvm60" 24 | condition: 25 | any of them 26 | } 27 | 28 | rule CompilerVersion__Microsoft_Visual_Basic_4_0_16bit : Compiler CompilerVersion { 29 | meta: 30 | weight = 1 31 | strings: 32 | $ ="vb0016.dll" 33 | condition: 34 | any of them 35 | } 36 | 37 | rule CompilerVersion__Microsoft_Visual_Basic_4_0_32bit : Compiler CompilerVersion { 38 | meta: 39 | weight = 1 40 | strings: 41 | $ ="vb0032.dll" 42 | condition: 43 | any of them 44 | } 45 | 46 | // TODO Line 50, Unknown how to match paths for pdb file and such 47 | 48 | rule CompilerVersion__Delphi : Compiler CompilerVersion { 49 | meta: 50 | weight = 1 51 | strings: 52 | $ ="this program must be run under win32" nocase 53 | $ ="SOFTWARE\\Borland\\Delphi\\RTL" 
nocase 54 | condition: 55 | any of them 56 | } 57 | 58 | // TODO Line 80, Unknown how to match regexes... lots of them 59 | 60 | // Line 168 61 | rule CompilerVersion__Microsoft_Visual_Cpp_4_2 : Compiler CompilerVersion { 62 | meta: 63 | weight = 1 64 | strings: 65 | $ = /MSVBVM(|D).DLL/ nocase 66 | condition: 67 | any of them 68 | } 69 | 70 | // TODO skipping check at line 175 71 | // TODO Should identify when it's the debug build vs release 72 | rule CompilerVersion__Microsoft_Visual_Cpp_5_0 : Compiler CompilerVersion { 73 | meta: 74 | weight = 1 75 | strings: 76 | $ =/MSVC(P|R)50(|D).DLL/ nocase 77 | condition: 78 | any of them 79 | } 80 | 81 | rule CompilerVersion__Microsoft_Visual_Cpp_6_0 : Compiler CompilerVersion { 82 | meta: 83 | weight = 1 84 | strings: 85 | $ =/MSVC(P|R)60(|D).DLL/ nocase 86 | condition: 87 | any of them 88 | } 89 | 90 | rule CompilerVersion__Microsoft_Visual_Cpp_2002 : Compiler CompilerVersion { 91 | meta: 92 | weight = 1 93 | strings: 94 | $ =/MSVC(P|R)70(|D).DLL/ nocase 95 | condition: 96 | any of them 97 | } 98 | 99 | rule CompilerVersion__Microsoft_Visual_Cpp_2003 : Compiler CompilerVersion { 100 | meta: 101 | weight = 1 102 | strings: 103 | $ =/MSVC(P|R)71(|D).DLL/ nocase 104 | condition: 105 | any of them 106 | } 107 | 108 | rule CompilerVersion__Microsoft_Visual_Cpp_2005 : Compiler CompilerVersion { 109 | meta: 110 | weight = 1 111 | strings: 112 | $ =/MSVC(P|R)80(|D).DLL/ nocase 113 | condition: 114 | any of them 115 | } 116 | 117 | rule CompilerVersion__Microsoft_Visual_Cpp_2008 : Compiler CompilerVersion { 118 | meta: 119 | weight = 1 120 | strings: 121 | $ =/MSVC(P|R)90(|D).DLL/ nocase 122 | condition: 123 | any of them 124 | } 125 | 126 | // TODO add check for VS2010 127 | 128 | rule CompilerPattern__BufferSecurityChecks : AntiDebug CompilerPattern { 129 | meta: 130 | weight = 1 131 | strings: 132 | $ = {8B 4D FC 33 CD E8} 133 | condition: 134 | any of them 135 | } 136 | 137 | rule CompilerPattern__FPO_Count : AntiDebug CompilerPattern { 138 | meta: 139 | weight = 1 140 | strings: 141 | $ = {C7 44 24 ?? 
00 00 00 00} 142 | condition: 143 | any of them 144 | } -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/compression.yar: -------------------------------------------------------------------------------- 1 | rule CompressionUsed__LZ_Compression : Compression CompressionUsed { 2 | meta: 3 | weight = 1 4 | strings: 5 | $ ="LZOpenFile" nocase 6 | $ ="LZClose" nocase 7 | $ ="LZCopy" nocase 8 | $ ="LZRead" nocase 9 | $ ="LZInit" nocase 10 | $ ="LZSeek" nocase 11 | condition: 12 | any of them 13 | } 14 | 15 | rule CompressionUsed__UPX_Packing : Compression CompressionUsed { 16 | meta: 17 | weight = 1 18 | strings: 19 | $ ="UPX0" nocase 20 | $ ="UPX1" nocase 21 | condition: 22 | any of them 23 | } 24 | 25 | -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/fingerprint.yar: -------------------------------------------------------------------------------- 1 | include "antidebug.yar" 2 | include "compiler.yar" 3 | include "compression.yar" 4 | include "integerparsing.yar" 5 | include "libs.yar" 6 | include "microsoft.yar" 7 | include "sockets.yar" 8 | 9 | 10 | // TODO didn't do msapi.cs, pe.cs, or strings.cs -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/integerparsing.yar: -------------------------------------------------------------------------------- 1 | // Originally, I had regexes, but that was slow (made the program run in over a second, 2 | // instead of under half a second)... so I'm using strings 3 | rule DataConversion__ansi : IntegerParsing DataConversion { 4 | meta: 5 | weight = 1 6 | strings: 7 | $ = "atoi" nocase 8 | $ = "atol" nocase 9 | $ = "atof" nocase 10 | $ = "atodb" nocase 11 | condition: 12 | any of them 13 | } 14 | 15 | 16 | rule DataConversion__wide : IntegerParsing DataConversion { 17 | meta: 18 | weight = 1 19 | strings: 20 | $ = "wtoi" nocase 21 | $ = "wtol" nocase 22 | $ = "wtof" nocase 23 | $ = "wtodb" nocase 24 | condition: 25 | any of them 26 | } 27 | 28 | 29 | rule DataConversion__64bit : IntegerParsing DataConversion { 30 | meta: 31 | weight = 1 32 | strings: 33 | $ = "atoi64" nocase 34 | $ = "wtoi64" nocase 35 | $ = "atol64" nocase 36 | $ = "wtol64" nocase 37 | $ = "atof64" nocase 38 | $ = "wtof64" nocase 39 | $ = "atodb64" nocase 40 | $ = "wtodb64" nocase 41 | condition: 42 | any of them 43 | } 44 | 45 | 46 | rule DataConversion__locale : IntegerParsing DataConversion { 47 | meta: 48 | weight = 1 49 | strings: 50 | $ = "atoi_l" nocase 51 | $ = "wtoi_l" nocase 52 | $ = "atoi64_l" nocase 53 | $ = "wtoi64_l" nocase 54 | 55 | $ = "atol_l" nocase 56 | $ = "wtol_l" nocase 57 | $ = "atol64_l" nocase 58 | $ = "wtol64_l" nocase 59 | 60 | $ = "atof_l" nocase 61 | $ = "wtof_l" nocase 62 | $ = "atof64_l" nocase 63 | $ = "wtof64_l" nocase 64 | 65 | $ = "atodb_l" nocase 66 | $ = "wtodb_l" nocase 67 | $ = "atodb64_l" nocase 68 | $ = "wtodb64_l" nocase 69 | condition: 70 | any of them 71 | } 72 | 73 | 74 | rule DataConversion__int : IntegerParsing DataConversion { 75 | meta: 76 | weight = 1 77 | strings: 78 | $ = "atoi" nocase 79 | $ = "wtoi" nocase 80 | condition: 81 | any of them 82 | } 83 | 84 | 85 | rule DataConversion__long : IntegerParsing DataConversion { 86 | meta: 87 | weight = 1 88 | strings: 89 | $ = "atol" nocase 90 | $ = "wtol" nocase 91 | condition: 92 | any of them 93 | } 94 | 95 | rule 
DataConversion__float : IntegerParsing DataConversion { 96 | meta: 97 | weight = 1 98 | strings: 99 | $ = "atof" nocase 100 | $ = "wtof" nocase 101 | condition: 102 | any of them 103 | } 104 | 105 | rule DataConversion__double : IntegerParsing DataConversion { 106 | meta: 107 | weight = 1 108 | strings: 109 | $ = "atodb" nocase 110 | $ = "wtodb" nocase 111 | condition: 112 | any of them 113 | } 114 | 115 | rule DataConversion__longdouble : IntegerParsing DataConversion { 116 | meta: 117 | weight = 1 118 | strings: 119 | $ = "atodbl" nocase 120 | $ = "wtodbl" nocase 121 | condition: 122 | any of them 123 | } -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/libs.yar: -------------------------------------------------------------------------------- 1 | // TODO get xvid codex version 2 | rule LibsUsed__xvid_codex : Libs LibsUsed { 3 | meta: 4 | weight = 1 5 | strings: 6 | $ = "xvid codex " nocase 7 | condition: 8 | any of them 9 | } 10 | 11 | rule LibsUsed__libpng : Libs LibsUsed { 12 | meta: 13 | weight = 1 14 | strings: 15 | $ = "MNG features are not allowed in a PNG datastream" nocase 16 | condition: 17 | any of them 18 | } 19 | 20 | // TODO get inflate library version 21 | rule LibsUsed__Inflate_Library : Libs LibsUsed { 22 | meta: 23 | weight = 1 24 | strings: 25 | $ = /inflate [0-9\\.]+ Copyright 1995/ 26 | condition: 27 | any of them 28 | } 29 | 30 | rule LibsUsed__Lex_Yacc : Libs LibsUsed { 31 | meta: 32 | weight = 1 33 | strings: 34 | $ = "yy_create_buffer" nocase 35 | condition: 36 | any of them 37 | } 38 | 39 | rule LibsUsed__STL_new : Libs LibsUsed { 40 | meta: 41 | weight = 1 42 | strings: 43 | $ = "AVbad_alloc" 44 | condition: 45 | any of them 46 | } 47 | -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/microsoft.yar: -------------------------------------------------------------------------------- 1 | rule Functionality__Windows_GDI_Common_Controls : Microsoft Functionality { 2 | meta: 3 | weight = 1 4 | strings: 5 | $ ="comctl32.dll" nocase 6 | $ ="gdi32.dll" nocase 7 | condition: 8 | any of them 9 | } 10 | 11 | rule Functionality__Windows_Multimedia : Microsoft Functionality { 12 | meta: 13 | weight = 1 14 | strings: 15 | $ ="winmm.dll" nocase 16 | condition: 17 | any of them 18 | } 19 | 20 | rule Functionality__Windows_socket_library : Microsoft Functionality { 21 | meta: 22 | weight = 1 23 | strings: 24 | $ ="wsock32.dll" nocase 25 | $ ="ws2_32.dll" nocase 26 | condition: 27 | any of them 28 | } 29 | 30 | rule Functionality__Windows_Internet_API : Microsoft Functionality { 31 | meta: 32 | weight = 1 33 | strings: 34 | $ ="wininet.dll" nocase 35 | condition: 36 | any of them 37 | } 38 | 39 | rule Functionality__Windows_HTML_Help_Control : Microsoft Functionality { 40 | meta: 41 | weight = 1 42 | strings: 43 | $ ="hhctrl.dll" nocase 44 | condition: 45 | any of them 46 | } 47 | 48 | rule Functionality__Windows_Video_For_Windows : Microsoft Functionality { 49 | meta: 50 | weight = 1 51 | strings: 52 | $ ="msvfw32.dll" nocase 53 | condition: 54 | any of them 55 | } 56 | 57 | rule Copyright__faked : Microsoft Copyright { 58 | meta: 59 | weight = 1 60 | strings: 61 | $ ="Microsoft (c)" 62 | condition: 63 | any of them 64 | } -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/rules/hbgary/sockets.yar: 
-------------------------------------------------------------------------------- 1 | rule Winsock__WSA : Sockets Winsock { 2 | meta: 3 | weight = 1 4 | strings: 5 | $ ="WSASocket" 6 | $ ="WSASend" 7 | $ ="WSARecv" 8 | $ ="WSAConnect" 9 | $ ="WSAIoctl" 10 | $ ="WSAConnect" 11 | condition: 12 | any of them 13 | } 14 | 15 | rule Winsock__Generic : Sockets Winsock { 16 | meta: 17 | weight = 1 18 | strings: 19 | $ ="socket" 20 | $ ="send" 21 | $ ="recv" 22 | $ ="connect" 23 | $ ="ioctlsocket" 24 | $ ="closesocket" 25 | condition: 26 | any of them 27 | } 28 | 29 | rule HostQuery__Peer : Sockets HostQuery { 30 | meta: 31 | weight = 1 32 | strings: 33 | $ ="getpeername" 34 | condition: 35 | any of them 36 | } 37 | 38 | rule HostQuery__ByName : Sockets HostQuery { 39 | meta: 40 | weight = 1 41 | strings: 42 | $ ="gethostbyname" 43 | condition: 44 | any of them 45 | } 46 | 47 | rule HostQuery__ByAddr : Sockets HostQuery { 48 | meta: 49 | weight = 1 50 | strings: 51 | $ ="gethostbyaddr" 52 | condition: 53 | any of them 54 | } 55 | 56 | rule SocketCalls__Winsock_Address_Conversion : Sockets SocketCalls { 57 | meta: 58 | weight = 1 59 | strings: 60 | $ ="inet_addr" 61 | $ ="inet_ntoa" 62 | $ ="htons" 63 | $ ="htonl" 64 | condition: 65 | any of them 66 | } 67 | 68 | rule SocketCalls__Advanced_WSA_Winsock : Sockets SocketCalls { 69 | meta: 70 | weight = 1 71 | strings: 72 | $ ="WSAEnumNetworkEvents" 73 | $ ="WSAAsync" 74 | $ ="WSAEnumNameSpaceProviders" 75 | condition: 76 | any of them 77 | } -------------------------------------------------------------------------------- /yara-ctypes/build/lib.linux-x86_64-2.7/yara/version.py: -------------------------------------------------------------------------------- 1 | __version__ = "1.6.1" 2 | -------------------------------------------------------------------------------- /yara-ctypes/dist/yara-1.6.1-py2.7.egg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/dist/yara-1.6.1-py2.7.egg -------------------------------------------------------------------------------- /yara-ctypes/distribute-0.6.25-py2.7.egg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/distribute-0.6.25-py2.7.egg -------------------------------------------------------------------------------- /yara-ctypes/distribute-0.6.25.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/distribute-0.6.25.tar.gz -------------------------------------------------------------------------------- /yara-ctypes/docs/source/howto/install.rst: -------------------------------------------------------------------------------- 1 | Install guide 2 | ============= 3 | 4 | Things to know about installing yara-ctypes. 5 | 6 | 7 | PyPi install 8 | ------------ 9 | 10 | Simply run the following:: 11 | 12 | pip install yara 13 | 14 | 15 | If you do not have pip, you can `click here to find`_ the latest download 16 | package. 17 | 18 | Unzip than install:: 19 | 20 | python setup.py install 21 | 22 | 23 | Download and install the master 24 | ------------------------------- 25 | 26 | You can find the master copy of `yara-ctypes on github`_. 
27 | 28 | Here is how to install from the master:: 29 | 30 | wget -O master.zip https://github.com/mjdorma/yara-ctypes/zipball/master 31 | unzip master.zip 32 | cd mjdorma-yara-ctypes-XXX 33 | python setup.py install 34 | 35 | 36 | 37 | Missing a dll? Try installing the MS VC++ 2010 redistributable package 38 | ----------------------------------------------------------------------- 39 | 40 | The shipped dlls' were built using Visual Studio 2010. If you do not have the 41 | appropriate runtime already installed you will get an error message pop 42 | up saying you are missing ``msvcr100.dll``. Download and install the 43 | appropriate redistribution package for your platform: 44 | 45 | * `Microsoft Visual C++ 2010 Redistributable Package (x86)`_ (or `vcredist_x86.exe`_) 46 | * `Microsoft Visual C++ 2010 Redistributable Package (x64)`_ (or `vcredist_x64.exe`_) 47 | 48 | 49 | Failing to import libyara 50 | ------------------------- 51 | 52 | At this point you need to figure out if the shipped library file is compatible 53 | with your system/platform. You may need to build your own libyara library from 54 | scratch. See :doc:`build` for more information. 55 | 56 | 57 | 58 | .. _yara-ctypes on github: https://github.com/mjdorma/yara-ctypes 59 | .. _click here to find: http://pypi.python.org/pypi/yara/#downloads 60 | 61 | .. _Microsoft Visual C++ 2010 Redistributable Package (x64): http://www.microsoft.com/en-us/download/details.aspx?id=14632 62 | .. _vcredist_x64.exe: http://download.microsoft.com/download/3/2/2/3224B87F-CFA0-4E70-BDA3-3DE650EFEBA5/vcredist_x64.exe 63 | .. _Microsoft Visual C++ 2010 Redistributable Package (x86): http://www.microsoft.com/en-us/download/details.aspx?id=5555 64 | .. _vcredist_x86.exe: http://download.microsoft.com/download/5/B/C/5BC5DBB3-652D-4DCE-B14A-475AB85EEF6E/vcredist_x86.exe 65 | 66 | 67 | -------------------------------------------------------------------------------- /yara-ctypes/docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. note:: 2 | 3 | *this doc is a work in progress* 4 | 5 | 6 | Introduction to yara-ctypes-python 7 | ================================== 8 | 9 | What is yara-ctypes: 10 | 11 | * A powerful python wrapper for `yara-project's libyara v1.6`_. 12 | * Supports thread safe matching of YARA rules. 13 | * namespace management to allow easy loading of multiple YARA rules into a 14 | single libyara context. 15 | * Comes with a scan module which exposes a user CLI and demonstrates a pattern 16 | for executing match jobs across a thread pool. 17 | 18 | 19 | Why: 20 | 21 | * ctypes releases the GIL on system function calls... Run your PC to its 22 | true potential. 23 | * No more building the PyC extension... 24 | * I found a few bugs and memory leaks and wanted to make my life simple. 25 | 26 | 27 | As a reference and guide to yara-ctypes see: `yara-ctypes documentation`_ 28 | 29 | 30 | For additional tips / tricks with this wrapper feel free to post a question at 31 | the github `yara-ctypes/issues`_ page. 32 | 33 | 34 | Project hosting provided by `github.com`_. 35 | 36 | 37 | [mjdorma+yara-ctypes@gmail.com] 38 | 39 | 40 | 41 | Getting started 42 | =============== 43 | 44 | .. toctree:: 45 | :maxdepth: 2 46 | 47 | howto/install.rst 48 | howto/scan.rst 49 | howto/build.rst 50 | 51 | 52 | Reference 53 | ========= 54 | 55 | .. 
toctree:: 56 | :maxdepth: 2 57 | 58 | yara/scan.rst 59 | yara/rules.rst 60 | yara/libyara_wrapper.rst 61 | 62 | 63 | Indices and tables 64 | =================== 65 | 66 | * :ref:`genindex` 67 | * :ref:`modindex` 68 | * :ref:`search` 69 | 70 | 71 | 72 | .. _github.com: https://github.com/mjdorma/yara-ctypes 73 | .. _yara-ctypes/issues: https://github.com/mjdorma/yara-ctypes/issues 74 | .. _notes on building: http://packages.python.org/yara/howto/build.html 75 | .. _yara-ctypes documentation: http://packages.python.org/yara/ 76 | .. _yara-project's libyara v1.6: http://code.google.com/p/yara-project 77 | -------------------------------------------------------------------------------- /yara-ctypes/docs/source/yara/libyara_wrapper.rst: -------------------------------------------------------------------------------- 1 | :mod:`yara.libyara_wrapper` --- ctypes wrapper for libyara 2 | ========================================================== 3 | 4 | .. module:: yara.libyara_wrapper 5 | :synopsis: Wraps libyara's exported functions 6 | .. moduleauthor:: Michael Dorman 7 | .. sectionauthor:: Michael Dorman 8 | 9 | 10 | .. automodule:: yara.libyara_wrapper 11 | :members: 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /yara-ctypes/docs/source/yara/rules.rst: -------------------------------------------------------------------------------- 1 | :mod:`yara.rules` --- YARA namespaces, compilation, and matching 2 | ================================================================ 3 | .. module:: yara.rules 4 | :synopsis: Compile and test YARA against data 5 | .. moduleauthor:: Michael Dorman 6 | .. sectionauthor:: Michael Dorman 7 | 8 | 9 | .. automodule:: yara.rules 10 | 11 | .. autoclass:: yara.rules.Rules 12 | :members: 13 | 14 | -------------------------------------------------------------------------------- /yara-ctypes/docs/source/yara/scan.rst: -------------------------------------------------------------------------------- 1 | :mod:`yara.scan` --- A multithreaded Scanner for PIDs or filepaths 2 | ================================================================== 3 | 4 | .. module:: yara.scan 5 | :synopsis: Compile and test YARA against data 6 | .. moduleauthor:: Michael Dorman 7 | .. sectionauthor:: Michael Dorman 8 | 9 | 10 | .. 
autoclass:: yara.scan.Scanner 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /yara-ctypes/libs/ELF/32bit/libyara.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/libs/ELF/32bit/libyara.so -------------------------------------------------------------------------------- /yara-ctypes/libs/ELF/64bit/libyara.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/libs/ELF/64bit/libyara.so -------------------------------------------------------------------------------- /yara-ctypes/libs/WindowsPE/32bit/libyara.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/libs/WindowsPE/32bit/libyara.dll -------------------------------------------------------------------------------- /yara-ctypes/libs/WindowsPE/64bit/libyara.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/libs/WindowsPE/64bit/libyara.dll -------------------------------------------------------------------------------- /yara-ctypes/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from distribute_setup import use_setuptools 3 | use_setuptools() 4 | 5 | from setuptools import setup 6 | import re 7 | import platform 8 | import os 9 | import sys 10 | 11 | 12 | def load_version(filename='yara/version.py'): 13 | """Parse a __version__ number from a source file""" 14 | with open(filename) as source: 15 | text = source.read() 16 | match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", text) 17 | if not match: 18 | msg = "Unable to find version number in {}".format(filename) 19 | raise RuntimeError(msg) 20 | version = match.group(1) 21 | return version 22 | 23 | 24 | #build the yara package data (shipped yar files) 25 | yara_package_data = [] 26 | for path, _, files in os.walk(os.path.join('yara', 'rules')): 27 | rootpath = path[len('yara') + 1:] 28 | for f in files: 29 | if f.endswith('.yar'): 30 | yara_package_data.append(os.path.join(rootpath, f)) 31 | 32 | 33 | #see if we have a pre-built libyara for this platform 34 | arch, exetype = platform.architecture() 35 | libs = [] 36 | libspath = os.path.join('.', 'libs', exetype, arch) 37 | if os.path.exists(libspath): 38 | for lib in filter(lambda x: os.path.splitext(x)[-1] in ['.so', '.dll'], 39 | os.listdir(libspath)): 40 | libs.append(os.path.join(libspath, lib)) 41 | data_files = [] 42 | if libs: 43 | if exetype == 'ELF': 44 | libdir = os.path.join(sys.prefix, 'lib') 45 | else: 46 | libdir = os.path.join(sys.prefix, 'DLLs') 47 | data_files.append((libdir, libs)) 48 | else: 49 | print("WARNING: No libs found at %s" % libspath) 50 | print("You need to 'make install' libyara (yara-1.6) for this platform") 51 | 52 | setup( 53 | name="yara", 54 | version=load_version(), 55 | packages=['yara'], 56 | package_data=dict(yara=yara_package_data), 57 | data_files=data_files, 58 | zip_safe=False, 59 | author="Michael Dorman", 60 | author_email="mjdorma@gmail.com", 61 | url="http://code.google.com/p/yara-project/", 62 | description="Compile YARA rules to test 
against files or strings", 63 | long_description=open('README.rst').read(), 64 | license="Apache Software Licence", 65 | install_requires = [], 66 | platforms=['cygwin', 'win', 'linux'], 67 | classifiers=[ 68 | 'Development Status :: 2 - Pre-Alpha', 69 | 'Intended Audience :: Other Audience', 70 | 'License :: OSI Approved :: Apache Software License', 71 | 'Operating System :: Microsoft :: Windows', 72 | 'Operating System :: POSIX :: Linux', 73 | 'Programming Language :: Python', 74 | 'Programming Language :: Python :: 2.6', 75 | 'Programming Language :: Python :: 2.7', 76 | 'Programming Language :: Python :: 3', 77 | 'Programming Language :: Python :: 3.2', 78 | 'Programming Language :: Python :: 3.3', 79 | 'Programming Language :: Python :: Implementation :: CPython', 80 | 'Topic :: Security', 81 | 'Topic :: System :: Monitoring' 82 | ], 83 | test_suite="tests" 84 | ) 85 | -------------------------------------------------------------------------------- /yara-ctypes/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/tests/__init__.py -------------------------------------------------------------------------------- /yara-ctypes/tests/broken_rules.yar: -------------------------------------------------------------------------------- 1 | // 2 | // This rules are based on PEiD signatures (http://www.peid.info/BobSoft/Downloads/Use $noep33 = { EB 02 CD 20 EB 01 91 8D 35 80 ?? ?? 00 33 C2 68 83 93 7E 7D 0C A4 5B 23 C3 68 77 93 7E 7D } 3 | $noep34 = { 4B 45 52 4E 45 4C 33 32 2E 64 6C 6C 00 00 4C 6F 61 64 4C 69 62 72 61 72 79 41 00 00 47 65 } 4 | $noep35 = { 0F BE C1 EB 01 0E 8D 35 C3 BE B6 22 F7 D1 68 43 ?? ?? 22 EB 02 B5 15 5F C8 C1 C0 10 86 C4 29 F8 80 EB E8 01 F0 89 07 83 C7 } 5 | $noep3 = { 01 DB [0-1] 07 8B 1E 83 EE FC 11 DB [1-4] B8 01 00 00 00 01 DB } 6 | $noep4 = { 9C 60 E8 00 00 00 00 5D B8 B3 85 40 00 2D AC 85 40 00 2B E8 8D B5 D5 FE FF FF 8B 06?? ?? 8D BE ?? ?? ?? ?? 
57 83 CD } 7 | 8 | condition: 9 | 10 | any of ($noep*) or for any of ($ep*) : ($ at entrypoint) 11 | } 12 | 13 | -------------------------------------------------------------------------------- /yara-ctypes/tests/test_libyara_wrapper.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import time 4 | import doctest 5 | 6 | import yara 7 | from yara.libyara_wrapper import * 8 | import sys 9 | sys.path.append(r'C:\Python26\DLLs') 10 | 11 | 12 | class TestLibYara(unittest.TestCase): 13 | 14 | def error_report_function(self, filename, line_number, error_message): 15 | #if not filename: 16 | # filename = "_" 17 | #print "%s:%s: %s"%(filename, line_number, error_message) 18 | self.err_callback_count += 1 19 | 20 | def test_readme_doctest(self): 21 | """Run doctests on README documentation""" 22 | doctest.testfile('../README.rst') 23 | 24 | def test_build_context_with_a_rule(self): 25 | """compile and destroy a good rule""" 26 | 27 | cdir = yara.YARA_RULES_ROOT 28 | good_rule = os.path.join(cdir, 'example', 'packer_rules.yar') 29 | error_report_function = YARAREPORT(self.error_report_function) 30 | 31 | #create and destroy a bunch of contexts 32 | for i in range(2): 33 | #create a new context and do the bizz 34 | sm = yr_malloc_count() 35 | sf = yr_free_count() 36 | context = yr_create_context() 37 | context.contents.error_report_function =\ 38 | error_report_function 39 | 40 | #add the good rule file and make sure it doesn't raise or callback 41 | yr_push_file_name(context, 'good_rule') 42 | ns = yr_create_namespace(context, 'test') 43 | context.contents.current_namespace = ns 44 | self.err_callback_count = 0 45 | 46 | yr_compile_file(good_rule, context) 47 | ns = yr_create_namespace(context, 'test2') 48 | context.contents.current_namespace = ns 49 | yr_compile_file(good_rule, context) 50 | self.assertEqual(self.err_callback_count, 0) 51 | 52 | #clean up 53 | yr_destroy_context(context) 54 | dsm = yr_malloc_count() 55 | dsf = yr_free_count() 56 | self.assertEqual(dsm, dsf) 57 | 58 | def test_demonstrate_memleak_when_error(self): 59 | """compile broken rule""" 60 | cdir = os.path.split(__file__)[0] 61 | bad_rule = os.path.join(cdir, 'broken_rules.yar') 62 | error_report_function = YARAREPORT(self.error_report_function) 63 | 64 | #create and destroy a bunch of contexts 65 | for i in range(2): 66 | sm = yr_malloc_count() 67 | sf = yr_free_count() 68 | #create a new context and do the bizz 69 | context = yr_create_context() 70 | context.contents.error_report_function =\ 71 | error_report_function 72 | 73 | #add the bad rule file and assert that it raises and calls back 74 | self.err_callback_count = 0 75 | yr_push_file_name(context, 'bad_rule') 76 | current = yr_get_current_file_name(context) 77 | self.assertTrue(current == 'bad_rule') 78 | self.assertRaises(YaraSyntaxError, yr_compile_file, bad_rule, 79 | context) 80 | self.assertEqual(self.err_callback_count, 1) 81 | #clean up 82 | yr_destroy_context(context) 83 | dsm = yr_malloc_count() 84 | dsf = yr_free_count() 85 | self.assertEqual(dsm, dsf) 86 | 87 | def test_demonstrate_memleak_good_and_bad_load(self): 88 | """compile a good rule followed by a broken rule""" 89 | 90 | cdir = yara.YARA_RULES_ROOT 91 | good_rule = os.path.join(cdir, 'example', 'packer_rules.yar') 92 | cdir = os.path.split(__file__)[0] 93 | bad_rule = os.path.join(cdir, 'broken_rules.yar') 94 | error_report_function = YARAREPORT(self.error_report_function) 95 | 96 | #create and destroy a bunch of contexts 97 
| for i in range(2): 98 | sm = yr_malloc_count() 99 | sf = yr_free_count() 100 | #create a new context and do the bizz 101 | context = yr_create_context() 102 | context.contents.error_report_function =\ 103 | error_report_function 104 | 105 | #add the good rule file and make sure it doesn't raise or callback 106 | yr_push_file_name(context, 'good_rule') 107 | ns = yr_create_namespace(context, 'test') 108 | context.contents.current_namespace = ns 109 | self.err_callback_count = 0 110 | yr_compile_file(good_rule, context) 111 | self.assertEqual(self.err_callback_count, 0) 112 | 113 | #add the bad rule file and assert that it raises and calls back 114 | yr_push_file_name(context, 'bad_rule') 115 | ns = yr_create_namespace(context, 'badrule') 116 | context.contents.current_namespace = ns 117 | self.assertRaises(YaraSyntaxError, yr_compile_file, 118 | bad_rule, context) 119 | self.assertEqual(self.err_callback_count, 1) 120 | 121 | #clean up 122 | yr_destroy_context(context.contents) 123 | 124 | dsm = yr_malloc_count() 125 | dsf = yr_free_count() 126 | self.assertEqual(dsm, dsf) 127 | 128 | 129 | if __name__ == "__main__": 130 | unittest.main() 131 | -------------------------------------------------------------------------------- /yara-ctypes/yara.egg-info/PKG-INFO: -------------------------------------------------------------------------------- 1 | Metadata-Version: 1.1 2 | Name: yara 3 | Version: 1.6.1 4 | Summary: Compile YARA rules to test against files or strings 5 | Home-page: http://code.google.com/p/yara-project/ 6 | Author: Michael Dorman 7 | Author-email: mjdorma@gmail.com 8 | License: Apache Software Licence 9 | Description: Introduction to yara-ctypes-python 10 | ********************************** 11 | 12 | What is yara-ctypes: 13 | 14 | * A powerful python wrapper for `yara-project's libyara v1.6`_. 15 | * Supports thread safe matching of YARA rules. 16 | * namespace management to allow easy loading of multiple YARA rules into a 17 | single libyara context. 18 | * Comes with a scan module which exposes a user CLI and demonstrates a pattern 19 | for executing match jobs across a thread pool. 20 | 21 | 22 | Why: 23 | 24 | * ctypes releases the GIL on system function calls... Run your PC to its 25 | true potential. 26 | * No more building the PyC extension... 27 | * I found a few bugs and memory leaks and wanted to make my life simple. 28 | 29 | 30 | As a reference and guide to yara-ctypes see: `yara-ctypes documentation`_ 31 | 32 | 33 | For additional tips / tricks with this wrapper feel free to post a question at 34 | the github `yara-ctypes/issues`_ page. 35 | 36 | 37 | Project hosting provided by `github.com`_. 38 | 39 | 40 | [mjdorma+yara-ctypes@gmail.com] 41 | 42 | 43 | Install and run 44 | =============== 45 | 46 | Simply run the following:: 47 | 48 | > python setup.py install 49 | > python setup.py test 50 | > python -m yara.scan -h 51 | 52 | or `PyPi`_:: 53 | 54 | > pip install yara 55 | > python -m yara.scan -h 56 | 57 | 58 | .. note:: 59 | 60 | If the package does not contain a pre-compiled libyara library for your 61 | platform you will need to build and install it. See `notes on building`_. 62 | 63 | 64 | Compatability 65 | ============= 66 | 67 | *yara-ctypes* is implemented to be compatible with Python 2.6+ and Python 3.x. 
68 | It has been tested against the following Python implementations: 69 | 70 | Ubuntu 12.04: 71 | 72 | + CPython 2.7 (32bit, 64bit) 73 | + CPython 3.2 (32bit, 64bit) 74 | 75 | Ubuntu 11.10 |build_status|: 76 | 77 | + CPython 2.6 (32bit) 78 | + CPython 2.7 (32bit) 79 | + CPython 3.2 (32bit) 80 | + PyPy 1.9.0 (32bit) 81 | 82 | Windows 7: 83 | 84 | + CPython 2.6 (32bit, 64bit) 85 | + CPython 3.2 (32bit, 64bit) 86 | 87 | 88 | Continuous integration testing is provided by `Travis CI `_. 89 | 90 | 91 | Issues 92 | ====== 93 | 94 | Source code for *yara-ctypes* is hosted on `GitHub `_. 95 | Please file `bug reports `_ 96 | with GitHub's issues system. 97 | 98 | 99 | Change log 100 | ========== 101 | 102 | version 1.6.0 (01/09/2012) 103 | 104 | * Initial release 105 | 106 | version 1.6.1 (06/09/2012) 107 | 108 | * Support for 64bit Windows 109 | * Bug fixes 110 | * Added documentation 111 | 112 | 113 | 114 | .. _github.com: https://github.com/mjdorma/yara-ctypes 115 | .. _PyPi: http://pypi.python.org/pypi/yara 116 | .. _yara-ctypes/issues: https://github.com/mjdorma/yara-ctypes/issues 117 | .. _notes on building: http://packages.python.org/yara/howto/build.html 118 | .. _yara-ctypes documentation: http://packages.python.org/yara/ 119 | .. _yara-project's libyara v1.6: http://code.google.com/p/yara-project 120 | .. |build_status| image:: https://secure.travis-ci.org/mjdorma/yara-ctypes.png?branch=master 121 | :target: http://travis-ci.org/#!/mjorma/yara-ctypes 122 | 123 | Platform: cygwin 124 | Platform: win 125 | Platform: linux 126 | Classifier: Development Status :: 2 - Pre-Alpha 127 | Classifier: Intended Audience :: Other Audience 128 | Classifier: License :: OSI Approved :: Apache Software License 129 | Classifier: Operating System :: Microsoft :: Windows 130 | Classifier: Operating System :: POSIX :: Linux 131 | Classifier: Programming Language :: Python 132 | Classifier: Programming Language :: Python :: 2.6 133 | Classifier: Programming Language :: Python :: 2.7 134 | Classifier: Programming Language :: Python :: 3 135 | Classifier: Programming Language :: Python :: 3.2 136 | Classifier: Programming Language :: Python :: 3.3 137 | Classifier: Programming Language :: Python :: Implementation :: CPython 138 | Classifier: Topic :: Security 139 | Classifier: Topic :: System :: Monitoring 140 | -------------------------------------------------------------------------------- /yara-ctypes/yara.egg-info/not-zip-safe: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /yara-ctypes/yara/__init__.py: -------------------------------------------------------------------------------- 1 | """Compile YARA rules to test against files or strings 2 | 3 | [mjdorma@gmail.com] 4 | """ 5 | 6 | from yara.version import __version__ 7 | from yara.rules import compile 8 | from yara.rules import load_rules 9 | from yara.rules import Rules 10 | from yara.rules import YARA_RULES_ROOT 11 | from yara.rules import CALLBACK_CONTINUE 12 | from yara.rules import CALLBACK_ABORT 13 | from yara.scan import Scanner 14 | -------------------------------------------------------------------------------- /yara-ctypes/yara/rules/browser/.exploits.yar.kate-swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fb1h2s/sandy/3bde2730842ea0c3b5ab16a45db58378c7fce080/yara-ctypes/yara/rules/browser/.exploits.yar.kate-swp 
-------------------------------------------------------------------------------- /yara-ctypes/yara/rules/browser/exploit_kits.yar: -------------------------------------------------------------------------------- 1 | 2 | rule sakura_bin : exploit_kit 3 | { 4 | strings: 5 | $a = /\/[0-9]{4}\.html/ 6 | $b = /Host:.*?:8/ 7 | condition: 8 | 2 of ($a,$b) 9 | } 10 | 11 | rule neutrino_V2_bin : exploit_kit 12 | { 13 | strings: 14 | $a = /\/[a-zA-Z0-9]{55,62}\={1,2}/ 15 | $b = "Java/1" 16 | condition: 17 | $a and $b 18 | } 19 | 20 | rule safepack_bin : exploit_kit 21 | { 22 | strings: 23 | $a = "load.php?e=" 24 | $b = "&ip=" 25 | condition: 26 | 2 of ($a,$b) 27 | } 28 | 29 | rule safepack_jar : exploit_kit 30 | { 31 | strings: 32 | $a = /\/j\d{2}\.php\?i\=[a-zA-Z0-9]{9,15}/ 33 | condition: 34 | $a 35 | } 36 | 37 | rule Gondad_jar : exploit_kit 38 | { 39 | strings: 40 | $a = "PK" 41 | $b = "GondadGondad" 42 | condition: 43 | $a and $b 44 | } 45 | 46 | 47 | rule sweetorange_entry : exploit_kit 48 | { 49 | strings: 50 | $a = "archive=" 51 | $b = "sdj1" 52 | condition: 53 | $a and $b 54 | } 55 | 56 | rule sweetorange_jar : exploit_kit 57 | { 58 | strings: 59 | $a = /\/(mcINkf|YZjcS|tUaZFs)/ 60 | condition: 61 | $a 62 | } 63 | 64 | rule srun_bin : exploit_kit 65 | { 66 | strings: 67 | $a = /get\?src\=/ 68 | $b = "Request: srun" 69 | condition: 70 | $a or $b 71 | } 72 | 73 | rule neutrino_bin : exploit_kit 74 | { 75 | strings: 76 | $a = /\/[a-z]{4,15}\?[a-z]{4,7}\=[a-f0-9]{24}/ 77 | condition: 78 | $a 79 | } 80 | 81 | rule bleedinglife : exploit_kit 82 | { 83 | strings: 84 | $a = /\.php\?e\=(Adobe-|Java-SPLOIT)/ 85 | condition: 86 | $a 87 | } 88 | 89 | rule blackhole : exploit_kit 90 | { 91 | strings: 92 | $a = /\.php\?.*?\:[a-zA-Z0-9\:]{6,}\&.*?\&/ 93 | condition: 94 | $a 95 | } 96 | 97 | 98 | rule crimeboss_entry : exploit_kit 99 | { 100 | strings: 101 | $a = /\.php\?x\=s\&\w+\=\d+\&no\=\d/ 102 | condition: 103 | $a 104 | } 105 | 106 | rule crimeboss_jar : exploit_kit 107 | { 108 | strings: 109 | $a = /\/[a-z0-9]{3,4}\.jar\?r\=\d{6}\s/ 110 | condition: 111 | $a 112 | } 113 | 114 | rule critxpack_jar : exploit_kit 115 | { 116 | strings: 117 | $a = /\/j[\d]{2}\.php\?i\=[A-Za-z0-9]{72,}\s/ 118 | condition: 119 | $a 120 | } 121 | 122 | rule critxpack_bin : exploit_kit 123 | { 124 | strings: 125 | $a = /load.php\?e\=[a-f0-9\%]{12,16}\&jquery\=[a-f0-9\%]{12,35}\&/ 126 | condition: 127 | $a 128 | } 129 | 130 | rule sofosFO_bin : exploit_kit 131 | { 132 | strings: 133 | $a = /\/[a-zA-Z0-9]{24,}\/[0-9]{9,10}\/[0-9]{7,10}\s/ 134 | condition: 135 | $a 136 | } 137 | 138 | rule sofosFO_redirect : exploit_kit 139 | { 140 | strings: 141 | $a = /\.php\?id\=\d+\&session\=[a-z0-9]{15,}\&ip\=\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/ 142 | condition: 143 | $a 144 | } 145 | 146 | rule whitehole_jar : exploit_kit 147 | { 148 | strings: 149 | $a = /\/Java(\d+.|.)jar\?java\=\d{2,3}/ 150 | condition: 151 | $a 152 | } 153 | 154 | rule kein_bin : exploit_kit 155 | { 156 | strings: 157 | $a = /\/\?[A-Za-z0-9]{2,10}\=[a-z0-9%]{70,}\&t\=\d+/ 158 | condition: 159 | $a 160 | } 161 | 162 | rule reddot_bin : exploit_kit 163 | { 164 | strings: 165 | $a = /\?guid\=[a-zA-Z0-9]{32}\&thread\=\w+\&exploit\=\d+\&\;version\=[\d._]/ 166 | condition: 167 | $a 168 | } 169 | 170 | rule fiesta_bin : exploit_kit 171 | { 172 | strings: 173 | $a = /\/\?[A-Za-z0-9]{55,70}\;\d+\;\d+/ 174 | condition: 175 | $a 176 | } 177 | 178 | rule fiesta_enum : exploit_kit 179 | { 180 | strings: 181 | $a = /\/\?[A-Za-z0-9]{50,66}\;\d+\;\d+\;\d+/ 182 | condition: 183 | $a 
184 | } 185 | 186 | rule redkit_bin : exploit_kit 187 | { 188 | strings: 189 | $a = /\/\d{2}.html\s/ 190 | condition: 191 | $a 192 | } 193 | 194 | rule impact_bin : exploit_kit 195 | { 196 | strings: 197 | $a = /\/\w+.php\?\;\d/ 198 | condition: 199 | $a 200 | } 201 | 202 | rule blackhole_bin : exploit_kit 203 | { 204 | strings: 205 | $a = /.php\?[a-zA-Z]{2,6}\=[A-Za-z0-9]{10,}\&[A-Za-z]{2,}\=[A-Za-z0-9]{10,}\&[A-Za-z]{1,}\=[A-Fa-f0-9]{2,}\&[A-Za-z]{2,}\=[A-Za-z0-9]+\&[A-Za-z0-9]{1,}\=[A-Za-z]{1,}/ 206 | condition: 207 | $a 208 | } 209 | 210 | rule traffic_broker : tds 211 | { 212 | strings: 213 | $a = /rd.php\?http/ 214 | condition: 215 | $a 216 | } 217 | 218 | rule sutra : tds 219 | { 220 | strings: 221 | $a = /\/in.cgi\?\d/ 222 | condition: 223 | $a 224 | } 225 | 226 | 227 | rule redkit_exploit_fb : exploit_kit 228 | { 229 | meta: 230 | author = "X" 231 | version = "X" 232 | description = "rule redkit_exploit_fb" 233 | 234 | strings: 235 | $js3=/\.html\?i=[0-9]{7}/ 236 | $js4=/iframe/ 237 | 238 | condition: 239 | $js3 and $js4 240 | } 241 | 242 | rule Kelihos_Bot_to_exploit_kit : exploit_kit 243 | { 244 | meta: 245 | author = "X" 246 | version = "X" 247 | description = "rule Kelihos_Bot_to_exploit_kit " 248 | 249 | strings: 250 | $js3=/[a-z\.]{11}\/count[0-9]{1,2}.php/ nocase 251 | 252 | 253 | condition: 254 | $js3 255 | } -------------------------------------------------------------------------------- /yara-ctypes/yara/rules/browser/exploits.yar: -------------------------------------------------------------------------------- 1 | 2 | 3 | rule Malicious_Redirect_Code 4 | { 5 | meta: 6 | author = "X" 7 | version = "X" 8 | description = "Blackhole" 9 | 10 | strings: 11 | $js = /km0ae9gr6m/ 12 | $js2=/qhk6sa6g1c/ 13 | $js3=/\.php\?page=[a-zA-Z0-9]{16}/ 14 | $js4=/iframe/ 15 | $js5=/%69%66%72%61%6d%65/ 16 | $js6=/%68%69%64%64%65%6e/ 17 | $js7=/unescape/ 18 | $js8=// 19 | $long=/([0-9]{1,4},){256}/ 20 | $long2=/([0-9]{1,2}\.[0-9]{1,2}\$){8}/ 21 | $long3=/([0-9]{1,4}\.\.[0-9]{1,4}){8}/ 22 | $long4=/([0-9a-zA-Z]{1,2}(\$|@|#|!|,){1,2}){256}/ 23 | $long5=/("[0-9a-zA-Z]{1,2}",){16}/ 24 | $long6=/([0-9a-zA-Z]{2,3}&&){256}/ 25 | $long7=/(0x[0-9a-fA-F]{1,2},){256}/ 26 | $maliciousfor=/for\((.)=(.){1,10};(.){5}!=(.);(.)\+\+\)/ 27 | $maliciousif=/if\('[a-zA-Z]{3,8}'=='[a-zA-Z]{3,8}'\)/ 28 | condition: 29 | ($js and $js2) or ($long or $long2 or $long3 or $long4 or $long6) or ($js3 and $js4) or ($js5 and $js6 and $js7) or ($long5 and $maliciousfor) or $maliciousif or $js8 or $long7 30 | } 31 | 32 | 33 | rule Malicious_javascript_method 34 | { 35 | meta: 36 | author = "X" 37 | version = "X" 38 | description = "Malicious_javascript_method" 39 | 40 | strings: 41 | $js = "eval" 42 | $js1= "unescape" 43 | condition: 44 | $js or $js1 45 | } 46 | 47 | 48 | rule Malicious_java_deployment 49 | { 50 | meta: 51 | author = "X" 52 | version = "X" 53 | description = "Malicious_java_deployment" 54 | 55 | strings: 56 | $js = "deployJava.getJREs" 57 | $js1= "document.createElement('applet')" 58 | condition: 59 | $js or $js1 60 | } 61 | 62 | rule Malicious_iframe_injection 63 | { 64 | meta: 65 | author = "X" 66 | version = "X" 67 | description = "Malicious_iframe_injection" 68 | 69 | strings: 70 | $js = "document.createElement('iframe')" 71 | $js1= "
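The URL-signature rules in exploit_kits.yar and the JavaScript heuristics in exploits.yar appear intended to be matched against HTTP traffic captured by the proxy scripts under src/proxy/ (for example yara_scan_links.py). Below is a minimal sketch of that flow using a simplified, hypothetical rule in the style of redkit_bin above; it assumes yara.compile(source=...) and match_data() behave as the yara-ctypes documentation describes::

    import yara

    # Hypothetical rule, modelled on the redkit_bin pattern above (simplified).
    RULE = r'''
    rule redkit_like : exploit_kit
    {
        strings:
            $a = /\/\d{2}\.html\s/
        condition:
            $a
    }
    '''

    rules = yara.compile(source=RULE)

    # A made-up request line of the kind the proxy would log.
    request = "GET /87.html HTTP/1.1\r\n"
    hits = rules.match_data(request)
    if hits:
        print("exploit-kit pattern matched: %r" % (hits,))

A real deployment would load the full rule files rather than an inline string, for example via yara.load_rules() as sketched earlier.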