├── .gitattributes ├── .gitignore ├── DumpsterDive.py ├── README.md ├── RigDecoder.py ├── adWindDecoder.py ├── bozokConfig.py ├── fileHasher.py ├── imageMounter.py ├── jRatConfig.py ├── jRatNetwork.py ├── quarantine.py ├── usbkeycap.py ├── vt_feed.py └── wide.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | *.sln merge=union 7 | *.csproj merge=union 8 | *.vbproj merge=union 9 | *.fsproj merge=union 10 | *.dbproj merge=union 11 | 12 | # Standard to msysgit 13 | *.doc diff=astextplain 14 | *.DOC diff=astextplain 15 | *.docx diff=astextplain 16 | *.DOCX diff=astextplain 17 | *.dot diff=astextplain 18 | *.DOT diff=astextplain 19 | *.pdf diff=astextplain 20 | *.PDF diff=astextplain 21 | *.rtf diff=astextplain 22 | *.RTF diff=astextplain 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ################# 2 | ## Eclipse 3 | ################# 4 | 5 | *.pydevproject 6 | .project 7 | .metadata 8 | bin/ 9 | tmp/ 10 | *.tmp 11 | *.bak 12 | *.swp 13 | *~.nib 14 | local.properties 15 | .classpath 16 | .settings/ 17 | .loadpath 18 | 19 | # External tool builders 20 | .externalToolBuilders/ 21 | 22 | # Locally stored "Eclipse launch configurations" 23 | *.launch 24 | 25 | # CDT-specific 26 | .cproject 27 | 28 | # PDT-specific 29 | .buildpath 30 | 31 | 32 | ################# 33 | ## Visual Studio 34 | ################# 35 | 36 | ## Ignore Visual Studio temporary files, build results, and 37 | ## files generated by popular Visual Studio add-ons. 38 | 39 | # User-specific files 40 | *.suo 41 | *.user 42 | *.sln.docstates 43 | 44 | # Build results 45 | [Dd]ebug/ 46 | [Rr]elease/ 47 | *_i.c 48 | *_p.c 49 | *.ilk 50 | *.meta 51 | *.obj 52 | *.pch 53 | *.pdb 54 | *.pgc 55 | *.pgd 56 | *.rsp 57 | *.sbr 58 | *.tlb 59 | *.tli 60 | *.tlh 61 | *.tmp 62 | *.vspscc 63 | .builds 64 | *.dotCover 65 | 66 | ## TODO: If you have NuGet Package Restore enabled, uncomment this 67 | #packages/ 68 | 69 | # Visual C++ cache files 70 | ipch/ 71 | *.aps 72 | *.ncb 73 | *.opensdf 74 | *.sdf 75 | 76 | # Visual Studio profiler 77 | *.psess 78 | *.vsp 79 | 80 | # ReSharper is a .NET coding add-in 81 | _ReSharper* 82 | 83 | # Installshield output folder 84 | [Ee]xpress 85 | 86 | # DocProject is a documentation generator add-in 87 | DocProject/buildhelp/ 88 | DocProject/Help/*.HxT 89 | DocProject/Help/*.HxC 90 | DocProject/Help/*.hhc 91 | DocProject/Help/*.hhk 92 | DocProject/Help/*.hhp 93 | DocProject/Help/Html2 94 | DocProject/Help/html 95 | 96 | # Click-Once directory 97 | publish 98 | 99 | # Others 100 | [Bb]in 101 | [Oo]bj 102 | sql 103 | TestResults 104 | *.Cache 105 | ClientBin 106 | stylecop.* 107 | ~$* 108 | *.dbmdl 109 | Generated_Code #added for RIA/Silverlight projects 110 | 111 | # Backup & report files from converting an old project file to a newer 112 | # Visual Studio version. 
Backup files are not needed, because we have git ;-) 113 | _UpgradeReport_Files/ 114 | Backup*/ 115 | UpgradeLog*.XML 116 | 117 | 118 | 119 | ############ 120 | ## Windows 121 | ############ 122 | 123 | # Windows image file caches 124 | Thumbs.db 125 | 126 | # Folder config file 127 | Desktop.ini 128 | 129 | 130 | ############# 131 | ## Python 132 | ############# 133 | 134 | *.py[co] 135 | 136 | # Packages 137 | *.egg 138 | *.egg-info 139 | dist 140 | build 141 | eggs 142 | parts 143 | bin 144 | var 145 | sdist 146 | develop-eggs 147 | .installed.cfg 148 | 149 | # Installer logs 150 | pip-log.txt 151 | 152 | # Unit test / coverage reports 153 | .coverage 154 | .tox 155 | 156 | #Translations 157 | *.mo 158 | 159 | #Mr Developer 160 | .mr.developer.cfg 161 | 162 | # Mac crap 163 | .DS_Store 164 | -------------------------------------------------------------------------------- /DumpsterDive.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Copyright (C) 2013 Kevin Breen. 4 | Python script to scan SMB / FTP results parsed from shodan 5 | ''' 6 | __description__ = 'Shodan SMB / FTP Parser' 7 | __author__ = 'Kevin Breen' 8 | __version__ = '0.3' 9 | __date__ = '2013/09/20' 10 | 11 | # Configuration 12 | API_KEY = "ENTERYOURAPIKEYHERE" # REMOVE ME BEFORE POSTING TO GIT 13 | client_machine_name = 'Random Name' 14 | remote_machine_name = 'server' 15 | 16 | import os 17 | import sys 18 | from optparse import OptionParser 19 | from ftplib import FTP 20 | try: 21 | import shodan 22 | except: 23 | print "Shodan Not Found please install using 'pip install shodan'" 24 | try: 25 | from smb.SMBConnection import SMBConnection 26 | except: 27 | print "pySMB Not Found please install using 'pip install pysmb'" 28 | 29 | 30 | def main(): 31 | parser = OptionParser(usage='usage: %prog [options] "search term" outfile\n' + __description__, version='%prog ' + __version__) 32 | parser.add_option("-t", dest="searchType", help="Search Type. e.g. 
SMB") 33 | (options, args) = parser.parse_args() 34 | searchTerm = args[0] 35 | searchType = options.searchType 36 | outfile = args[1] 37 | if len(args) != 2: 38 | parser.print_help() 39 | sys.exit() 40 | 41 | if args[0] == "": 42 | parser.print_help() 43 | sys.exit() 44 | if options.searchType == "SMB": 45 | print "Running Query: ", searchTerm 46 | ipList = shodanSearch().collectIP(searchTerm) 47 | smbParse().createList(ipList, outfile) 48 | 49 | elif options.searchType == "FTP": 50 | print "Running Query: ", searchTerm 51 | ipList = shodanSearch().collectIP(searchTerm) 52 | ftpParse().ftpList(ipList, outfile) 53 | else: 54 | print "not a valid search type: ", options.searchType 55 | parser.print_help() 56 | sys.exit() 57 | 58 | class shodanSearch(): 59 | def collectIP(self, searchString): 60 | ip_address = [] 61 | try: 62 | # Setup the api 63 | api = shodan.WebAPI(API_KEY) 64 | # Perform the search 65 | query = searchString 66 | result = api.search(query) 67 | print 'Results found: %s' % result['total'] 68 | # Loop through the matches and print each IP 69 | for host in result['matches']: 70 | ip_address.append(host['ip']) 71 | except Exception, e: 72 | print 'Error: %s' % e 73 | sys.exit(1) 74 | return ip_address 75 | 76 | class ftpParse(): 77 | def ftpList(self, ipList, outfile): 78 | temp = sys.stdout 79 | with open(outfile, 'a') as out: #Open our save file 80 | for ip in ipList: 81 | sys.stdout = temp 82 | print "Connecting to FTP: %s" % ip 83 | out.write("Connecting to FTP: %s \n" % ip) 84 | try: 85 | ftp = FTP(ip) # Connect to FTP 86 | ftp.login() # Try Anonymous Login 87 | lines = ftp.retrlines('LIST') # Grab the Dir Listing 88 | print "Collecting File Names" 89 | sys.stdout = out # redirect stdout to save file so we can store the Dir List 90 | out.write(lines) 91 | out.write('\n') 92 | sys.stdout = temp #restore print commands to interactive prompt 93 | except: 94 | print "Failed to connect to FTP: %s" % ip 95 | out.write("Failed to connect to IP: %s \n" % ip) 96 | 97 | class smbParse(): 98 | def createList(self, ipList, outfile): 99 | with open(outfile, 'a') as out: 100 | for ip in ipList: 101 | out.write("\n----------------------------------------------------\n") 102 | print "Attempting to access: ", ip 103 | out.write("Attempting to access: %s \n" % ip) 104 | try: 105 | conn = SMBConnection('guest', '', client_machine_name, remote_machine_name, use_ntlm_v2 = True) 106 | conn.connect(ip, 139) 107 | print "Connected to: ", ip 108 | out.write("Connected To: %s \n" % ip) 109 | except: 110 | print "Failed to Connect" 111 | out.write("Failed to Connect To: %s \n" % ip) 112 | pass 113 | try: 114 | shareList = conn.listShares() 115 | except: 116 | out.write("Failed to open Shares\n") 117 | shareList = None 118 | if shareList != None: 119 | for x in shareList: 120 | try: 121 | out.write("found Share: %s \n" % x.name) 122 | print "Listing files in share: ", x.name 123 | out.write("Listing files in share: %s \n" % x.name) 124 | filelist = conn.listPath(x.name, '/') 125 | for y in filelist: 126 | if y.isDirectory: 127 | print "DIR", y.filename 128 | out.write("-----") 129 | out.write(y.filename) 130 | out.write('\n') 131 | except: 132 | print "failed to open share: ", x.name 133 | out.write("Failed to open Share: %s \n" % x.name) 134 | 135 | print "report written to outfile.txt" 136 | 137 | 138 | if __name__ == "__main__": 139 | main() -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | Scripts 2 | ======= 3 | 4 | Just a collection of scripts 5 | -------------------------------------------------------------------------------- /RigDecoder.py: -------------------------------------------------------------------------------- 1 | import re 2 | import sys 3 | import operator 4 | from optparse import OptionParser 5 | 6 | def clean_comments(line): 7 | return re.sub('/\*(.*?)\*/', '', line) 8 | 9 | def find_split(all_text): 10 | split_chars = re.search('function y\(\) \{return "(.*?)";', all_text) 11 | return split_chars.group(1) 12 | 13 | def find_content(all_text): 14 | counted = {} 15 | # This should be the most common occourance 16 | # get the first 3 chars of every line and count them in to a dict 17 | for line in all_text.split(';'): 18 | try: 19 | counted[line[:3]] += 1 20 | except: 21 | counted[line[:3]] = 1 22 | var_name = max(counted.iteritems(), key=operator.itemgetter(1))[0] 23 | 24 | return var_name 25 | 26 | if __name__ == "__main__": 27 | parser = OptionParser(usage= 'usage: %prog inputfile output_file\nRig Exploit Kit Decoder\nBy Kevin Breen @KevTheHermit') 28 | parser.add_option('-e', '--exploits', action='store_true', default=False, help="Identify Exploits") 29 | parser.add_option('-t', '--tidy', action='store_true', default=False, help="Tidy the output code") 30 | (options, args) = parser.parse_args() 31 | 32 | # Start here 33 | if len(args) < 2: 34 | parser.print_help() 35 | sys.exit() 36 | 37 | input_file = args[0] 38 | output_file = args[1] 39 | 40 | try: 41 | print "[+] Reading input file" 42 | landing_page = open(input_file, 'r').read() 43 | except: 44 | print "[!] Unable to open file" 45 | sys.exit() 46 | 47 | try: 48 | # Find the split char it always seems to be in y() 49 | split_char = find_split(landing_page) 50 | 51 | # Find the var that holds the content 52 | var_name = find_content(landing_page) 53 | 54 | # Remove all Comments 55 | landing_page = clean_comments(landing_page) 56 | 57 | # Extract all the contents 58 | coded_line = "" 59 | for line in landing_page.split(';'): 60 | if line.startswith(var_name): 61 | coded_line += line.split('"')[1] 62 | 63 | # Decode Char Codes 64 | out_string = "" 65 | char_list = coded_line.split(split_char) 66 | for char in char_list: 67 | try: 68 | out_string += chr(int(char)) 69 | except: 70 | print " [-] Failed to read {0}".format(char) 71 | 72 | # remove comments again 73 | out_string = clean_comments(out_string) 74 | 75 | # Write out to file 76 | if options.tidy: 77 | print "[+] Tidying Output" 78 | out_string = out_string.replace(';',';\n') 79 | out_string = out_string.replace('}', '}\n') 80 | with open(output_file, 'w') as out: 81 | out.write(out_string) 82 | 83 | print "[+] Output Written to: {0}".format(output_file) 84 | 85 | if options.exploits: 86 | print "Searching for Possible Exploits" 87 | if 'ShockwaveFlash' in out_string: 88 | print " [-] Found Possible Flash Exploit" 89 | if 'silverlight' in out_string: 90 | print " [-] Found Possible SilverLight Exploit" 91 | if 'gum.dashstyle' in out_string: 92 | print " [-] Found Possible Internet Explorer Exploit" 93 | if '' in out_string: 94 | print " [-] Found Possible Java Exploit" 95 | 96 | except: 97 | print "[!] 
Unable to find Rig EK" 98 | -------------------------------------------------------------------------------- /adWindDecoder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Adwind Class Decoder 4 | ''' 5 | 6 | __description__ = 'Adwind Class Decoder' 7 | __author__ = 'Kevin Breen http://techanarchy.net' 8 | __version__ = '0.1' 9 | __date__ = '2014/01' 10 | 11 | import sys 12 | import string 13 | import os 14 | from optparse import OptionParser 15 | import zlib 16 | 17 | try: 18 | from Crypto.Cipher import ARC4 19 | from Crypto.Cipher import DES 20 | except ImportError: 21 | print "Cannot import PyCrypto, Is it installed?" 22 | 23 | 24 | def main(): 25 | parser = OptionParser(usage='usage: %prog [options] pass inFile outFile\n' + __description__, version='%prog ' + __version__) 26 | parser.add_option("-d", "--DES", action='store_true', default=False, help="ENC Mode = DES") 27 | parser.add_option("-r", "--RC4", action='store_true', default=False, help="ENC Mode = RC4") 28 | 29 | (options, args) = parser.parse_args() 30 | if len(args) != 3: 31 | parser.print_help() 32 | sys.exit() 33 | password = args[0] 34 | infile = args[1] 35 | outfile = args[2] 36 | 37 | 38 | 39 | with open(outfile, 'w') as out: 40 | data = open(infile, 'rb').read() 41 | 42 | if options.DES == True: 43 | result = DecryptDES(password[:8], data) 44 | elif options.RC4 == True: 45 | result = DecryptRC4(password, data) 46 | else: 47 | print "No Cypher selected" 48 | sys.exit() 49 | if infile.endswith(".adwind"): 50 | result = decompress(result) 51 | out.write(result) 52 | else: 53 | result = filter(lambda x: x in string.printable, result) 54 | out.write(result) 55 | 56 | #### DES Cipher #### 57 | 58 | def DecryptDES(enckey, data): 59 | cipher = DES.new(enckey, DES.MODE_ECB) # set the ciper 60 | return cipher.decrypt(data) # decrpyt the data 61 | 62 | ####RC4 Cipher #### 63 | def DecryptRC4(enckey, data): 64 | cipher = ARC4.new(enckey) # set the ciper 65 | return cipher.decrypt(data) # decrpyt the data 66 | 67 | 68 | 69 | #### ZLIB #### 70 | 71 | def decompress(data): 72 | ba = bytearray(data) 73 | this = zlib.decompress(bytes(data), 15+32) 74 | return this 75 | if __name__ == "__main__": 76 | main() -------------------------------------------------------------------------------- /bozokConfig.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Bozok Config Extractor 4 | ''' 5 | 6 | __description__ = 'Bozok Config Extractor' 7 | __author__ = 'Kevin Breen http://techanarchy.net' 8 | __version__ = '0.2' 9 | __date__ = '2013/11' 10 | 11 | import re 12 | import os 13 | import sys 14 | from optparse import OptionParser 15 | 16 | 17 | def main(): 18 | parser = OptionParser(usage='usage: %prog [options] inFile outConfig\n' + __description__, version='%prog ' + __version__) 19 | parser.add_option("-r", "--recursive", action='store_true', default=False, help="Recursive Mode") 20 | (options, args) = parser.parse_args() 21 | if len(args) != 2: 22 | parser.print_help() 23 | sys.exit() 24 | 25 | if options.recursive == True: 26 | with open(args[1], 'a+') as out: 27 | out.write("Filename,ServerID,Mutex,InstallName,Startup Name,Extension,Password,Install Flag,Startup Flag,Visible Flag,Unknown Flag,Unknown Flag,Port,Domain,Unknown Flag\n") 28 | for server in os.listdir(args[0]): 29 | config = configExtract(os.path.join(args[0], server)) 30 | if config != None: 31 | out.write((server+',')) 32 | for 
column in config: 33 | out.write((column+',')) 34 | out.write('\n') 35 | else: 36 | with open(args[1], 'a+') as out: 37 | out.write("Filename,ServerID,Mutex,InstallName,Startup Name,Extension,Password,Install Flag,Startup Flag,Visible Flag,Unknown Flag,Unknown Flag,Port,Domain,Unknown Flag\n") 38 | config = configExtract(args[0]) 39 | if config != None: 40 | out.write((args[0]+',')) 41 | for column in config: 42 | out.write((column+',')) 43 | out.write('\n') 44 | 45 | def configExtract(server): 46 | openfile = open(server, 'rb').read() # Open and read the server 47 | try: 48 | match = re.findall('O\x00\x00\x00(.+)\|\x00\x00\x00', openfile) # find the config section 49 | clean_config = match[0].replace('\x00', '') # replace all wide null chars 50 | config = clean_config.split('|') # split on our | 51 | return config 52 | except: 53 | print "Couldn't Locate the Config. Is it Packed?" 54 | 55 | 56 | 57 | if __name__ == "__main__": 58 | main() -------------------------------------------------------------------------------- /fileHasher.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Copyright (C) 2012-2013 Kevin Breen. 4 | File Hasher 5 | Python script to walk a directory and hash every file (MD5 / ssdeep) 6 | ''' 7 | __description__ = 'File Hasher, walks a directory and records an MD5 and ssdeep hash for every file' 8 | __author__ = 'Kevin Breen' 9 | __version__ = '0.3' 10 | __date__ = '2013/04/22' 11 | 12 | 13 | import os 14 | import sys 15 | import hashlib 16 | from datetime import datetime 17 | from optparse import OptionParser, OptionGroup 18 | 19 | 20 | def main(): 21 | parser = OptionParser(usage='usage: %prog [options] root output\n' + __description__, version='%prog ' + __version__) 22 | parser.add_option("-m", "--md5", action='store_true', default=False, help="MD5 Each File") 23 | parser.add_option("-s", "--ssdeep", action='store_true', default=False, help="ssdeep Each File") 24 | (options, args) = parser.parse_args() 25 | if len(args) != 2: 26 | parser.print_help() 27 | sys.exit() 28 | rootDir = args[0] 29 | outFile = args[1] 30 | counter = 0 31 | startTime = datetime.now() 32 | for path, subdirs, files in os.walk(rootDir): 33 | for names in files: 34 | counter += 1 35 | pathName = os.path.join(path, names) 36 | md5, deep = hashing().fileHash(pathName) 37 | print pathName, md5, deep 38 | reportMain(outFile, pathName, md5, deep) 39 | endTime = datetime.now() - startTime 40 | print endTime 41 | print counter 42 | 43 | 44 | class hashing: 45 | def fileHash(self, filePath): 46 | 47 | try: 48 | with open(filePath, 'rb') as fh: 49 | data = fh.read() 50 | m = hashlib.md5() 51 | m.update(data) 52 | md5 = m.hexdigest() 53 | try: 54 | import ssdeep 55 | deep = ssdeep.hash(data) 56 | except: 57 | deep = "Null" 58 | except: 59 | md5 = "Null" 60 | deep = "Null" 61 | return md5, deep 62 | 63 | 64 | class reportMain: 65 | def __init__(self, outFile, pathName, md5, deep): 66 | with open(outFile, "a") as f: 67 | f.write("%s, %s, %s\n" % (pathName, md5, deep)) 68 | 69 | if __name__ == "__main__": 70 | main() -------------------------------------------------------------------------------- /imageMounter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Copyright (C) 2013 Kevin Breen.
4 | Python script to MNT Partitions on a Disk Image 5 | http://techanarchy.net 6 | ''' 7 | __description__ = 'Python script to MNT Partitions on a Disk Image' 8 | __author__ = 'Kevin Breen @KevTheHermit' 9 | __version__ = '0.5' 10 | __date__ = '2014/09/13' 11 | 12 | 13 | import os 14 | import sys 15 | import hashlib 16 | import commands 17 | import subprocess 18 | from datetime import datetime 19 | from optparse import OptionParser, OptionGroup 20 | 21 | supported_types = ['0x83', '0x07', '0x0b', '0x0c','0x06', '0x17', '0x16', '0x1b', '0x1c', 'Basic data partition'] 22 | vfat = ['0x0b', '0x0c', '0x06', '0x1b', '0x1c', '0x16'] 23 | ntfs = ['0x07', '0x17', 'Basic data partition'] 24 | 25 | 26 | def parse_mmls(img_path): 27 | # use mmls to get a list of partitions. 28 | try: 29 | mmls_output = commands.getoutput("mmls {0}".format(img_path)) 30 | except Exception as e: 31 | print "[+] MMLS Failed with Exception {0}".format(e) 32 | return False, False 33 | #Build a Dictionary containing all the Partition Information 34 | partition_info = {} 35 | part_count = 0 36 | for line in mmls_output.split('\n'): 37 | if line == 'Cannot determine partition type': 38 | print "[+] Image file doesnt Contain a Partition Table" 39 | print "[+] If this is a single partition try the '-s' option" 40 | sys.exit() 41 | # We need sector size. 42 | if line.startswith('Units'): 43 | sector_size = int(line.split()[3].split('-')[0]) 44 | # we need to get all the partitions. But only supported ones 45 | if any(fs_type in line for fs_type in supported_types): 46 | # Dict for single part 47 | inf = {} 48 | line_info = line.split(' ') 49 | if len(line_info) == 5: 50 | inf['Start'] = int(line_info[1]) 51 | inf['End'] = int(line_info[2]) 52 | inf['Length'] = int(line_info[3]) 53 | inf['Type'] = line_info[4].split('(')[1][:-1] 54 | elif len(line_info) == 6: 55 | inf['Start'] = int(line_info[2]) 56 | inf['End'] = int(line_info[3]) 57 | inf['Length'] = int(line_info[4]) 58 | inf['Type'] = line_info[5] 59 | # Calculated offset 60 | inf['Offset'] = inf['Start'] * sector_size 61 | # add partition to list of all parts 62 | partition_info[part_count] = inf 63 | part_count += 1 64 | return partition_info, part_count 65 | 66 | def mount_single(img_file, mnt_path): 67 | if not os.path.exists(mnt_path): 68 | os.makedirs(mnt_path) 69 | print "[+] Attempting to Mount {0} at {1}".format(img_file, mnt_path) 70 | try: 71 | retcode = subprocess.call("mount -o ro,loop {0} {1}".format(img_file, mnt_path), shell=True) 72 | #Crappy error Handling here 73 | if retcode != 0: 74 | sys.exit() 75 | print " [-] Mounted {0} at {1}".format(img_file, mnt_path) 76 | print " [-] To unmount run 'sudo umount %s'" % mnt_path 77 | except: 78 | print "[+] Failed to Mount %s" % mnt_path 79 | 80 | def mount_multi(img_file, mnt_path, part_count, part_data): 81 | for i in range(part_count): 82 | new_path = os.path.join(mnt_path, str(i)) 83 | print "[+] Creating Temp Mount Point at {0}".format(new_path) 84 | if not os.path.exists(new_path): 85 | os.makedirs(new_path) 86 | print "[+] Attempting to Mount Partition {0} at {1}".format(i, new_path) 87 | try: 88 | offset = part_data[i]["Offset"] 89 | fs_type = part_data[i]["Type"] 90 | if fs_type in vfat: 91 | fs_type = 'vfat' 92 | if fs_type in ntfs: 93 | fs_type = 'ntfs' 94 | retcode = subprocess.call('mount -t {0} -o ro,loop,offset={1} {2} {3}'.format(fs_type, offset, img_file, new_path), shell=True) 95 | #Crappy error Handling here 96 | if retcode != 0: 97 | sys.exit() 98 | print " [-] Mounted {0} at 
{1}".format(img_file, new_path) 99 | print " [-] To unmount run 'sudo umount {0}'".format(new_path) 100 | except: 101 | print "[+] Failed to Mount {0}".format(new_path) 102 | 103 | def ewf_mount(img_file): 104 | print "[+] Processing E01 File" 105 | #check we have an EO file 106 | if img_file.endswith('.E01'): 107 | # create a tmp mnt point 108 | ts = datetime.now().strftime('%Y_%m_%d-%H_%S') 109 | ewf_path = '/mnt/ewf_{0}'.format(ts) 110 | if not os.path.exists(ewf_path): 111 | try: 112 | os.makedirs(ewf_path) 113 | except Exception as e: 114 | print "Unable to create Temp Dir: {0}".format(e) 115 | sys.exit() 116 | # Run ewfmount 117 | try: 118 | retcode = subprocess.call('ewfmount {0} {1}'.format(img_file, ewf_path), shell=True) 119 | if retcode != 0: 120 | sys.exit() 121 | print "[+] Mounted E0 File at {0}".format(ewf_path+'/ewf1') 122 | print " [-] to unmount run 'sudo umount {0}'".format(ewf_path) 123 | return ewf_path+'/ewf1' 124 | except Exception as e: 125 | print "Failed to mount E0 File: {0}".format(e) 126 | sys.exit() 127 | 128 | def bitlock_mount(img_file, mnt_path, recover_key, part_data): 129 | # for now we only support recovery key 130 | offset = part_data[0]['Offset'] 131 | print "[+] Processing Bitlocked Drive" 132 | #check we have an EO file 133 | ts = datetime.now().strftime('%Y_%m_%d-%H_%S') 134 | bitlock_path = '/mnt/bde_{0}'.format(ts) 135 | if not os.path.exists(bitlock_path): 136 | try: 137 | os.makedirs(bitlock_path) 138 | except Exception as e: 139 | print "Unable to create Temp Dir: {0}".format(e) 140 | sys.exit() 141 | # Run bdemount 142 | try: 143 | retcode = subprocess.call('bdemount -r {0} -o {1} {2} {3}'.format(recover_key, offset, img_file, bitlock_path), shell=True) 144 | if retcode != 0: 145 | sys.exit() 146 | print "[+] Mounted Bitlocker File at {0}".format(bitlock_path+'/bde1') 147 | print " [-] to unmount run 'sudo umount {0}'".format(bitlock_path) 148 | img_path = bitlock_path+'/bde1' 149 | except Exception as e: 150 | print "Failed to mount File: {0}".format(e) 151 | sys.exit() 152 | mount_single(img_path, mnt_path) 153 | 154 | 155 | def main(): 156 | parser = OptionParser(usage='usage: %prog [options] image_name mnt_point\n' + __description__, version='%prog ' + __version__) 157 | parser.add_option("-s", "--single", action='store_true', default=False, help="Single partition in image") 158 | parser.add_option("-i", "--info", action='store_true', default=False, help="Just Display the information") 159 | parser.add_option("-e", "--e01", action='store_true', default=False, help="Use ewfmount to mount E0 Evidence Files") 160 | parser.add_option("-b", "--bitlocker", action='store_true', default=False, help="Mount Bitlocker Drive with Recovery Key") 161 | parser.add_option("-k", "--key", dest='recover_key', default=False, help="Bitlocker Recovery Key") 162 | (options, args) = parser.parse_args() 163 | 164 | if len(args) == 0: 165 | print "[+] You need to give me some Paths" 166 | parser.print_help() 167 | sys.exit() 168 | 169 | # Sort args 170 | img_file = args[0] 171 | mnt_path = args[1] 172 | 173 | # Dont trust the user force E01 Check 174 | if img_file.endswith('.E01'): 175 | options.e01 = True 176 | 177 | # We need to proces E0 Files before anythign else 178 | if options.e01: 179 | # mount the ewf and use this as our new img_file 180 | img_file = ewf_mount(img_file) 181 | 182 | # Single partition. 
We have no mmls data so jsut try mounting 183 | if options.single: 184 | mount_single(img_file, mnt_path) 185 | 186 | # Mount a Disk with multi parts 187 | if not options.single and not options.bitlocker: 188 | # Get partition Information 189 | part_data, part_count = parse_mmls(img_file) 190 | if part_data: 191 | mount_multi(img_file, mnt_path, part_count, part_data) 192 | 193 | # Mount a BitLocked Drive 194 | if options.bitlocker and options.recover_key: 195 | part_data, part_count = parse_mmls(img_file) 196 | bitlock_mount(img_file, mnt_path, options.recover_key, part_data) 197 | 198 | 199 | if __name__ == "__main__": 200 | if os.getuid() == 0: 201 | main() 202 | else: 203 | print "[+] You must be Root or Sudo to run this Script" 204 | sys.exit() 205 | 206 | 207 | -------------------------------------------------------------------------------- /jRatConfig.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Copyright (C) 2013 Kevin Breen. 4 | jRat Config Parser 5 | 6 | ''' 7 | __description__ = 'jRat Config Parser' 8 | __author__ = 'Kevin Breen' 9 | __version__ = '0.1' 10 | __date__ = '2013/08/05' 11 | 12 | import sys 13 | import base64 14 | import string 15 | from zipfile import ZipFile 16 | from optparse import OptionParser 17 | try: 18 | from Crypto.Cipher import AES 19 | from Crypto.Cipher import DES3 20 | except ImportError: 21 | print "Cannot import PyCrypto, Is it installed?" 22 | 23 | 24 | def main(): 25 | parser = OptionParser(usage='usage: %prog [options] InFile SavePath\n' + __description__, version='%prog ' + __version__) 26 | parser.add_option("-v", "--verbose", action='store_true', default=False, help="Verbose Output") 27 | (options, args) = parser.parse_args() 28 | if len(args) != 2: 29 | parser.print_help() 30 | sys.exit() 31 | 32 | archive = args[0] 33 | outfile = args[1] 34 | dropper = None 35 | conf = None 36 | with ZipFile(archive, 'r') as zip: 37 | for name in zip.namelist(): # get all the file names 38 | if name == "key.dat": # this file contains the encrytpion key 39 | enckey = zip.read(name) 40 | if name == "enc.dat": # if this file exists, jrat has an installer / dropper 41 | dropper = zip.read(name) 42 | if name == "config.dat": # this is the encrypted config file 43 | conf = zip.read(name) 44 | if dropper != None: # we need to process the dropper first 45 | print "Dropper Detected" 46 | ExtractDrop(enckey, dropper, outfile) 47 | elif conf != None: # if theres not dropper just decrpyt the config file 48 | if len(enckey) == 16: # version > 3.2.3 use AES 49 | cleandrop = DecryptAES(enckey, conf) 50 | WriteReport(enckey, outfile, cleandrop) 51 | elif len(enckey) == 24: # versions <= 3.2.3 use DES 52 | cleandrop = DecryptDES(enckey, conf) 53 | WriteReport(enckey, outfile, cleandrop) 54 | 55 | def ExtractDrop(enckey, data, outfile): 56 | split = enckey.split('\x2c') 57 | key = split[0][:16] 58 | with open(outfile, 'a') as new: 59 | print "### Dropper Information ###" 60 | new.write("### Dropper Information ###\n") 61 | for x in split: # grab each line of the config and decode it. 
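# Each element appears to be base64 text that decodes to a hex string; the first element also embeds the 16-byte AES key (split[0][:16] above), so the except branch below retries the decode after skipping those 16 characters.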
62 | try: 63 | drop = base64.b64decode(x).decode('hex') 64 | print drop 65 | new.write(drop+'\n') 66 | except: 67 | drop = base64.b64decode(x[16:]).decode('hex') 68 | print drop 69 | new.write(drop+'\n') 70 | newzipdata = DecryptAES(key, data) 71 | from cStringIO import StringIO 72 | newZip = StringIO(newzipdata) # Write new zip file to memory instead of to disk 73 | with ZipFile(newZip) as zip: 74 | for name in zip.namelist(): 75 | if name == "key.dat": # contains the encryption key 76 | enckey = zip.read(name) 77 | if name == "config.dat": 78 | conf = zip.read(name) # the encrypted config file 79 | if len(enckey) == 16: # version > 3.2.3 use AES 80 | printkey = enckey.encode('hex') 81 | print "AES Key Found: ", printkey 82 | cleandrop = DecryptAES(enckey, conf) # pass to the decrpyt function 83 | print "### Configuration File ###" 84 | WriteReport(printkey, outfile, cleandrop) 85 | elif len(enckey) == 24: # versions <= 3.2.3 use DES 86 | printkey = enckey 87 | print "DES Key Found: ", enckey 88 | cleandrop = DecryptDES(enckey, conf) # pass to the decrpyt function 89 | print "### Configuration File ###" 90 | WriteReport(enckey, outfile, cleandrop) 91 | 92 | def DecryptAES(enckey, data): 93 | cipher = AES.new(enckey) # set the cipher 94 | return cipher.decrypt(data) # decrpyt the data 95 | 96 | def DecryptDES(enckey, data): 97 | 98 | cipher = DES3.new(enckey) # set the ciper 99 | return cipher.decrypt(data) # decrpyt the data 100 | 101 | def WriteReport(key, outfile, data): # this should be self expanatory 102 | split = data.split("SPLIT") 103 | with open(outfile, 'a') as new: 104 | new.write(key) 105 | new.write('\n') 106 | for s in split: 107 | stripped = (char for char in s if 32 < ord(char) < 127) # im only interested in ASCII Characters 108 | line = ''.join(stripped) 109 | #if options.verbose == True: 110 | print line 111 | new.write(line) 112 | new.write('\n') 113 | print "Config Written To: ", outfile 114 | 115 | 116 | if __name__ == "__main__": 117 | main() 118 | -------------------------------------------------------------------------------- /jRatNetwork.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Copyright (C) 2013 Kevin Breen. 4 | jRat Network Decrypter 5 | 6 | ''' 7 | __description__ = 'jRat Config Parser' 8 | __author__ = 'Kevin Breen' 9 | __version__ = '0.1' 10 | __date__ = '2013/08/05' 11 | 12 | import sys 13 | import re 14 | import base64 15 | import string 16 | from zipfile import ZipFile 17 | from optparse import OptionParser 18 | try: 19 | from Crypto.Cipher import AES 20 | from Crypto.Cipher import DES3 21 | except ImportError: 22 | print "Cannot import PyCrypto, Is it installed?" 
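# Note on the capture format as parsed below: the data file appears to be a stream of length-prefixed records, a single length byte followed by that many characters, where a record starting with "-h " is hex encoded and anything else is base64 that decrypts with the AES or 3DES key read from the key file; AESDecrypt and DESDecrypt both walk the buffer this way.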
23 | 24 | 25 | def main(): 26 | parser = OptionParser(usage='usage: %prog [options] keyFile DataFile -o -s \n' + __description__, version='%prog ' + __version__) 27 | parser.add_option("-v", action='store_true', default=False, help="Verbose Output") 28 | parser.add_option("-o", dest="offset", default=0, help="Optional Offset") 29 | parser.add_option("-s", dest='outfile', help="OutPut Save File") 30 | (options, args) = parser.parse_args() 31 | if len(args) != 2: 32 | parser.print_help() 33 | sys.exit() 34 | 35 | 36 | keyFile = args[0] 37 | dataFile = args[1] 38 | enckey = open(keyFile, 'rb').read() 39 | f = open(dataFile, 'rb').read().replace("\x00", "") 40 | stream = bytearray(f) 41 | EOF = len(stream) 42 | Offset = int(options.offset) 43 | outfile = options.outfile 44 | if len(enckey) == 16: 45 | printkey = enckey.encode('hex') 46 | print "AES Key Found: ", printkey 47 | report = AESDecrypt(enckey, stream, Offset, EOF) 48 | if options.outfile: 49 | writereport(outfile, report) 50 | elif len(enckey) == 24: 51 | printkey = enckey 52 | print "DES Key Found: ", enckey 53 | report = DESDecrypt(enckey, stream, Offset, EOF) 54 | if options.outfile: 55 | writereport(outfile, report) 56 | else: 57 | print "Unknown Key Length Found" 58 | sys.exit() 59 | 60 | #with open('sample2clean.bin', 'a') as clean: 61 | # clean.write(f) 62 | 63 | def AESDecrypt(enckey, stream, Offset, EOF): 64 | cipher = AES.new(enckey) 65 | report = [] 66 | counter = 11 67 | while Offset < EOF: 68 | print "off: ", Offset 69 | Length = stream[Offset] 70 | '''if counter != 0: 71 | Offset += 1 72 | print "off: ", Offset 73 | Length = stream[Offset] 74 | counter -= 1 75 | print counter''' 76 | if Length == 21 or Length == 31 : 77 | Offset += 1 78 | Length = stream[Offset] 79 | print "len: ", Length 80 | this = [] 81 | 82 | for l in range(Offset+1,Offset+1+Length): 83 | this.append(chr(stream[l])) 84 | 85 | predecode = "".join(this) 86 | print predecode 87 | try: 88 | if predecode.startswith("-h"): 89 | decstring = predecode[3:].decode('hex') 90 | else: 91 | string = base64.b64decode(predecode) 92 | decstring = cipher.decrypt(string) 93 | clean = (char for char in decstring if 31 < ord(char) < 127) 94 | line = ''.join(clean) 95 | print line 96 | report.append(line) 97 | Offset += (Length+1) 98 | except: 99 | Offset += (Length+1) 100 | return report 101 | 102 | 103 | def DESDecrypt(enckey, stream, Offset, EOF): 104 | cipher = DES3.new(enckey) 105 | report = [] 106 | while Offset < EOF: 107 | #print "off: ", Offset 108 | Length = stream[Offset] 109 | #print "len: ", Length 110 | this = [] 111 | for l in range(Offset+1,Offset+1+Length): 112 | this.append(chr(stream[l])) 113 | predecode = "".join(this) 114 | #print predecode 115 | if predecode.startswith("-h"): 116 | decstring = predecode[3:].decode('hex') 117 | else: 118 | string = base64.b64decode(predecode) 119 | decstring = cipher.decrypt(string) 120 | clean = (char for char in decstring if 31 < ord(char) < 127) 121 | line = ''.join(clean) 122 | report.append(line) 123 | Offset += (Length+1) 124 | return report 125 | 126 | def writereport(outfile, report): 127 | with open(outfile, 'w') as reportFile: 128 | for line in report: 129 | reportFile.write(line) 130 | reportFile.write('\n') 131 | print "Report Written to: ", outfile 132 | 133 | if __name__ == "__main__": 134 | main() -------------------------------------------------------------------------------- /quarantine.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 
''' 3 | Copyright (C) 2013 Kevin Breen. 4 | McAfee Quarantine Extractor 5 | ''' 6 | 7 | __description__ = 'Python script to extract McAfee Quarantine Files' 8 | __author__ = 'Kevin Breen' 9 | __version__ = '0.1' 10 | __date__ = '2013/08/05' 11 | 12 | import os 13 | import sys 14 | import subprocess 15 | from optparse import OptionParser 16 | 17 | def main(): 18 | parser = OptionParser(usage='usage: %prog [options] bupFile savePath\n' + __description__, version='%prog ' + __version__) 19 | parser.add_option("-k", "--key", dest="key", default=0x6A, help="Optional XOR Key Default is 0x6A") 20 | (options, args) = parser.parse_args() 21 | if len(args) != 2: 22 | parser.print_help() 23 | sys.exit() 24 | bupFile = args[0] 25 | savePath = args[1] 26 | key = options.key if isinstance(options.key, int) else int(options.key, 0) # -k arrives as a string from optparse, so accept hex (0x6A) or decimal values 27 | if not os.path.exists(savePath): 28 | os.makedirs(savePath) 29 | try: 30 | subprocess.call(["7z", "e", bupFile]) 31 | except: 32 | print "Failed to extract. Is 7z installed?" 33 | sys.exit() 34 | encodedA = bytearray(open('Details', 'rb').read()) 35 | for i in range(len(encodedA)): 36 | encodedA[i] ^= key 37 | open(os.path.join(savePath, 'Details.txt'), 'wb').write(encodedA) 38 | 39 | encodedB = bytearray(open('File_0', 'rb').read()) 40 | for i in range(len(encodedB)): 41 | encodedB[i] ^= key 42 | open(os.path.join(savePath, 'File_0.xor'), 'wb').write(encodedB) 43 | 44 | os.remove('Details') 45 | os.remove('File_0') 46 | 47 | if __name__ == "__main__": 48 | main() -------------------------------------------------------------------------------- /usbkeycap.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import commands 4 | import subprocess 5 | import json 6 | from optparse import OptionParser 7 | 8 | def main(): 9 | parser = OptionParser(usage='Usage: %prog [options] pcapfile') 10 | parser.add_option("-l", "--language", default="gb", help="Keyboard Language") 11 | parser.add_option("-a", "--address", help="USB Device Address") 12 | (options, args) = parser.parse_args() 13 | 14 | if len(args) == 0: 15 | print "[+] You need to provide a pcap file" 16 | sys.exit() 17 | 18 | if not options.address: 19 | print "[!]
You need to provide a USB Device Address" 20 | sys.exit() 21 | 22 | tshark_output = commands.getoutput('tshark -r {0} -T fields -e usb.capdata -R "usb.capdata != 00:00:00:00:00:00:00:00 && usb.transfer_type == 0x01 && usb.device_address=={1}" -2'.format(args[0], options.address)) 23 | # 24 | 25 | # tshark -r Keylogger.pcapng -T fields -e usb.capdata -R "usb.device_address==10" -2 > keystrokes.txt 26 | 27 | duck_lang = options.language 28 | out_file = '' 29 | 30 | # Read in the keyboard language map, e.g. gb.json 31 | lang_file = json.load(open('{0}.json'.format(duck_lang))) 32 | 33 | # Format tshark output 34 | for line in tshark_output.split('\n'): 35 | try: 36 | key_codes = line.split(':') 37 | except: 38 | key_codes = False 39 | 40 | 41 | # Create compatible keymap 42 | if key_codes and len(key_codes) > 3 and key_codes[3] == '00': 43 | 44 | if key_codes[0] == '20': 45 | key_codes[0] = '02' 46 | 47 | keymap = '' 48 | keymap += key_codes[0] 49 | keymap += ',' 50 | keymap += key_codes[1] 51 | keymap += ',' 52 | keymap += key_codes[2] 53 | 54 | keymap_char = '' 55 | for key, value in lang_file.iteritems(): 56 | 57 | if keymap == value: 58 | keymap_char = key 59 | if key == 'SPACE': 60 | keymap_char = ' ' 61 | elif key == 'ENTER': 62 | keymap_char = '\n' 63 | elif key == 'SHIFT': 64 | keymap_char = '' 65 | 66 | 67 | if keymap_char: 68 | out_file += keymap_char 69 | else: 70 | print "Unmapped Key Found: ", key_codes 71 | 72 | print "Captured KeyStrokes\n" 73 | print out_file 74 | print "End Captured Session" 75 | 76 | 77 | 78 | if __name__ == "__main__": 79 | main() 80 | 81 | 82 | 83 | -------------------------------------------------------------------------------- /vt_feed.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | VirusTotal Feed Collector 4 | Set your API and change max limit as required 5 | ''' 6 | 7 | __description__ = 'VT Feed Collector' 8 | __author__ = 'Kevin Breen http://techanarchy.net' 9 | __version__ = '0.3' 10 | __date__ = '2013/10' 11 | 12 | import sys 13 | import os 14 | import urllib2 15 | from optparse import OptionParser 16 | import xml.etree.cElementTree as ET 17 | 18 | ##################################### USER CONFIG HERE ######################################################## 19 | api_key = 'Your Key Here' 20 | feed_url = 'https://www.virustotal.com/intelligence/hunting/notifications-feed/?key=%s&output=xml' % api_key 21 | max_down = 20 22 | ################################################################################################################### 23 | 24 | # Set Global Vars 25 | counter = 0 26 | exists = 0 27 | downloads = 0 28 | 29 | def main(): 30 | parser = OptionParser(usage='usage: %prog [options] ruleName\n' + __description__, version='%prog ' + __version__) 31 | parser.add_option("-s", "--RuleSet", action='store_true', default=False, help="Specify a Single RuleSet") 32 | parser.add_option("-n", "--RuleName", action='store_true', default=False, help="Specify a Single RuleName") 33 | parser.add_option("-l", "--list", action='store_true', default=False, help="List And Count All Matches") 34 | (options, args) = parser.parse_args() 35 | if (options.RuleSet == True or options.RuleName == True) and len(args) != 1: 36 | parser.print_help() 37 | sys.exit() 38 | 39 | #Pull the XML Feed from VT Int 40 | request = urllib2.Request(feed_url, headers={"Accept" : "application/xml"}) 41 | xml = urllib2.urlopen(request) 42 | #Parse the XML 43 | tree = ET.parse(xml) 44 | root = tree.getroot() 45 | 46 | 47 | if options.list == True: # Just list the
output of the feed 48 | list(root) 49 | sys.exit() 50 | #Im only interested in rule names and the sha to download 51 | for item in root[0].findall('item'): 52 | try: 53 | sha256, ruleset, rulename = item.find('title').text.split() 54 | except: 55 | print "Could Not Find Valid Title Line, Does your RuleSet, RuleName contain Spaces?" 56 | sys.exit() 57 | if options.RuleSet == True: 58 | if args[0] == ruleset: 59 | download(sha256, ruleset, rulename) 60 | elif options.RuleName == True: 61 | if args[0] == rulename: 62 | download(sha256, ruleset, rulename) 63 | else: 64 | download(sha256, ruleset, rulename) 65 | 66 | print "\nTotal Hashes in Feed", counter 67 | print "Files Skipped", exists 68 | print "Files downloaded", downloads 69 | 70 | # This is the donwnloads function 71 | def download(sha256, ruleset, rulename): 72 | global exists, downloads, max_down, counter 73 | # create the folder paths 74 | if not os.path.exists(os.path.join(ruleset, rulename)): 75 | os.makedirs(os.path.join(ruleset, rulename)) 76 | # if the file already exists dont download again 77 | if os.path.exists(os.path.join(ruleset, rulename, sha256)): 78 | print "Hash Already Exists, Passing" 79 | exists +=1 80 | #Stay below the max_down count and download 81 | elif downloads < max_down: 82 | url = "https://www.virustotal.com/intelligence/download/?hash=%s&apikey=%s" %(sha256, api_key) 83 | print "downloading %s" % sha256 84 | f = urllib2.urlopen(url) 85 | data = f.read() 86 | 87 | # Save each file in to a folder after the rule 88 | with open(os.path.join(ruleset, rulename, sha256), "wb") as save_file: 89 | save_file.write(data) 90 | downloads +=1 91 | elif downloads > max_down: 92 | print "Self Imposed Download Limit Reached" 93 | counter +=1 94 | 95 | # List all the samples in the feed that dont exist in the sample folders 96 | def list(root): 97 | counts = {} 98 | for item in root[0].findall('item'): 99 | try: 100 | sha256, ruleset, rulename = item.find('title').text.split() 101 | if rulename in counts and (not os.path.exists(os.path.join(ruleset, rulename, sha256))): # if rule is in feed and doesnt exist increment the counter 102 | counts[rulename] +=1 103 | elif not os.path.exists(os.path.join(ruleset, rulename, sha256)): # If theres no entry in the counter yet set one. 104 | counts[rulename] = 1 105 | except: 106 | print "Could Not Find Valid Title Line, Does your RuleSet, RuleName contain Spaces?" 
107 | sys.exit() 108 | if len(counts) > 0: 109 | print "New Items in feed not already saved" # Print the results of the counts 110 | for rule, count in counts.items(): 111 | print rule,count 112 | else: 113 | print "No New Match's For Download" 114 | 115 | if __name__ == "__main__": 116 | if api_key == 'Your Key Here': 117 | print "You need to add your own API key" 118 | sys.exit() 119 | main() 120 | -------------------------------------------------------------------------------- /wide.py: -------------------------------------------------------------------------------- 1 | import base64 2 | from Crypto.Cipher import AES 3 | import re 4 | keyFile = "keyAES.dat" 5 | #configFile = "config.dat" 6 | 7 | enckey = open(keyFile, 'rb').read() 8 | #conf = open(configFile, 'rb').read() 9 | #new = enckey.encode('hex') 10 | 11 | cipher = AES.new(enckey) 12 | 13 | with open('raw-stream2.hex', 'rb') as f: 14 | f1 = re.findall( b'\x00\x18\x00(.{47}?)|\x00\x40\x00(.{127}?)|\x00\x2C\x00(.{87}?)|\x00\x81\x00\x2d\x00\x68\x00\x20(.{252}?)|\x00\x6f\x00\x2d\x00\x68\x00\x20(.{216}?)|\x00\x70\x00\x2d\x00\x68\x00\x20(.{222}?)', f.read()) 15 | 16 | for x in f1: 17 | for y in x: 18 | if y != "": 19 | 20 | string = base64.b64decode(y) 21 | decstring = cipher.decrypt(string) 22 | with open('outnewish.txt', 'a') as out: 23 | stripped = (char for char in decstring if 31 < ord(char) < 127) 24 | line = ''.join(stripped) 25 | out.write(line) 26 | out.write('\n') 27 | print decstring, '\n' 28 | 29 | 30 | 31 | #decConfig = cipher.decrypt(conf) 32 | 33 | 34 | 35 | --------------------------------------------------------------------------------