├── modules ├── lib │ ├── __init__.py │ ├── graphs.pyc │ ├── markup.pyc │ ├── __init__.pyc │ ├── hostchecker.pyc │ ├── htmlExport.pyc │ ├── hostchecker.py │ ├── htmlExport.py │ └── markup.py ├── discovery │ ├── IPy.pyc │ ├── crtsh.pyc │ ├── DNS │ │ ├── Lib.py │ │ ├── Lib.pyc │ │ ├── Type.py │ │ ├── Base.pyc │ │ ├── Class.pyc │ │ ├── Opcode.pyc │ │ ├── Status.pyc │ │ ├── Type.pyc │ │ ├── lazy.pyc │ │ ├── __init__.pyc │ │ ├── Opcode.py │ │ ├── Class.py │ │ ├── lazy.py │ │ ├── __init__.py │ │ ├── Status.py │ │ ├── win32dns.py │ │ └── Base.py │ ├── jigsaw.pyc │ ├── __init__.pyc │ ├── bingsearch.pyc │ ├── dnssearch.pyc │ ├── googleCSE.pyc │ ├── googlesets.pyc │ ├── netcraft.pyc │ ├── pgpsearch.pyc │ ├── shodan │ │ ├── api.pyc │ │ ├── __init__.py │ │ ├── __init__.pyc │ │ ├── wps.py │ │ └── api.py │ ├── virustotal.pyc │ ├── baidusearch.pyc │ ├── googlesearch.pyc │ ├── shodansearch.pyc │ ├── yahoosearch.pyc │ ├── yandexsearch.pyc │ ├── dogpilesearch.pyc │ ├── exaleadsearch.pyc │ ├── linkedinsearch.pyc │ ├── twittersearch.pyc │ ├── googleplussearch.pyc │ ├── __init__.py │ ├── shodansearch.py │ ├── pgpsearch.py │ ├── crtsh.py │ ├── virustotal.py │ ├── googlesets.py │ ├── netcraft.py │ ├── linkedinsearch.py │ ├── baidusearch.py │ ├── yahoosearch.py │ ├── dogpilesearch.py │ ├── twittersearch.py │ ├── googleplussearch.py │ ├── asksearch.py │ ├── jigsaw.py │ ├── yandexsearch.py │ ├── googlesearch.py │ ├── exaleadsearch.py │ ├── bingsearch.py │ ├── googleCSE.py │ ├── dnssearch-threads.py │ └── dnssearch.py ├── tests │ └── myparser_test.py ├── goofile.py ├── sub.py ├── theHarvester.py └── myparser.py ├── Packages.gz ├── magnifier_1.0-1_amd64.deb ├── magnifier_1.0-1.debian.tar.xz ├── snap └── snapcraft.yaml ├── magnifier_1.0-1.dsc ├── LICENSE ├── README.md ├── magnifier_1.0-1_source.buildinfo ├── magnifier.py ├── magnifier_1.0-1_source.build └── magnifier_1.0-1_amd64.build /modules/lib/__init__.py: -------------------------------------------------------------------------------- 1 | 
__all__ = ["markup", "graphs", "hostchecker"] 2 | -------------------------------------------------------------------------------- /Packages.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/Packages.gz -------------------------------------------------------------------------------- /modules/lib/graphs.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/lib/graphs.pyc -------------------------------------------------------------------------------- /modules/lib/markup.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/lib/markup.pyc -------------------------------------------------------------------------------- /modules/lib/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/lib/__init__.pyc -------------------------------------------------------------------------------- /magnifier_1.0-1_amd64.deb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/magnifier_1.0-1_amd64.deb -------------------------------------------------------------------------------- /modules/discovery/IPy.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/IPy.pyc -------------------------------------------------------------------------------- /modules/discovery/crtsh.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/crtsh.pyc 
-------------------------------------------------------------------------------- /modules/lib/hostchecker.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/lib/hostchecker.pyc -------------------------------------------------------------------------------- /modules/lib/htmlExport.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/lib/htmlExport.pyc -------------------------------------------------------------------------------- /magnifier_1.0-1.debian.tar.xz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/magnifier_1.0-1.debian.tar.xz -------------------------------------------------------------------------------- /modules/discovery/DNS/Lib.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Lib.py -------------------------------------------------------------------------------- /modules/discovery/DNS/Lib.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Lib.pyc -------------------------------------------------------------------------------- /modules/discovery/DNS/Type.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Type.py -------------------------------------------------------------------------------- /modules/discovery/jigsaw.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/jigsaw.pyc 
-------------------------------------------------------------------------------- /modules/discovery/DNS/Base.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Base.pyc -------------------------------------------------------------------------------- /modules/discovery/DNS/Class.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Class.pyc -------------------------------------------------------------------------------- /modules/discovery/DNS/Opcode.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Opcode.pyc -------------------------------------------------------------------------------- /modules/discovery/DNS/Status.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Status.pyc -------------------------------------------------------------------------------- /modules/discovery/DNS/Type.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/Type.pyc -------------------------------------------------------------------------------- /modules/discovery/DNS/lazy.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/lazy.pyc -------------------------------------------------------------------------------- /modules/discovery/__init__.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/__init__.pyc -------------------------------------------------------------------------------- /modules/discovery/bingsearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/bingsearch.pyc -------------------------------------------------------------------------------- /modules/discovery/dnssearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/dnssearch.pyc -------------------------------------------------------------------------------- /modules/discovery/googleCSE.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/googleCSE.pyc -------------------------------------------------------------------------------- /modules/discovery/googlesets.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/googlesets.pyc -------------------------------------------------------------------------------- /modules/discovery/netcraft.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/netcraft.pyc -------------------------------------------------------------------------------- /modules/discovery/pgpsearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/pgpsearch.pyc -------------------------------------------------------------------------------- /modules/discovery/shodan/api.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/shodan/api.pyc -------------------------------------------------------------------------------- /modules/discovery/virustotal.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/virustotal.pyc -------------------------------------------------------------------------------- /modules/discovery/DNS/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/DNS/__init__.pyc -------------------------------------------------------------------------------- /modules/discovery/baidusearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/baidusearch.pyc -------------------------------------------------------------------------------- /modules/discovery/googlesearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/googlesearch.pyc -------------------------------------------------------------------------------- /modules/discovery/shodan/__init__.py: -------------------------------------------------------------------------------- 1 | from api import WebAPI 2 | 3 | __version__ = "0.5.0" 4 | 5 | __all__ = ['WebAPI'] 6 | -------------------------------------------------------------------------------- /modules/discovery/shodansearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/shodansearch.pyc 
-------------------------------------------------------------------------------- /modules/discovery/yahoosearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/yahoosearch.pyc -------------------------------------------------------------------------------- /modules/discovery/yandexsearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/yandexsearch.pyc -------------------------------------------------------------------------------- /modules/discovery/dogpilesearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/dogpilesearch.pyc -------------------------------------------------------------------------------- /modules/discovery/exaleadsearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/exaleadsearch.pyc -------------------------------------------------------------------------------- /modules/discovery/linkedinsearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/linkedinsearch.pyc -------------------------------------------------------------------------------- /modules/discovery/shodan/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/shodan/__init__.pyc -------------------------------------------------------------------------------- /modules/discovery/twittersearch.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/twittersearch.pyc -------------------------------------------------------------------------------- /modules/discovery/googleplussearch.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TheEyeOfCyber/Magnifier/HEAD/modules/discovery/googleplussearch.pyc -------------------------------------------------------------------------------- /snap/snapcraft.yaml: -------------------------------------------------------------------------------- 1 | 2 | name: Magnifier 3 | version: '1.0' 4 | summary: Magnifier is an information gathing tool to gather info on IP or domains. 5 | description: | 6 | Infosploit is an information gathering tool used to gain info on an IP adrees or domain. Info gathered cand be tracerutes, geolocations, and even dns lookup. 7 | 8 | grade: stable 9 | confinement: strict 10 | 11 | parts: 12 | my-part: 13 | # See 'snapcraft plugins' 14 | plugin: nil 15 | 16 | -------------------------------------------------------------------------------- /modules/tests/myparser_test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Unit tests for myparser.py 3 | # 4 | import myparser 5 | 6 | import unittest 7 | 8 | class TestMyParser(unittest.TestCase): 9 | 10 | def test_emails(self): 11 | word = 'domain.com' 12 | results = '@domain.com***a@domain***banotherdomain.com***c@domain.com***d@sub.domain.com***' 13 | p = myparser.parser(results, word) 14 | emails = sorted(p.emails()) 15 | self.assertEquals(emails, [ 'c@domain.com', 'd@sub.domain.com' ]) 16 | 17 | if __name__ == '__main__': 18 | unittest.main() 19 | -------------------------------------------------------------------------------- /modules/discovery/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["bingsearch", 2 | "googlesearch", 3 | "googleplussearch", 4 | 
"pgpsearch", 5 | "linkedinsearch", 6 | "exaleadsearch", 7 | "yandexsearch", 8 | "googlesets", 9 | "dnssearch", 10 | "shodansearch", 11 | "jigsaw", 12 | "twittersearch", 13 | "dogpilesearch", 14 | "baidusearch", 15 | "yahoosearch", 16 | "netcraft", 17 | "crtsh", 18 | "virustotal", 19 | "googleCSE"] 20 | -------------------------------------------------------------------------------- /modules/lib/hostchecker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | """ 4 | Created by laramies on 2008-08-21. 5 | """ 6 | 7 | import sys 8 | import socket 9 | 10 | 11 | class Checker(): 12 | 13 | def __init__(self, hosts): 14 | self.hosts = hosts 15 | self.realhosts = [] 16 | 17 | def check(self): 18 | for x in self.hosts: 19 | try: 20 | res = socket.gethostbyname(x) 21 | self.realhosts.append(res + ":" + x) 22 | except Exception as e: 23 | pass 24 | return self.realhosts 25 | -------------------------------------------------------------------------------- /modules/discovery/shodansearch.py: -------------------------------------------------------------------------------- 1 | from shodan import WebAPI 2 | import sys 3 | 4 | 5 | class search_shodan(): 6 | 7 | def __init__(self, host): 8 | self.host = host 9 | self.key = "oCiMsgM6rQWqiTvPxFHYcExlZgg7wvTt" 10 | if self.key == "": 11 | print "You need an API key in order to use SHODAN database. 
You can get one here: http://www.shodanhq.com/" 12 | sys.exit() 13 | self.api = WebAPI(self.key) 14 | 15 | def run(self): 16 | try: 17 | host = self.api.host(self.host) 18 | return host['data'] 19 | except: 20 | print "SHODAN empty reply or error in the call" 21 | return "error" 22 | -------------------------------------------------------------------------------- /magnifier_1.0-1.dsc: -------------------------------------------------------------------------------- 1 | Format: 3.0 (quilt) 2 | Source: magnifier 3 | Binary: magnifier 4 | Architecture: any 5 | Version: 1.0-1 6 | Maintainer: Humayun Ali Khan 7 | Standards-Version: 1.0 8 | Build-Depends: debhelper (>= 11~), dh-python, python-all, python-setuptools 9 | Package-List: 10 | magnifier deb Information Gathering optional arch=any 11 | Checksums-Sha1: 12 | 6bce60ccf9b7408a71f595dbc432de4dfca21e39 502312 magnifier_1.0.orig.tar.gz 13 | 1f4f939e154ab863ad3216439ce8f7ce26dab357 740 magnifier_1.0-1.debian.tar.xz 14 | Checksums-Sha256: 15 | 2979e07c5a67f03dd624b10a54c09d14d7c10d4bd82bfeb749af78af76e40fcc 502312 magnifier_1.0.orig.tar.gz 16 | f0586344e0bd18abd88feb9297a9854d524f9bad64f624488961548550363eed 740 magnifier_1.0-1.debian.tar.xz 17 | Files: 18 | 35755aef690afed7ced4032636819e5c 502312 magnifier_1.0.orig.tar.gz 19 | 9af6611116d9b6dd90a87a65790a8483 740 magnifier_1.0-1.debian.tar.xz 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2008 Humayun Ali Khan 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 
| furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /modules/discovery/pgpsearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import httplib 3 | import sys 4 | import myparser 5 | 6 | 7 | class search_pgp: 8 | 9 | def __init__(self, word): 10 | self.word = word 11 | self.results = "" 12 | self.server = "pgp.mit.edu" 13 | #self.server = "pgp.rediris.es:11371" Not working at the moment 14 | self.hostname = "pgp.mit.edu" 15 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 16 | 17 | def process(self): 18 | h = httplib.HTTP(self.server) 19 | h.putrequest('GET', "/pks/lookup?search=" + self.word + "&op=index") 20 | h.putheader('Host', self.hostname) 21 | h.putheader('User-agent', self.userAgent) 22 | h.endheaders() 23 | returncode, returnmsg, headers = h.getreply() 24 | print returncode 25 | print returnmsg 26 | self.results = h.getfile().read() 27 | 28 | def get_emails(self): 29 | rawres = myparser.parser(self.results, self.word) 30 | return rawres.emails() 31 | 32 | def get_hostnames(self): 33 | rawres = myparser.parser(self.results, self.word) 34 | return rawres.hostnames() 35 | 
-------------------------------------------------------------------------------- /modules/discovery/crtsh.py: -------------------------------------------------------------------------------- 1 | import string 2 | import requests 3 | import sys 4 | import myparser 5 | import re 6 | 7 | 8 | class search_crtsh: 9 | 10 | def __init__(self, word): 11 | self.word = word.replace(' ', '%20') 12 | self.results = "" 13 | self.totalresults = "" 14 | self.server = "www.google.com" 15 | self.hostname = "www.google.com" 16 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7" 17 | self.quantity = "100" 18 | self.counter = 0 19 | 20 | 21 | def do_search(self): 22 | try: 23 | urly="https://crt.sh/?q=%25" + self.word 24 | except Exception, e: 25 | print e 26 | headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'} 27 | try: 28 | r=requests.get(urly,headers=headers) 29 | except Exception,e: 30 | print e 31 | self.results = r.content 32 | self.totalresults += self.results 33 | 34 | def get_hostnames(self): 35 | rawres = myparser.parser(self.results, self.word) 36 | return rawres.hostnames() 37 | 38 | def process(self): 39 | self.do_search() 40 | print "\tSearching CRT.sh results.." -------------------------------------------------------------------------------- /modules/discovery/DNS/Opcode.py: -------------------------------------------------------------------------------- 1 | """ 2 | $Id: Opcode.py,v 1.6 2002/04/23 10:51:43 anthonybaxter Exp $ 3 | 4 | This file is part of the pydns project. 5 | Homepage: http://pydns.sourceforge.net 6 | 7 | This code is covered by the standard Python License. 8 | 9 | Opcode values in message header. RFC 1035, 1996, 2136. 
10 | """ 11 | 12 | 13 | QUERY = 0 14 | IQUERY = 1 15 | STATUS = 2 16 | NOTIFY = 4 17 | UPDATE = 5 18 | 19 | # Construct reverse mapping dictionary 20 | 21 | _names = dir() 22 | opcodemap = {} 23 | for _name in _names: 24 | if _name[0] != '_': 25 | opcodemap[eval(_name)] = _name 26 | 27 | 28 | def opcodestr(opcode): 29 | if opcode in opcodemap: 30 | return opcodemap[opcode] 31 | else: 32 | return repr(opcode) 33 | 34 | # 35 | # $Log: Opcode.py,v $ 36 | # Revision 1.6 2002/04/23 10:51:43 anthonybaxter 37 | # Added UPDATE, NOTIFY. 38 | # 39 | # Revision 1.5 2002/03/19 12:41:33 anthonybaxter 40 | # tabnannied and reindented everything. 4 space indent, no tabs. 41 | # yay. 42 | # 43 | # Revision 1.4 2002/03/19 12:26:13 anthonybaxter 44 | # death to leading tabs. 45 | # 46 | # Revision 1.3 2001/08/09 09:08:55 anthonybaxter 47 | # added identifying header to top of each file 48 | # 49 | # Revision 1.2 2001/07/19 06:57:07 anthony 50 | # cvs keywords added 51 | # 52 | # 53 | -------------------------------------------------------------------------------- /modules/discovery/virustotal.py: -------------------------------------------------------------------------------- 1 | import string 2 | import requests 3 | import sys 4 | import myparser 5 | import re 6 | 7 | 8 | class search_virustotal: 9 | 10 | def __init__(self, word): 11 | self.word = word.replace(' ', '%20') 12 | self.results = "" 13 | self.totalresults = "" 14 | self.server = "www.google.com" 15 | self.hostname = "www.google.com" 16 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7" 17 | self.quantity = "100" 18 | self.counter = 0 19 | 20 | 21 | def do_search(self): 22 | try: 23 | urly="https://www.virustotal.com/en/domain/" + self.word + "/information/" 24 | except Exception, e: 25 | print e 26 | headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'} 27 | try: 28 | r=requests.get(urly,headers=headers) 29 | 
except Exception,e: 30 | print e 31 | self.results = r.content 32 | self.totalresults += self.results 33 | 34 | def get_hostnames(self): 35 | rawres = myparser.parser(self.results, self.word) 36 | return rawres.hostnames() 37 | 38 | def process(self): 39 | self.do_search() 40 | print "\tSearching CRT.sh results.." -------------------------------------------------------------------------------- /modules/discovery/googlesets.py: -------------------------------------------------------------------------------- 1 | import string 2 | import httplib 3 | import sys 4 | import myparser 5 | import re 6 | import time 7 | 8 | 9 | class search_google_labs: 10 | 11 | def __init__(self, list): 12 | self.results = "" 13 | self.totalresults = "" 14 | self.server = "labs.google.com" 15 | self.hostname = "labs.google.com" 16 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 17 | id = 0 18 | self.set = "" 19 | for x in list: 20 | id += 1 21 | if id == 1: 22 | self.set = self.set + "q" + str(id) + "=" + str(x) 23 | else: 24 | self.set = self.set + "&q" + str(id) + "=" + str(x) 25 | 26 | def do_search(self): 27 | h = httplib.HTTP(self.server) 28 | h.putrequest('GET', "/sets?hl=en&" + self.set) 29 | h.putheader('Host', self.hostname) 30 | h.putheader('User-agent', self.userAgent) 31 | h.endheaders() 32 | returncode, returnmsg, headers = h.getreply() 33 | self.results = h.getfile().read() 34 | self.totalresults += self.results 35 | 36 | def get_set(self): 37 | rawres = myparser.parser(self.totalresults, list) 38 | return rawres.set() 39 | 40 | def process(self): 41 | self.do_search() 42 | -------------------------------------------------------------------------------- /modules/discovery/netcraft.py: -------------------------------------------------------------------------------- 1 | import string 2 | import requests 3 | import sys 4 | import myparser 5 | import re 6 | 7 | 8 | class search_netcraft: 9 | 10 | def __init__(self, word): 
11 | self.word = word.replace(' ', '%20') 12 | self.results = "" 13 | self.totalresults = "" 14 | self.server = "www.google.com" 15 | self.hostname = "www.google.com" 16 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7" 17 | self.quantity = "100" 18 | self.counter = 0 19 | 20 | 21 | def do_search(self): 22 | try: 23 | urly="https://searchdns.netcraft.com/?restriction=site+ends+with&host=" + self.word 24 | except Exception, e: 25 | print e 26 | headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'} 27 | try: 28 | r=requests.get(urly,headers=headers) 29 | except Exception,e: 30 | print e 31 | self.results = r.content 32 | self.totalresults += self.results 33 | 34 | def get_hostnames(self): 35 | rawres = myparser.parser(self.results, self.word) 36 | return rawres.hostnames() 37 | 38 | def process(self): 39 | self.do_search() 40 | print "\tSearching Netcraft results.." 41 | -------------------------------------------------------------------------------- /modules/discovery/linkedinsearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import requests 3 | import sys 4 | import myparser 5 | import re 6 | 7 | 8 | class search_linkedin: 9 | 10 | def __init__(self, word, limit): 11 | self.word = word.replace(' ', '%20') 12 | self.results = "" 13 | self.totalresults = "" 14 | self.server = "www.google.com" 15 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 16 | self.quantity = "100" 17 | self.limit = int(limit) 18 | self.counter = 0 19 | 20 | def do_search(self): 21 | try: 22 | urly="http://"+ self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Alinkedin.com/in%20" + self.word 23 | except Exception, e: 24 | print e 25 | try: 26 | r=requests.get(urly) 27 | except Exception,e: 28 | print e 29 | self.results = r.content 30 | 
self.totalresults += self.results 31 | 32 | def get_people(self): 33 | rawres = myparser.parser(self.totalresults, self.word) 34 | return rawres.people_linkedin() 35 | 36 | def process(self): 37 | while (self.counter < self.limit): 38 | self.do_search() 39 | self.counter += 100 40 | print "\tSearching " + str(self.counter) + " results.." 41 | -------------------------------------------------------------------------------- /modules/discovery/baidusearch.py: -------------------------------------------------------------------------------- 1 | import httplib 2 | import myparser 3 | import time 4 | import sys 5 | 6 | 7 | class search_baidu: 8 | 9 | def __init__(self, word, limit): 10 | self.word = word 11 | self.total_results = "" 12 | self.server = "www.baidu.com" 13 | self.hostname = "www.baidu.com" 14 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 15 | self.limit = limit 16 | self.counter = 0 17 | 18 | def do_search(self): 19 | h = httplib.HTTP(self.server) 20 | 21 | h.putrequest('GET', "/s?wd=%40" + self.word 22 | + "&pn=" + str(self.counter)) 23 | h.putheader('Host', self.hostname) 24 | h.putheader('User-agent', self.userAgent) 25 | h.endheaders() 26 | returncode, returnmsg, headers = h.getreply() 27 | 28 | self.total_results += h.getfile().read() 29 | 30 | def process(self): 31 | while self.counter <= self.limit and self.counter <= 1000: 32 | self.do_search() 33 | time.sleep(1) 34 | 35 | print "\tSearching " + str(self.counter) + " results..." 
36 | self.counter += 10 37 | 38 | def get_emails(self): 39 | rawres = myparser.parser(self.total_results, self.word) 40 | return rawres.emails() 41 | 42 | def get_hostnames(self): 43 | rawres = myparser.parser(self.total_results, self.word) 44 | return rawres.hostnames() 45 | -------------------------------------------------------------------------------- /modules/discovery/yahoosearch.py: -------------------------------------------------------------------------------- 1 | import httplib 2 | import myparser 3 | import time 4 | import sys 5 | 6 | 7 | class search_yahoo: 8 | 9 | def __init__(self, word, limit): 10 | self.word = word 11 | self.total_results = "" 12 | self.server = "search.yahoo.com" 13 | self.hostname = "search.yahoo.com" 14 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 15 | self.limit = limit 16 | self.counter = 0 17 | 18 | def do_search(self): 19 | h = httplib.HTTP(self.server) 20 | 21 | h.putrequest('GET', "/search?p=\"%40" + self.word 22 | + "\"&b=" + str(self.counter) + "&pz=10") 23 | h.putheader('Host', self.hostname) 24 | h.putheader('User-agent', self.userAgent) 25 | h.endheaders() 26 | returncode, returnmsg, headers = h.getreply() 27 | 28 | self.total_results += h.getfile().read() 29 | 30 | def process(self): 31 | while self.counter <= self.limit and self.counter <= 1000: 32 | self.do_search() 33 | time.sleep(1) 34 | 35 | print "\tSearching " + str(self.counter) + " results..." 
36 | self.counter += 10 37 | 38 | def get_emails(self): 39 | rawres = myparser.parser(self.total_results, self.word) 40 | return rawres.emails() 41 | 42 | def get_hostnames(self): 43 | rawres = myparser.parser(self.total_results, self.word) 44 | return rawres.hostnames() 45 | -------------------------------------------------------------------------------- /modules/discovery/dogpilesearch.py: -------------------------------------------------------------------------------- 1 | import httplib 2 | import myparser 3 | import time 4 | import sys 5 | 6 | 7 | class search_dogpile: 8 | 9 | def __init__(self, word, limit): 10 | self.word = word 11 | self.total_results = "" 12 | self.server = "www.dogpile.com" 13 | self.hostname = "www.dogpile.com" 14 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 15 | self.limit = limit 16 | self.counter = 0 17 | 18 | def do_search(self): 19 | h = httplib.HTTP(self.server) 20 | 21 | # Dogpile is hardcoded to return 10 results 22 | h.putrequest('GET', "/search/web?qsi=" + str(self.counter) 23 | + "&q=\"%40" + self.word + "\"") 24 | h.putheader('Host', self.hostname) 25 | h.putheader('User-agent', self.userAgent) 26 | h.endheaders() 27 | returncode, returnmsg, headers = h.getreply() 28 | 29 | self.total_results += h.getfile().read() 30 | 31 | def process(self): 32 | while self.counter <= self.limit and self.counter <= 1000: 33 | self.do_search() 34 | time.sleep(1) 35 | 36 | print "\tSearching " + str(self.counter) + " results..." 
37 | self.counter += 10 38 | 39 | def get_emails(self): 40 | rawres = myparser.parser(self.total_results, self.word) 41 | return rawres.emails() 42 | 43 | def get_hostnames(self): 44 | rawres = myparser.parser(self.total_results, self.word) 45 | return rawres.hostnames() 46 | -------------------------------------------------------------------------------- /modules/discovery/twittersearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import requests 3 | import sys 4 | import myparser 5 | import re 6 | 7 | 8 | class search_twitter: 9 | 10 | def __init__(self, word, limit): 11 | self.word = word.replace(' ', '%20') 12 | self.results = "" 13 | self.totalresults = "" 14 | self.server = "www.google.com" 15 | self.hostname = "www.google.com" 16 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100116 Firefox/3.7" 17 | self.quantity = "100" 18 | self.limit = int(limit) 19 | self.counter = 0 20 | 21 | def do_search(self): 22 | try: 23 | urly="https://"+ self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Atwitter.com%20intitle%3A%22on+Twitter%22%20" + self.word 24 | except Exception, e: 25 | print e 26 | headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'} 27 | try: 28 | r=requests.get(urly,headers=headers) 29 | except Exception,e: 30 | print e 31 | self.results = r.content 32 | self.totalresults += self.results 33 | 34 | def get_people(self): 35 | rawres = myparser.parser(self.totalresults, self.word) 36 | return rawres.people_twitter() 37 | 38 | def process(self): 39 | while (self.counter < self.limit): 40 | self.do_search() 41 | self.counter += 100 42 | print "\tSearching " + str(self.counter) + " results.." 
"""
DNS CLASS values (RFC 1035 section 3.2.4) and their reverse mapping.

Part of the pydns project (http://pydns.sourceforge.net); this code is
covered by the standard Python License.
"""


IN = 1    # the Internet
CS = 2    # the CSNET class (obsolete - used only for examples in
          # some obsolete RFCs)
CH = 3    # the CHAOS class
HS = 4    # Hesiod [Dyer 87]

# QCLASS values (section 3.2.5)

ANY = 255  # any class


# Reverse mapping: numeric class value -> symbolic name.  The name
# snapshot is taken before `classmap` exists so the dict never tries to
# map itself; private (underscore) names are excluded.
_names = dir()
classmap = {}
for _name in _names:
    if not _name.startswith('_'):
        classmap[globals()[_name]] = _name


def classstr(klass):
    """Return the symbolic name for *klass*, or its repr if unknown."""
    return classmap.get(klass, repr(klass))
51 | # 52 | # Revision 1.3 2001/08/09 09:08:55 anthonybaxter 53 | # added identifying header to top of each file 54 | # 55 | # Revision 1.2 2001/07/19 06:57:07 anthony 56 | # cvs keywords added 57 | # 58 | # 59 | -------------------------------------------------------------------------------- /modules/discovery/googleplussearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import requests 3 | import sys 4 | import myparser 5 | import re 6 | 7 | 8 | class search_googleplus: 9 | 10 | def __init__(self, word, limit): 11 | self.word = word.replace(' ', '%20') 12 | self.results = "" 13 | self.totalresults = "" 14 | self.server = "www.google.com" 15 | self.hostname = "www.google.com" 16 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 17 | self.quantity = "100" 18 | self.limit = int(limit) 19 | self.counter = 0 20 | 21 | def do_search(self): 22 | try: 23 | urly="https://" + self.server + "/search?num=100&start=" + str(self.counter) + "&hl=en&meta=&q=site%3Aplus.google.com%20intext%3A%22Works%20at%22%20" + self.word+ "%20-inurl%3Aphotos%20-inurl%3Aabout%20-inurl%3Aposts%20-inurl%3Aplusones" 24 | except Exception, e: 25 | print e 26 | try: 27 | headers = {'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'} 28 | r=requests.get(urly,headers=headers) 29 | except Exception,e: 30 | print e 31 | self.results = r.content 32 | self.totalresults += self.results 33 | 34 | def get_people(self): 35 | rawres = myparser.parser(self.totalresults, self.word) 36 | return rawres.people_googleplus() 37 | 38 | def process(self): 39 | while (self.counter < self.limit): 40 | self.do_search() 41 | self.counter += 100 42 | print "\tSearching " + str(self.counter) + " results.." 
def revlookup(name):
    """Convenience reverse lookup: resolve dotted-quad *name* to a PTR record.

    Only one of any returned records is used (the first answer's data).
    """
    if Base.defaults['server'] == []:
        Base.DiscoverNameServers()
    octets = name.split('.')
    octets.reverse()
    target = '.'.join(octets) + '.in-addr.arpa'
    # this will only return one of any records returned.
    return Base.DnsRequest(target, qtype='ptr').req().answers[0]['data']


def mxlookup(name):
    """
    Convenience MX lookup of *name*; returns a sorted list of
    (preference, mail exchanger) records.
    """
    if Base.defaults['server'] == []:
        Base.DiscoverNameServers()
    answers = Base.DnsRequest(name, qtype='mx').req().answers
    return sorted(answer['data'] for answer in answers)
class search_ask:

    """Scrape Ask.com results for ``@word`` and extract people names."""

    def __init__(self, word, limit):
        # Spaces must be URL-encoded since the word is spliced into a URL.
        self.word = word.replace(' ', '%20')
        self.results = ""
        self.totalresults = ""
        self.server = "www.ask.com"
        self.hostname = "www.ask.com"
        self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
        self.quantity = "100"
        self.limit = int(limit)
        self.counter = 0

    def do_search(self):
        """Fetch one page of results and append the raw HTML."""
        h = httplib.HTTP(self.server)
        # BUG FIX: self.counter is an int; concatenating it directly into
        # the URL raised TypeError.  It must be stringified first.
        h.putrequest(
            'GET',
            "/web?q=%40" +
            self.word +
            "&pu=100&page=" +
            str(self.counter))
        h.putheader('User-agent', self.userAgent)
        h.endheaders()
        returncode, returnmsg, headers = h.getreply()
        self.results = h.getfile().read()
        self.totalresults += self.results

    def check_next(self):
        """Return "1" if the last page links to further results, else "0"."""
        renext = re.compile('> Next <')
        nextres = renext.findall(self.results)
        if nextres != []:
            nexty = "1"
        else:
            nexty = "0"
        return nexty

    def get_people(self):
        """Parse people names out of the accumulated HTML."""
        rawres = myparser.parser(self.totalresults, self.word)
        return rawres.people_jigsaw()

    def process(self):
        """Keep paging while a "Next" link is present and below the limit."""
        while (self.counter < self.limit):
            self.do_search()
            more = self.check_next()
            if more == "1":
                self.counter += 100
            else:
                break
-------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | # $Id: __init__.py,v 1.8.2.2 2007/05/22 21:06:52 customdesigned Exp $ 3 | # 4 | # This file is part of the pydns project. 5 | # Homepage: http://pydns.sourceforge.net 6 | # 7 | # This code is covered by the standard Python License. 8 | # 9 | 10 | # __init__.py for DNS class. 11 | 12 | __version__ = '2.3.1' 13 | 14 | import Type 15 | import Opcode 16 | import Status 17 | import Class 18 | from Base import DnsRequest, DNSError 19 | from Lib import DnsResult 20 | from Base import * 21 | from Lib import * 22 | Error = DNSError 23 | from lazy import * 24 | Request = DnsRequest 25 | Result = DnsResult 26 | 27 | # 28 | # $Log: __init__.py,v $ 29 | # Revision 1.8.2.2 2007/05/22 21:06:52 customdesigned 30 | # utf-8 in __init__.py 31 | # 32 | # Revision 1.8.2.1 2007/05/22 20:39:20 customdesigned 33 | # Release 2.3.1 34 | # 35 | # Revision 1.8 2002/05/06 06:17:49 anthonybaxter 36 | # found that the old README file called itself release 2.2. So make 37 | # this one 2.3... 38 | # 39 | # Revision 1.7 2002/05/06 06:16:15 anthonybaxter 40 | # make some sort of reasonable version string. releasewards ho! 41 | # 42 | # Revision 1.6 2002/03/19 13:05:02 anthonybaxter 43 | # converted to class based exceptions (there goes the python1.4 compatibility :) 44 | # 45 | # removed a quite gross use of 'eval()'. 46 | # 47 | # Revision 1.5 2002/03/19 12:41:33 anthonybaxter 48 | # tabnannied and reindented everything. 4 space indent, no tabs. 49 | # yay. 
class search_jigsaw:

    """Scrape Jigsaw.com free-text search for contact names."""

    def __init__(self, word, limit):
        # Spaces must be URL-encoded since the word is spliced into a URL.
        self.word = word.replace(' ', '%20')
        self.results = ""
        self.totalresults = ""
        self.server = "www.jigsaw.com"
        self.hostname = "www.jigsaw.com"
        self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
        self.quantity = "100"
        self.limit = int(limit)
        self.counter = 0

    def do_search(self):
        """Fetch one page of free-text search results and append the HTML."""
        path = "/FreeTextSearch.xhtml?opCode=search&autoSuggested=True&freeText=" + self.word
        conn = httplib.HTTP(self.server)
        conn.putrequest('GET', path)
        conn.putheader('User-agent', self.userAgent)
        conn.endheaders()
        returncode, returnmsg, headers = conn.getreply()
        self.results = conn.getfile().read()
        self.totalresults += self.results

    def check_next(self):
        """Return "1" if the last page links to further results, else "0"."""
        if re.findall('> Next <', self.results):
            return "1"
        return "0"

    def get_people(self):
        """Parse people names out of the accumulated HTML."""
        parsed = myparser.parser(self.totalresults, self.word)
        return parsed.people_jigsaw()

    def process(self):
        """Keep paging while a "Next" link is present and below the limit."""
        while (self.counter < self.limit):
            self.do_search()
            if self.check_next() == "1":
                self.counter += 100
            else:
                break
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Goofile v1.5 3 | # My Website: http://www.g13net.com 4 | # Project Page: http://code.google.com/p/goofile 5 | # 6 | # TheHarvester used for inspiration 7 | # A many thanks to the Edge-Security team! 8 | # 9 | 10 | import string 11 | import httplib 12 | import sys 13 | import re 14 | import getopt 15 | 16 | global result 17 | result =[] 18 | 19 | def usage(): 20 | print "Goofile 1.5\n" 21 | print "usage: goofile options \n" 22 | print " -d: domain to search\n" 23 | print " -f: filetype (ex. pdf)\n" 24 | print "example:./goofile.py -d test.com -f txt\n" 25 | sys.exit() 26 | 27 | def run(dmn,file): 28 | 29 | h = httplib.HTTP('www.google.com') 30 | h.putrequest('GET',"/search?num=500&q=site:"+dmn+"+filetype:"+file) 31 | h.putheader('Host', 'www.google.com') 32 | h.putheader('User-agent', 'Internet Explorer 6.0 ') 33 | h.putheader('Referrer', 'www.g13net.com') 34 | h.endheaders() 35 | returncode, returnmsg, headers = h.getreply() 36 | data=h.getfile().read() 37 | data=re.sub('','',data) 38 | for e in ('>','=','<','\\','(',')','"','http',':','//'): 39 | data = string.replace(data,e,' ') 40 | r1 = re.compile('[-_.a-zA-Z0-9.-_]*'+'\.'+file) 41 | res = r1.findall(data) 42 | return res 43 | 44 | 45 | def search(argv): 46 | global limit 47 | limit = 100 48 | if len(sys.argv) < 2: 49 | usage() 50 | try : 51 | opts, args = getopt.getopt(argv,"d:f:") 52 | 53 | except getopt.GetoptError: 54 | usage() 55 | sys.exit() 56 | 57 | for opt,arg in opts : 58 | if opt == '-f' : 59 | file=arg 60 | elif opt == '-d': 61 | dmn=arg 62 | 63 | print "Searching in "+dmn+" for "+ file 64 | print "========================================" 65 | 66 | 67 | cant = 0 68 | 69 | while cant < limit: 70 | res = run(dmn,file) 71 | for x in res: 72 | if result.count(x) == 0: 73 | result.append(x) 74 | cant+=100 75 | 76 | 77 | print "\nFiles found:" 78 | print "====================\n" 79 | t=0 80 | 
if result==[]: 81 | print "No results were found" 82 | else: 83 | for x in result: 84 | x= re.sub('
  • ','',x) 85 | x= re.sub('
  • ','',x) 86 | print x 87 | t+=1 88 | print "====================\n" 89 | 90 | 91 | if __name__ == "__main__": 92 | try: search(sys.argv[1:]) 93 | except KeyboardInterrupt: 94 | print "Search interrupted by user.." 95 | except: 96 | sys.exit() 97 | -------------------------------------------------------------------------------- /modules/discovery/DNS/Status.py: -------------------------------------------------------------------------------- 1 | """ 2 | $Id: Status.py,v 1.7 2002/04/23 12:52:19 anthonybaxter Exp $ 3 | 4 | This file is part of the pydns project. 5 | Homepage: http://pydns.sourceforge.net 6 | 7 | This code is covered by the standard Python License. 8 | 9 | Status values in message header 10 | """ 11 | 12 | NOERROR = 0 # No Error [RFC 1035] 13 | FORMERR = 1 # Format Error [RFC 1035] 14 | SERVFAIL = 2 # Server Failure [RFC 1035] 15 | NXDOMAIN = 3 # Non-Existent Domain [RFC 1035] 16 | NOTIMP = 4 # Not Implemented [RFC 1035] 17 | REFUSED = 5 # Query Refused [RFC 1035] 18 | YXDOMAIN = 6 # Name Exists when it should not [RFC 2136] 19 | YXRRSET = 7 # RR Set Exists when it should not [RFC 2136] 20 | NXRRSET = 8 # RR Set that should exist does not [RFC 2136] 21 | NOTAUTH = 9 # Server Not Authoritative for zone [RFC 2136] 22 | NOTZONE = 10 # Name not contained in zone [RFC 2136] 23 | BADVERS = 16 # Bad OPT Version [RFC 2671] 24 | BADSIG = 16 # TSIG Signature Failure [RFC 2845] 25 | BADKEY = 17 # Key not recognized [RFC 2845] 26 | BADTIME = 18 # Signature out of time window [RFC 2845] 27 | BADMODE = 19 # Bad TKEY Mode [RFC 2930] 28 | BADNAME = 20 # Duplicate key name [RFC 2930] 29 | BADALG = 21 # Algorithm not supported [RFC 2930] 30 | 31 | # Construct reverse mapping dictionary 32 | 33 | _names = dir() 34 | statusmap = {} 35 | for _name in _names: 36 | if _name[0] != '_': 37 | statusmap[eval(_name)] = _name 38 | 39 | 40 | def statusstr(status): 41 | if status in statusmap: 42 | return statusmap[status] 43 | else: 44 | return repr(status) 45 | 46 | # 47 | # 
$Log: Status.py,v $ 48 | # Revision 1.7 2002/04/23 12:52:19 anthonybaxter 49 | # cleanup whitespace. 50 | # 51 | # Revision 1.6 2002/04/23 10:57:57 anthonybaxter 52 | # update to complete the list of response codes. 53 | # 54 | # Revision 1.5 2002/03/19 12:41:33 anthonybaxter 55 | # tabnannied and reindented everything. 4 space indent, no tabs. 56 | # yay. 57 | # 58 | # Revision 1.4 2002/03/19 12:26:13 anthonybaxter 59 | # death to leading tabs. 60 | # 61 | # Revision 1.3 2001/08/09 09:08:55 anthonybaxter 62 | # added identifying header to top of each file 63 | # 64 | # Revision 1.2 2001/07/19 06:57:07 anthony 65 | # cvs keywords added 66 | # 67 | # 68 | -------------------------------------------------------------------------------- /modules/sub.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python2 2 | # -*- coding: utf-8 -*- 3 | 4 | # _____ _ _ _ 5 | #/ ___| | | | | (_) 6 | #\ `--. _ _| |__ __| | ___ _ __ ___ __ _ _ _ __ 7 | # `--. 
\ | | | '_ \ / _` |/ _ \| '_ ` _ \ / _` | | '_ \ 8 | #/\__/ / |_| | |_) | (_| | (_) | | | | | | (_| | | | | | 9 | #\____/ \__,_|_.__/ \__,_|\___/|_| |_| |_|\__,_|_|_| |_| 10 | # author : joker-Security 11 | # WEBSITE : http://dev-labs.co 12 | # GITHUB : https://github.com/joker25000 13 | # Twitter : https://twitter.com/SecurityJoker 14 | # YOUTUBE : https://www.youtube.com/c/Professionalhacker25 15 | # FACE Pg : https://facebook.com/kali.linux.pentesting.tutorials 16 | 17 | #Module dependencies 18 | import json 19 | import requests 20 | import argparse 21 | from urlparse import urlparse 22 | 23 | #Colors 24 | class colors(): 25 | bleu = "\033[94m" 26 | rouge= "\033[91m" 27 | 28 | def results(algeria): 29 | with open ('resultat.txt','w') as k: 30 | json.dump(algeria,k) 31 | parser = argparse.ArgumentParser(description='Simple Script Python For Searching Subdomain And Domain ') 32 | parser.add_argument('-t','--targets', action='store',help='Your Target Choice',required=True) 33 | parser.add_argument('-l','--language', help='Your Language Of Searching \n\n\t(fr)-French\n\t(en)-English', required=False) 34 | kader = parser.parse_args() 35 | targets=kader.targets 36 | language=kader.language 37 | if language is None: 38 | language="en" 39 | if ((language != "fr") and (language !="en")): 40 | print "\033[91mThis Language not available" 41 | exit(1) 42 | link="https://sedo.com/service/common.php?safe_search=2&synonyms=true&number_of_words_min=1&number_of_words_max=0&len_min=1&len_max=0&special_characters%5B%5D=3&special_characters%5B%5D=1&special_characters%5B%5D=2&cat%5B%5D=0&cat%5B%5D=0&cat%5B%5D=0&type=0&special_inventory=4&kws=contains&age_min=0&age_max=0&keyword="+targets+"&page=1&rel=6&orderdirection=2&domainIds=&cc=&member=&v=0.1&o=json&m=search&f=requestSearch&pagesize=100&keywords_join=AND&language="+language 43 | joker=requests.get(link) 44 | sanfour=joker.text 45 | algeria = json.loads(sanfour) 46 | 47 | name=[] 48 | print "Your Target Choice :\033[91m"+targets 
49 | for dz in algeria['b']['general']['searchRequest']['resultList']: 50 | if dz != None: 51 | print dz['0'] 52 | name.append(dz['0']) 53 | results(name) 54 | -------------------------------------------------------------------------------- /modules/discovery/yandexsearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import httplib 3 | import sys 4 | import myparser 5 | import re 6 | import time 7 | 8 | 9 | class search_yandex: 10 | 11 | def __init__(self, word, limit, start): 12 | self.word = word 13 | self.results = "" 14 | self.totalresults = "" 15 | self.server = "yandex.com" 16 | self.hostname = "yandex.com" 17 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 18 | self.limit = limit 19 | self.counter = start 20 | 21 | def do_search(self): 22 | h = httplib.HTTP(self.server) 23 | h.putrequest('GET', "/search?text=%40" + self.word + 24 | "&numdoc=50&lr=" + str(self.counter)) 25 | h.putheader('Host', self.hostname) 26 | h.putheader('User-agent', self.userAgent) 27 | h.endheaders() 28 | returncode, returnmsg, headers = h.getreply() 29 | self.results = h.getfile().read() 30 | self.totalresults += self.results 31 | print self.results 32 | 33 | def do_search_files(self, files): # TODO 34 | h = httplib.HTTP(self.server) 35 | h.putrequest('GET', "/search?text=%40" + self.word + 36 | "&numdoc=50&lr=" + str(self.counter)) 37 | h.putheader('Host', self.hostname) 38 | h.putheader('User-agent', self.userAgent) 39 | h.endheaders() 40 | returncode, returnmsg, headers = h.getreply() 41 | self.results = h.getfile().read() 42 | self.totalresults += self.results 43 | 44 | def check_next(self): 45 | renext = re.compile('topNextUrl') 46 | nextres = renext.findall(self.results) 47 | if nextres != []: 48 | nexty = "1" 49 | print str(self.counter) 50 | else: 51 | nexty = "0" 52 | return nexty 53 | 54 | def get_emails(self): 55 | rawres = 
myparser.parser(self.totalresults, self.word) 56 | return rawres.emails() 57 | 58 | def get_hostnames(self): 59 | rawres = myparser.parser(self.totalresults, self.word) 60 | return rawres.hostnames() 61 | 62 | def get_files(self): 63 | rawres = myparser.parser(self.totalresults, self.word) 64 | return rawres.fileurls(self.files) 65 | 66 | def process(self): 67 | while self.counter <= self.limit: 68 | self.do_search() 69 | self.counter += 50 70 | print "Searching " + str(self.counter) + " results..." 71 | 72 | def process_files(self, files): 73 | while self.counter < self.limit: 74 | self.do_search_files(files) 75 | time.sleep(0.3) 76 | self.counter += 50 77 | -------------------------------------------------------------------------------- /modules/discovery/googlesearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import sys 3 | import myparser 4 | import re 5 | import time 6 | import requests 7 | 8 | 9 | class search_google: 10 | 11 | def __init__(self, word, limit, start): 12 | self.word = word 13 | self.results = "" 14 | self.totalresults = "" 15 | self.server = "www.google.com" 16 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 17 | self.quantity = "100" 18 | self.limit = limit 19 | self.counter = start 20 | 21 | def do_search(self): 22 | try: 23 | urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=%40\"" + self.word + "\"" 24 | except Exception, e: 25 | print e 26 | try: 27 | r=requests.get(urly) 28 | except Exception,e: 29 | print e 30 | self.results = r.content 31 | self.totalresults += self.results 32 | 33 | 34 | def do_search_profiles(self): 35 | try: 36 | urly="http://" + self.server + "/search?num=" + self.quantity + "&start=" + str(self.counter) + "&hl=en&meta=&q=site:www.google.com%20intitle:\"Google%20Profile\"%20\"Companies%20I%27ve%20worked%20for\"%20\"at%20" + self.word 
+ "\"" 37 | except Exception, e: 38 | print e 39 | try: 40 | r=requests.get(urly) 41 | except Exception,e: 42 | print e 43 | self.results = r.content 44 | 45 | #'&hl=en&meta=&q=site:www.google.com%20intitle:"Google%20Profile"%20"Companies%20I%27ve%20worked%20for"%20"at%20' + self.word + '"') 46 | self.totalresults += self.results 47 | 48 | def get_emails(self): 49 | rawres = myparser.parser(self.totalresults, self.word) 50 | return rawres.emails() 51 | 52 | def get_hostnames(self): 53 | rawres = myparser.parser(self.totalresults, self.word) 54 | return rawres.hostnames() 55 | 56 | def get_files(self): 57 | rawres = myparser.parser(self.totalresults, self.word) 58 | return rawres.fileurls(self.files) 59 | 60 | def get_profiles(self): 61 | rawres = myparser.parser(self.totalresults, self.word) 62 | return rawres.profiles() 63 | 64 | def process(self): 65 | while self.counter <= self.limit and self.counter <= 1000: 66 | self.do_search() 67 | #more = self.check_next() 68 | time.sleep(1) 69 | self.counter += 100 70 | 71 | 72 | def process_profiles(self): 73 | while self.counter < self.limit: 74 | self.do_search_profiles() 75 | time.sleep(0.3) 76 | self.counter += 100 -------------------------------------------------------------------------------- /modules/discovery/exaleadsearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import httplib 3 | import sys 4 | import myparser 5 | import re 6 | import time 7 | 8 | 9 | class search_exalead: 10 | 11 | def __init__(self, word, limit, start): 12 | self.word = word 13 | self.files = "pdf" 14 | self.results = "" 15 | self.totalresults = "" 16 | self.server = "www.exalead.com" 17 | self.hostname = "www.exalead.com" 18 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/4.0" 19 | self.limit = limit 20 | self.counter = start 21 | 22 | def do_search(self): 23 | h = httplib.HTTP(self.server) 24 | h.putrequest('GET', 
"/search/web/results/?q=%40" + self.word + 25 | "&elements_per_page=50&start_index=" + str(self.counter)) 26 | h.putheader('Host', self.hostname) 27 | h.putheader( 28 | 'Referer', 29 | "http://" + 30 | self.hostname + 31 | "/search/web/results/?q=%40" + 32 | self.word) 33 | h.putheader('User-agent', self.userAgent) 34 | h.endheaders() 35 | returncode, returnmsg, headers = h.getreply() 36 | self.results = h.getfile().read() 37 | self.totalresults += self.results 38 | 39 | def do_search_files(self, files): 40 | h = httplib.HTTP(self.server) 41 | h.putrequest( 42 | 'GET', 43 | "search/web/results/?q=" + 44 | self.word + 45 | "filetype:" + 46 | self.files + 47 | "&elements_per_page=50&start_index=" + 48 | self.counter) 49 | h.putheader('Host', self.hostname) 50 | h.putheader('User-agent', self.userAgent) 51 | h.endheaders() 52 | returncode, returnmsg, headers = h.getreply() 53 | self.results = h.getfile().read() 54 | self.totalresults += self.results 55 | 56 | def check_next(self): 57 | renext = re.compile('topNextUrl') 58 | nextres = renext.findall(self.results) 59 | if nextres != []: 60 | nexty = "1" 61 | print str(self.counter) 62 | else: 63 | nexty = "0" 64 | return nexty 65 | 66 | def get_emails(self): 67 | rawres = myparser.parser(self.totalresults, self.word) 68 | return rawres.emails() 69 | 70 | def get_hostnames(self): 71 | rawres = myparser.parser(self.totalresults, self.word) 72 | return rawres.hostnames() 73 | 74 | def get_files(self): 75 | rawres = myparser.parser(self.totalresults, self.word) 76 | return rawres.fileurls(self.files) 77 | 78 | def process(self): 79 | while self.counter <= self.limit: 80 | self.do_search() 81 | self.counter += 50 82 | 83 | def process_files(self, files): 84 | while self.counter < self.limit: 85 | self.do_search_files(files) 86 | time.sleep(1) 87 | more = self.check_next() 88 | if more == "1": 89 | self.counter += 50 90 | else: 91 | break -------------------------------------------------------------------------------- 
/modules/discovery/bingsearch.py: -------------------------------------------------------------------------------- 1 | import string 2 | import httplib 3 | import sys 4 | import myparser 5 | import re 6 | import time 7 | 8 | 9 | class search_bing: 10 | 11 | def __init__(self, word, limit, start): 12 | self.word = word.replace(' ', '%20') 13 | self.results = "" 14 | self.totalresults = "" 15 | self.server = "www.bing.com" 16 | self.apiserver = "api.search.live.net" 17 | self.hostname = "www.bing.com" 18 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 19 | self.quantity = "50" 20 | self.limit = int(limit) 21 | self.bingApi = "" 22 | self.counter = start 23 | 24 | def do_search(self): 25 | h = httplib.HTTP(self.server) 26 | h.putrequest('GET', "/search?q=%40" + self.word + 27 | "&count=50&first=" + str(self.counter)) 28 | h.putheader('Host', self.hostname) 29 | h.putheader('Cookie', 'SRCHHPGUSR=ADLT=DEMOTE&NRSLT=50') 30 | h.putheader('Accept-Language', 'en-us,en') 31 | h.putheader('User-agent', self.userAgent) 32 | h.endheaders() 33 | returncode, returnmsg, headers = h.getreply() 34 | self.results = h.getfile().read() 35 | self.totalresults += self.results 36 | 37 | def do_search_api(self): 38 | h = httplib.HTTP(self.apiserver) 39 | h.putrequest('GET', "/xml.aspx?Appid=" + self.bingApi + "&query=%40" + 40 | self.word + "&sources=web&web.count=40&web.offset=" + str(self.counter)) 41 | h.putheader('Host', "api.search.live.net") 42 | h.putheader('User-agent', self.userAgent) 43 | h.endheaders() 44 | returncode, returnmsg, headers = h.getreply() 45 | self.results = h.getfile().read() 46 | self.totalresults += self.results 47 | 48 | def do_search_vhost(self): 49 | h = httplib.HTTP(self.server) 50 | h.putrequest('GET', "/search?q=ip:" + self.word + 51 | "&go=&count=50&FORM=QBHL&qs=n&first=" + str(self.counter)) 52 | h.putheader('Host', self.hostname) 53 | h.putheader( 54 | 'Cookie', 
'mkt=en-US;ui=en-US;SRCHHPGUSR=NEWWND=0&ADLT=DEMOTE&NRSLT=50') 55 | h.putheader('Accept-Language', 'en-us,en') 56 | h.putheader('User-agent', self.userAgent) 57 | h.endheaders() 58 | returncode, returnmsg, headers = h.getreply() 59 | self.results = h.getfile().read() 60 | self.totalresults += self.results 61 | 62 | def get_emails(self): 63 | rawres = myparser.parser(self.totalresults, self.word) 64 | return rawres.emails() 65 | 66 | def get_hostnames(self): 67 | rawres = myparser.parser(self.totalresults, self.word) 68 | return rawres.hostnames() 69 | 70 | def get_allhostnames(self): 71 | rawres = myparser.parser(self.totalresults, self.word) 72 | return rawres.hostnames_all() 73 | 74 | def process(self, api): 75 | if api == "yes": 76 | if self.bingApi == "": 77 | print "Please insert your API key in the discovery/bingsearch.py" 78 | sys.exit() 79 | while (self.counter < self.limit): 80 | if api == "yes": 81 | self.do_search_api() 82 | time.sleep(0.3) 83 | else: 84 | self.do_search() 85 | time.sleep(1) 86 | self.counter += 50 87 | 88 | def process_vhost(self): 89 | # Maybe it is good to use other limit for this. 
90 | while (self.counter < self.limit): 91 | self.do_search_vhost() 92 | self.counter += 50 93 | -------------------------------------------------------------------------------- /modules/discovery/googleCSE.py: -------------------------------------------------------------------------------- 1 | import string 2 | import httplib 3 | import sys 4 | import myparser 5 | import re 6 | import time 7 | 8 | 9 | class search_googleCSE: 10 | 11 | def __init__(self, word, limit, start): 12 | self.word = word 13 | self.files = "pdf" 14 | self.results = "" 15 | self.totalresults = "" 16 | self.server = "www.googleapis.com" 17 | self.hostname = "www.googleapis.com" 18 | self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6" 19 | self.quantity = "10" 20 | self.limit = limit 21 | self.counter = 1 22 | self.api_key = "" 23 | self.cse_id = "" 24 | self.lowRange = start 25 | self.highRange = start+100 26 | 27 | def do_search(self): 28 | h = httplib.HTTPS(self.server) 29 | h.putrequest('GET', "/customsearch/v1?key=" + self.api_key +"&highRange=" + str(self.highRange) + "&lowRange=" + str(self.lowRange) + "&cx=" +self.cse_id + 30 | "&start=" + str(self.counter) + "&q=%40\"" + self.word + "\"") 31 | h.putheader('Host', self.server) 32 | h.putheader('User-agent', self.userAgent) 33 | h.endheaders() 34 | returncode, returnmsg, headers = h.getreply() 35 | self.results = h.getfile().read() 36 | self.totalresults += self.results 37 | 38 | def do_search_files(self): 39 | h = httplib.HTTPS(self.server) 40 | h.putrequest('GET', "/customsearch/v1?key=" + self.api_key +"&highRange=" + str(self.highRange) + "&lowRange=" + str(self.lowRange) + "&cx=" +self.cse_id + 41 | "&start=" + str(self.counter) + "&q=filetype:" + files +"%20site:" + self.word) 42 | h.putheader('Host', self.server) 43 | h.putheader('User-agent', self.userAgent) 44 | h.endheaders() 45 | returncode, returnmsg, headers = h.getreply() 46 | self.results = h.getfile().read() 47 | 
self.totalresults += self.results 48 | 49 | 50 | def check_next(self): 51 | renext = re.compile('> Next <') 52 | nextres = renext.findall(self.results) 53 | if nextres != []: 54 | nexty = "1" 55 | else: 56 | nexty = "0" 57 | return nexty 58 | 59 | def get_emails(self): 60 | rawres = myparser.parser(self.totalresults, self.word) 61 | return rawres.emails() 62 | 63 | def get_hostnames(self): 64 | rawres = myparser.parser(self.totalresults, self.word) 65 | return rawres.hostnames() 66 | 67 | def get_files(self): 68 | rawres = myparser.parser(self.totalresults, self.word) 69 | return rawres.fileurls(self.files) 70 | 71 | 72 | def process(self): 73 | tracker=self.counter + self.lowRange 74 | while tracker <= self.limit: 75 | self.do_search() 76 | #time.sleep(1) 77 | ESC=chr(27) 78 | sys.stdout.write(ESC + '[2K' + ESC+'[G') 79 | sys.stdout.write("\r\t" + "Searching " + str(self.counter+self.lowRange) + " results ..." ) 80 | sys.stdout.flush() 81 | #print "\tSearching " + str(self.counter+self.lowRange) + " results...\t\t\t\t\t\r" 82 | if self.counter == 101: 83 | self.counter = 1 84 | self.lowRange +=100 85 | self.highRange +=100 86 | else: 87 | self.counter += 10 88 | tracker=self.counter + self.lowRange 89 | 90 | def store_results(self): 91 | filename = "debug_results.txt" 92 | file = open(filename, 'w') 93 | file.write(self.totalresults) 94 | 95 | 96 | def process_files(self, files): 97 | while self.counter <= self.limit: 98 | self.do_search_files(files) 99 | time.sleep(1) 100 | self.counter += 100 101 | print "\tSearching " + str(self.counter) + " results..." 
102 | -------------------------------------------------------------------------------- /modules/discovery/dnssearch-threads.py: -------------------------------------------------------------------------------- 1 | import IPy 2 | import DNS 3 | import string 4 | import socket 5 | import sys 6 | 7 | 8 | class dns_reverse(): 9 | 10 | def __init__(self, range, verbose=True): 11 | self.range = range 12 | self.iplist = '' 13 | self.results = [] 14 | self.verbose = verbose 15 | try: 16 | DNS.ParseResolvConf("/etc/resolv.conf") 17 | nameserver = DNS.defaults['server'][0] 18 | except: 19 | print "Error in DNS resolvers" 20 | sys.exit() 21 | 22 | def run(self, host): 23 | a = string.split(host, '.') 24 | a.reverse() 25 | b = string.join(a, '.') + '.in-addr.arpa' 26 | nameserver = DNS.defaults['server'][0] 27 | if self.verbose: 28 | ESC = chr(27) 29 | sys.stdout.write(ESC + '[2K' + ESC + '[G') 30 | sys.stdout.write("\r" + host) 31 | sys.stdout.flush() 32 | try: 33 | name = DNS.Base.DnsRequest(b, qtype='ptr').req().answers[0]['data'] 34 | return host + ":" + name 35 | except: 36 | pass 37 | 38 | def get_ip_list(self, ips): 39 | """Generates the list of ips to reverse""" 40 | try: 41 | list = IPy.IP(ips) 42 | except: 43 | print "Error in IP format, check the input and try again. (Eg. 
192.168.1.0/24)" 44 | sys.exit() 45 | name = [] 46 | for x in list: 47 | name.append(str(x)) 48 | return name 49 | 50 | def list(self): 51 | self.iplist = self.get_ip_list(self.range) 52 | return self.iplist 53 | 54 | def process(self): 55 | for x in self.iplist: 56 | host = self.run(x) 57 | if host is not None: 58 | self.results.append(host) 59 | return self.results 60 | 61 | 62 | class dns_force(): 63 | 64 | def __init__(self, domain, dnsserver, verbose=False): 65 | self.domain = domain 66 | self.server = dnsserver 67 | self.file = "dns-names.txt" 68 | self.subdo = False 69 | self.verbose = verbose 70 | try: 71 | f = open(self.file, "r") 72 | except: 73 | print "Error opening dns dictionary file" 74 | sys.exit() 75 | self.list = f.readlines() 76 | 77 | def getdns(self, domain): 78 | DNS.ParseResolvConf("/etc/resolv.conf") 79 | nameserver = DNS.defaults['server'][0] 80 | dom = domain 81 | if self.subdo == True: 82 | dom = domain.split(".") 83 | dom.pop(0) 84 | rootdom = ".".join(dom) 85 | else: 86 | rootdom = dom 87 | if self.server == False: 88 | r = DNS.Request(rootdom, qtype='SOA').req() 89 | primary, email, serial, refresh, retry, expire, minimum = r.answers[ 90 | 0]['data'] 91 | test = DNS.Request(rootdom, qtype='NS', server=primary, aa=1).req() 92 | if test.header['status'] != "NOERROR": 93 | print "Error" 94 | sys.exit() 95 | self.nameserver = test.answers[0]['data'] 96 | return self.nameserver 97 | 98 | def run(self, host): 99 | self.nameserver = self.getdns(self.domain) 100 | hostname = str(host.split("\n")[0]) + "." 
+ str(self.domain) 101 | # nameserver=DNS.defaults['server'][0] 102 | if self.verbose: 103 | ESC = chr(27) 104 | sys.stdout.write(ESC + '[2K' + ESC + '[G') 105 | sys.stdout.write("\r" + hostname) 106 | sys.stdout.flush() 107 | try: 108 | test = DNS.Request( 109 | hostname, 110 | qtype='a', 111 | server=self.nameserver).req( 112 | ) 113 | hostip = test.answers[0]['data'] 114 | return hostip + ":" + hostname 115 | except Exception as e: 116 | pass 117 | 118 | def process(self): 119 | results = [] 120 | for x in self.list: 121 | host = self.run(x) 122 | if host is not None: 123 | results.append(host) 124 | return results 125 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

    Magnifier


    2 |

    3 | 4 | GitPoint 5 | 6 |

    7 | 8 |

    9 | A Python tool for information gathering 💁‍♀️ 10 |

    11 | 12 | 13 | 14 | ## Table of Contents 15 | 16 | - [Introduction](#introduction) 17 | - [Features](#features) 18 | - [Dependencies](#dependencies) 19 | - [Installation](#installation) 20 | - [Feedback](#feedback) 21 | 22 | 23 | 24 | 25 | 26 | ## Introduction 27 |

    Welcome

    28 |

    29 | Magnifier is an information-gathering tool. It scans websites to collect information and find vulnerabilities in websites and web apps, and it is one of the easiest and most useful tools for performing reconnaissance. Magnifier is available for Linux, Windows, and Android phones (Termux), and is written in both Bash and Python. Its interface is very similar to Metasploit's. Magnifier provides a command-line interface that you can run on Kali Linux, and it can be used to gather information about any target domain. The interactive console provides a number of helpful features, such as command completion and contextual help. This tool is written in Python; you must have Python installed on your Kali Linux system to use it. Magnifier can detect WordPress, Drupal, Joomla, and Magento CMS installations, WordPress sensitive files, and WordPress version-related vulnerabilities. Magnifier uses different modules to perform all of its scanning. The whois data collection provides information on GeoIP lookup, banner grabbing, DNS lookup, port scanning, sub-domain information, reverse IP, and MX record lookup. Overall, Magnifier is a vulnerability scanner with the 30 | following modules: DNS lookup, WHOIS lookup, GEO lookup, subnet lookup, port scanner, link extractor, etc. Magnifier can detect closed and open network ports, and it can be considered a complete package of information-gathering tools. 31 | 32 |

    33 | 34 |

    35 | 36 | ### Features 37 | 1. DNS Lookup 38 | 2. Whois Lookup 39 | 3. GeoIP Lookup 40 | 4. Subnet Lookup 41 | 5. Port Scanner 42 | 6. Extract Links 43 | 7. Zone Transfer 44 | 8. HTTP Header 45 | 9. Host Finder 46 | 10. Robots.txt 47 | 11. IP-Locator 48 | 12. Traceroute Host DNS Finder 49 | 13. Reverse IP Lookup 50 | 14. Email Collection 51 | 15. Subdomain Finder 52 |

    53 | 54 |

    55 | 56 | #### Installation 57 | For installation kindly follow the given instructions 👇 58 | ```linux 59 | apt update 60 | apt upgrade 61 | apt install git 62 | apt install python2 63 | git clone https://github.com/TheEyeOfCyber/Magnifier 64 | ``` 65 | > How to execute??? 66 | 67 | ```console 68 | cd Magnifier 69 | python2 magnifier.py 70 | ``` 71 | 72 | ### Dependencies 73 | * LINUX 74 | * PYTHON 75 | * LIBRARY WE USE (urllib2, platform) 76 | 77 | Urllib2 : urllib2 is a Python module that can be used for fetching URLs. It defines functions and 78 | classes to help with URL actions (basic and digest. authentication, redirections, cookies, etc) 79 |
    Platform: used to access the underlying platform's data, such as hardware, operating system, and 80 | interpreter version information. 81 | * STATEMENTS WE USE (if/else) 82 | 
    FACEBOOK PAGE : https://www.facebook.com/theeyeofcyber1 88 | Tested On : Windows / Linux / Android Phone (Termux No root) 89 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /modules/theHarvester.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import string 4 | import httplib 5 | import sys 6 | import os 7 | from socket import * 8 | import re 9 | import getopt 10 | import requests 11 | 12 | from discovery import * 13 | from lib import htmlExport 14 | from lib import hostchecker 15 | 16 | def usage(): 17 | 18 | comm = os.path.basename(sys.argv[0]) 19 | 20 | if os.path.dirname(sys.argv[0]) == os.getcwd(): 21 | comm = "./" + comm 22 | 23 | 24 | def start(argv): 25 | if len(sys.argv) < 4: 26 | sys.exit() 27 | try: 28 | opts, args = getopt.getopt(argv, "l:d:b:s:vf:nhcte:") 29 | except getopt.GetoptError: 30 | sys.exit() 31 | start = 0 32 | host_ip = [] 33 | filename = "" 34 | bingapi = "yes" 35 | dnslookup = False 36 | dnsbrute = False 37 | dnstld = False 38 | shodan = False 39 | vhost = [] 40 | virtual = False 41 | limit = 100 42 | dnsserver = "" 43 | for opt, arg in opts: 44 | if opt == '-l': 45 | limit = int(arg) 46 | elif opt == '-d': 47 | word = arg 48 | elif opt == '-s': 49 | start = int(arg) 50 | elif opt == '-v': 51 | virtual = "basic" 52 | elif opt == '-b': 53 | engine = arg 54 | if engine not in ("baidu", "bing", "crtsh","bingapi","dogpile", "google", "googleCSE","virustotal", "googleplus", "google-profiles","linkedin", "pgp", "twitter", "vhost", "yahoo","netcraft","all"): 55 | usage() 56 | sys.exit() 57 | else: 58 | pass 59 | if engine == "all": 60 | all_emails = [] 61 | all_hosts = [] 62 | virtual = "basic" 63 | print '\033[1;97m[>]\033[1;m Initiating 3 intel modules' 64 | 65 | print "\033[1;97m[>]\033[1;m Loading Alpha module (1/3)" 66 | search = googlesearch.search_google(word, limit, start) 67 | search.process() 68 | emails = 
search.get_emails() 69 | hosts = search.get_hostnames() 70 | all_emails.extend(emails) 71 | all_hosts.extend(hosts) 72 | 73 | print "\033[1;97m[>]\033[1;m Beta module deployed (2/3)" 74 | bingapi = "no" 75 | search = bingsearch.search_bing(word, limit, start) 76 | search.process(bingapi) 77 | emails = search.get_emails() 78 | hosts = search.get_hostnames() 79 | all_hosts.extend(hosts) 80 | all_emails.extend(emails) 81 | 82 | print "\033[1;97m[>]\033[1;m Gamma module initiated (3/3)" 83 | search = exaleadsearch.search_exalead(word, limit, start) 84 | search.process() 85 | emails = search.get_emails() 86 | hosts = search.get_hostnames() 87 | all_hosts.extend(hosts) 88 | all_emails.extend(emails) 89 | 90 | #Clean up email list, sort and uniq 91 | all_emails=sorted(set(all_emails)) 92 | #Results############################################################ 93 | if all_emails == []: 94 | print "No emails found" 95 | else: 96 | print "\n\n[+] Emails found:" 97 | print "------------------" 98 | print "\n".join(all_emails) 99 | 100 | if all_hosts == []: 101 | print "No hosts found" 102 | else: 103 | print "\n[+] Hosts found in search engines:" 104 | print "------------------------------------" 105 | all_hosts=sorted(set(all_hosts)) 106 | print "[-] Resolving hostnames IPs... 
" 107 | full_host = hostchecker.Checker(all_hosts) 108 | full = full_host.check() 109 | for host in full: 110 | ip = host.split(':')[0] 111 | print host 112 | if host_ip.count(ip.lower()): 113 | pass 114 | else: 115 | host_ip.append(ip.lower()) 116 | 117 | #Virtual hosts search############################################### 118 | if virtual == "basic": 119 | print "[+] Virtual hosts:" 120 | print "-----------------" 121 | for l in host_ip: 122 | search = bingsearch.search_bing(l, limit, start) 123 | search.process_vhost() 124 | res = search.get_allhostnames() 125 | for x in res: 126 | x = re.sub(r'[[\<\/?]*[\w]*>]*','',x) 127 | x = re.sub('<','',x) 128 | x = re.sub('>','',x) 129 | print l + "\t" + x 130 | vhost.append(l + ":" + x) 131 | full.append(l + ":" + x) 132 | vhost=sorted(set(vhost)) 133 | else: 134 | pass 135 | if __name__ == "__main__": 136 | try: 137 | start(sys.argv[1:]) 138 | except KeyboardInterrupt: 139 | print "Search interrupted by user.." 140 | except: 141 | sys.exit() 142 | -------------------------------------------------------------------------------- /modules/discovery/DNS/win32dns.py: -------------------------------------------------------------------------------- 1 | """ 2 | $Id: win32dns.py,v 1.3.2.1 2007/05/22 20:26:49 customdesigned Exp $ 3 | 4 | Extract a list of TCP/IP name servers from the registry 0.1 5 | 0.1 Strobl 2001-07-19 6 | Usage: 7 | RegistryResolve() returns a list of ip numbers (dotted quads), by 8 | scouring the registry for addresses of name servers 9 | 10 | Tested on Windows NT4 Server SP6a, Windows 2000 Pro SP2 and 11 | Whistler Pro (XP) Build 2462 and Windows ME 12 | ... 
def binipdisplay(s):
    "convert a binary array of ip adresses to a python list"
    # Each address occupies exactly four bytes in the registry value.
    if len(s) % 4 != 0:
        raise EnvironmentError  # well ...
    out = []
    for _ in range(len(s) // 4):
        quad, s = s[:4], s[4:]
        out.append('.'.join(str(ord(c)) for c in quad))
    return out


def stringdisplay(s):
    '''convert "d.d.d.d,d.d.d.d" to ["d.d.d.d","d.d.d.d"].
    also handle u'd.d.d.d d.d.d.d', as reporting on SF
    '''
    import re
    return map(str, re.split("[ ,]", s))


def RegistryResolve():
    """Scour the Windows registry for configured DNS server addresses
    and return them as a list of dotted quads."""
    nameservers = []
    hklm = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
    try:
        params = _winreg.OpenKey(
            hklm, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters")
    except EnvironmentError:  # so it isn't NT/2000/XP
        # windows ME, perhaps?
        try:
            params = _winreg.OpenKey(
                hklm, r"SYSTEM\CurrentControlSet\Services\VxD\MSTCP")
            nameserver, dummytype = _winreg.QueryValueEx(params, 'NameServer')
            if nameserver and not (nameserver in nameservers):
                nameservers.extend(stringdisplay(nameserver))
        except EnvironmentError:
            pass
        return nameservers  # no idea
    # Prefer the DHCP-assigned servers, fall back to the static setting.
    try:
        nameserver = _winreg.QueryValueEx(params, "DhcpNameServer")[0].split()
    except:
        nameserver = _winreg.QueryValueEx(params, "NameServer")[0].split()
    if nameserver:
        nameservers = nameserver
    nameserver = _winreg.QueryValueEx(params, "NameServer")[0]
    _winreg.CloseKey(params)
    try:  # for win2000
        adapters = _winreg.OpenKey(
            hklm,
            r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\DNSRegisteredAdapters")
        for i in range(1000):
            try:
                subname = _winreg.EnumKey(adapters, i)
                adapter = _winreg.OpenKey(adapters, subname)
                dnscount, dnscounttype = _winreg.QueryValueEx(
                    adapter, 'DNSServerAddressCount')
                dnsvalues, dnsvaluestype = _winreg.QueryValueEx(
                    adapter, 'DNSServerAddresses')
                nameservers.extend(binipdisplay(dnsvalues))
                _winreg.CloseKey(adapter)
            except EnvironmentError:
                break
        _winreg.CloseKey(adapters)
    except EnvironmentError:
        pass
    #
    try:  # for whistler (XP)
        ifaces = _winreg.OpenKey(
            hklm,
            r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces")
        for i in range(1000):
            try:
                subname = _winreg.EnumKey(ifaces, i)
                iface = _winreg.OpenKey(ifaces, subname)
                try:
                    nameserver, dummytype = _winreg.QueryValueEx(
                        iface, 'NameServer')
                    if nameserver and not (nameserver in nameservers):
                        nameservers.extend(stringdisplay(nameserver))
                except EnvironmentError:
                    pass
                _winreg.CloseKey(iface)
            except EnvironmentError:
                break
        _winreg.CloseKey(ifaces)
    except EnvironmentError:
        # print "Key Interfaces not found, just do nothing"
        pass
    #
    _winreg.CloseKey(hklm)
    return nameservers
print "Name servers:", RegistryResolve() 121 | 122 | # 123 | # $Log: win32dns.py,v $ 124 | # Revision 1.3.2.1 2007/05/22 20:26:49 customdesigned 125 | # Fix win32 nameserver discovery. 126 | # 127 | # Revision 1.3 2002/05/06 06:15:31 anthonybaxter 128 | # apparently some versions of windows return servers as unicode 129 | # string with space sep, rather than strings with comma sep. 130 | # *sigh* 131 | # 132 | # Revision 1.2 2002/03/19 12:41:33 anthonybaxter 133 | # tabnannied and reindented everything. 4 space indent, no tabs. 134 | # yay. 135 | # 136 | # Revision 1.1 2001/08/09 09:22:28 anthonybaxter 137 | # added what I hope is win32 resolver lookup support. I'll need to try 138 | # and figure out how to get the CVS checkout onto my windows machine to 139 | # make sure it works (wow, doing something other than games on the 140 | # windows machine :) 141 | # 142 | # Code from Wolfgang.Strobl@gmd.de 143 | # win32dns.py from 144 | # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66260 145 | # 146 | # Really, ParseResolvConf() should be renamed "FindNameServers" or 147 | # some such. 
148 | # 149 | # 150 | -------------------------------------------------------------------------------- /modules/lib/htmlExport.py: -------------------------------------------------------------------------------- 1 | from lib import markup 2 | from lib import graphs 3 | import re 4 | 5 | 6 | class htmlExport(): 7 | 8 | def __init__(self, users, hosts, vhosts, dnsres, 9 | dnsrev, file, domain, shodan, tldres): 10 | self.users = users 11 | self.hosts = hosts 12 | self.vhost = vhosts 13 | self.fname = file 14 | self.dnsres = dnsres 15 | self.dnsrev = dnsrev 16 | self.domain = domain 17 | self.shodan = shodan 18 | self.tldres = tldres 19 | self.style = "" 20 | 21 | def styler(self): 22 | a = """ 82 | """ 83 | self.style = a 84 | 85 | def writehtml(self): 86 | page = markup.page() 87 | # page.init (title="theHarvester 88 | # Results",css=('edge.css'),footer="Edge-security 2011")A 89 | page.html() 90 | self.styler() 91 | page.head(self.style) 92 | page.body() 93 | page.h1("theHarvester results") 94 | page.h2("for :" + self.domain) 95 | page.h3("Dashboard:") 96 | graph = graphs.BarGraph('vBar') 97 | graph.values = [len( 98 | self.users), 99 | len(self.hosts), 100 | len(self.vhost), 101 | len(self.tldres), 102 | len(self.shodan)] 103 | graph.labels = ['Emails', 'hosts', 'Vhost', 'TLD', 'Shodan'] 104 | graph.showValues = 1 105 | page.body(graph.create()) 106 | page.h3("E-mails names found:") 107 | if self.users != []: 108 | page.ul(class_="userslist") 109 | page.li(self.users, class_="useritem") 110 | page.ul.close() 111 | else: 112 | page.h2("No emails found") 113 | page.h3("Hosts found:") 114 | if self.hosts != []: 115 | page.ul(class_="softlist") 116 | page.li(self.hosts, class_="softitem") 117 | page.ul.close() 118 | else: 119 | page.h2("No hosts found") 120 | if self.tldres != []: 121 | page.h3("TLD domains found in TLD expansion:") 122 | page.ul(class_="tldlist") 123 | page.li(self.tldres, class_="tlditem") 124 | page.ul.close() 125 | if self.dnsres != []: 126 | 
class parser:
    """Pulls e-mail addresses, hostnames, user names and file URLs out of
    raw search-engine response bodies.

    NOTE(review): several literal HTML-tag patterns in this module were
    stripped when the file was exported (they rendered as empty strings);
    they have been restored from upstream theHarvester and should be
    confirmed against the project history.
    """

    def __init__(self, results, word):
        self.results = results  # raw HTML/JSON blob to mine
        self.word = word        # target domain / search word
        self.temp = []

    def genericClean(self):
        """Strip highlight tags and URL-escaped separators before matching."""
        for tag in ('<em>', '<b>', '</b>', '</em>',
                    '<strong>', '</strong>', '<wbr>', '</wbr>'):
            self.results = self.results.replace(tag, '')
        self.results = re.sub('%2f', ' ', self.results)
        self.results = re.sub('%3a', ' ', self.results)
        for e in ('>', ':', '=', '<', '/', '\\', ';', '&', '%3A', '%3D', '%3C'):
            self.results = self.results.replace(e, ' ')

    def urlClean(self):
        """Lighter cleanup used before URL extraction ('/' is preserved)."""
        for tag in ('<em>', '</em>'):
            self.results = self.results.replace(tag, '')
        self.results = re.sub('%2f', ' ', self.results)
        self.results = re.sub('%3a', ' ', self.results)
        for e in ('<', '>', ':', '=', ';', '&', '%3A', '%3D', '%3C'):
            self.results = self.results.replace(e, ' ')

    def emails(self):
        """Unique e-mail addresses whose domain part ends in self.word."""
        self.genericClean()
        reg_emails = re.compile(
            # Local part is required, charset is flexible
            # https://tools.ietf.org/html/rfc6531 (removed * and () as they provide FP mostly )
            '[a-zA-Z0-9.\-_+#~!$&\',;=:]+' +
            '@' +
            '[a-zA-Z0-9.-]*' +
            self.word)
        self.temp = reg_emails.findall(self.results)
        emails = self.unique()
        return emails

    def fileurls(self, file):
        """Unique href targets, minus Google cache/search noise."""
        urls = []
        reg_urls = re.compile('<a href="(.*?)"')  # restored pattern -- confirm
        self.temp = reg_urls.findall(self.results)
        allurls = self.unique()
        for x in allurls:
            if x.count('webcache') or x.count('google.com'):
                pass
            else:
                urls.append(x)
        return urls

    def hostnames(self):
        """Unique hostnames matching *.<word>."""
        self.genericClean()
        reg_hosts = re.compile('[a-zA-Z0-9.-]*\.' + self.word)
        self.temp = reg_hosts.findall(self.results)
        return self.unique()

    def people_googleplus(self):
        """Display names scraped from Google+ result titles."""
        self.results = re.sub('</b>', '', self.results)  # restored -- confirm
        self.results = re.sub('<b>', '', self.results)
        reg_people = re.compile('>[a-zA-Z0-9._ ]* - Google\+')
        self.temp = reg_people.findall(self.results)
        resul = []
        for x in self.temp:
            y = x.replace(' | LinkedIn', '')
            y = y.replace(' profiles ', '')
            y = y.replace('LinkedIn', '')
            y = y.replace('"', '')
            y = y.replace('>', '')
            if y != " ":
                resul.append(y)
        return resul

    def people_twitter(self):
        """Twitter handles (@name) present in the results."""
        reg_people = re.compile('(@[a-zA-Z0-9._ -]*)')
        self.temp = reg_people.findall(self.results)
        users = self.unique()
        resul = []
        for x in users:
            y = x.replace(' | LinkedIn', '')
            y = y.replace(' profiles ', '')
            y = y.replace('LinkedIn', '')
            y = y.replace('"', '')
            y = y.replace('>', '')
            if y != " ":
                resul.append(y)
        return resul

    def people_linkedin(self):
        """People names scraped from LinkedIn result titles."""
        reg_people = re.compile('">[a-zA-Z0-9._ -]* \| LinkedIn')
        self.temp = reg_people.findall(self.results)
        resul = []
        for x in self.temp:
            y = x.replace(' | LinkedIn', '')
            y = y.replace(' profiles ', '')
            y = y.replace('LinkedIn', '')
            y = y.replace('"', '')
            y = y.replace('>', '')
            if y != " ":
                resul.append(y)
        return resul

    def profiles(self):
        """Names scraped from Google Profile result titles."""
        reg_people = re.compile('">[a-zA-Z0-9._ -]* - Google Profile')
        self.temp = reg_people.findall(self.results)
        resul = []
        for x in self.temp:
            y = x.replace(' Google Profile', '')
            y = y.replace('-', '')
            y = y.replace('">', '')
            if y != " ":
                resul.append(y)
        return resul

    def people_jigsaw(self):
        """Contact names scraped from Jigsaw's showContact links."""
        res = []
        # reg_people = re.compile("'tblrow' title='[a-zA-Z0-9.-]*'>")
        reg_people = re.compile(
            "href=javascript:showContact\('[0-9]*'\)>[a-zA-Z0-9., ]*")
        self.temp = reg_people.findall(self.results)
        for x in self.temp:
            # TODO(review): replacement target restored from upstream -- confirm.
            a = x.split('>')[1].replace('</a', '')
            res.append(a)
        return res

    def googlesets(self):
        """Terms scraped from a Google Sets result page.

        TODO(review): the anchor pattern and this method's original name
        were partially lost in export; restored best-effort -- confirm.
        """
        reg_sets = re.compile('>[a-zA-Z0-9]*</a></li>')
        self.temp = reg_sets.findall(self.results)
        sets = []
        for x in self.temp:
            y = x.replace('>', '')
            y = y.replace('</a</li', '')
            sets.append(y)
        return sets

    def hostnames_all(self):
        """Every hostname found in <cite> tags (no domain filter)."""
        reg_hosts = re.compile('<cite>(.*?)</cite>')  # restored -- confirm
        temp = reg_hosts.findall(self.results)
        for x in temp:
            if x.count(':'):
                # Full URL with scheme: take the netloc component.
                res = x.split(':')[1].split('/')[2]
            else:
                res = x.split("/")[0]
            self.temp.append(res)
        hostnames = self.unique()
        return hostnames

    def unique(self):
        """Order-preserving de-duplication of self.temp."""
        self.new = []
        for x in self.temp:
            if x not in self.new:
                self.new.append(x)
        return self.new
1:4.2.1+dfsg-1), 53 | gcc (= 4:8.1.0-1), 54 | gcc-7 (= 7.3.0-29), 55 | gcc-7-base (= 7.3.0-29), 56 | gcc-8 (= 8.2.0-8), 57 | gcc-8-base (= 8.2.0-8), 58 | gettext (= 0.19.8.1-8), 59 | gettext-base (= 0.19.8.1-8), 60 | grep (= 3.1-2), 61 | groff-base (= 1.22.3-10), 62 | gzip (= 1.9-2.1), 63 | hostname (= 3.21), 64 | init-system-helpers (= 1.54kali1), 65 | install-info (= 6.5.0.dfsg.1-4), 66 | intltool-debian (= 0.35.0+20060710.4), 67 | lib32gcc1 (= 1:8.2.0-8), 68 | lib32stdc++6 (= 8.2.0-8), 69 | libacl1 (= 2.2.52-3+b1), 70 | libarchive-zip-perl (= 1.64-1), 71 | libasan4 (= 7.3.0-29), 72 | libasan5 (= 8.2.0-8), 73 | libatomic1 (= 8.2.0-8), 74 | libattr1 (= 1:2.4.47-2+b2), 75 | libaudit-common (= 1:2.8.4-2), 76 | libaudit1 (= 1:2.8.4-2), 77 | libbinutils (= 2.31.1-7), 78 | libblkid1 (= 2.32.1-0.1), 79 | libbsd0 (= 0.9.1-1), 80 | libbz2-1.0 (= 1.0.6-9), 81 | libc-bin (= 2.27-6), 82 | libc-dev-bin (= 2.27-6), 83 | libc6 (= 2.27-6), 84 | libc6-dev (= 2.27-6), 85 | libc6-i386 (= 2.27-6), 86 | libcap-ng0 (= 0.7.9-1), 87 | libcc1-0 (= 8.2.0-8), 88 | libcilkrts5 (= 7.3.0-29), 89 | libclang-common-4.0-dev (= 1:4.0.1-10), 90 | libclang-common-6.0-dev (= 1:6.0.1-9.1), 91 | libclang1-4.0 (= 1:4.0.1-10), 92 | libclang1-6.0 (= 1:6.0.1-9.1), 93 | libcroco3 (= 0.6.12-2), 94 | libdb5.3 (= 5.3.28+dfsg1-0.2), 95 | libdebconfclient0 (= 0.245), 96 | libdpkg-perl (= 1.19.2kali1), 97 | libedit2 (= 3.1-20180525-1), 98 | libelf1 (= 0.170-0.5), 99 | libexpat1 (= 2.2.6-1), 100 | libfdisk1 (= 2.32.1-0.1), 101 | libffi6 (= 3.2.1-8), 102 | libfile-stripnondeterminism-perl (= 0.043-2), 103 | libfreetype6 (= 2.8.1-2), 104 | libgc1c2 (= 1:7.6.4-0.4), 105 | libgcc-7-dev (= 7.3.0-29), 106 | libgcc-8-dev (= 8.2.0-8), 107 | libgcc1 (= 1:8.2.0-8), 108 | libgcrypt20 (= 1.8.3-1), 109 | libgdbm-compat4 (= 1.18-2), 110 | libgdbm6 (= 1.18-2), 111 | libglib2.0-0 (= 2.58.1-2), 112 | libgmp10 (= 2:6.1.2+dfsg-3), 113 | libgomp1 (= 8.2.0-8), 114 | libgpg-error0 (= 1.32-3), 115 | libgraphite2-3 (= 1.3.12-1), 116 | 
libharfbuzz0b (= 1.9.0-1), 117 | libicu-le-hb0 (= 1.0.3+git161113-5), 118 | libicu60 (= 60.2-6), 119 | libisl19 (= 0.20-2), 120 | libitm1 (= 8.2.0-8), 121 | libjsoncpp1 (= 1.7.4-3), 122 | libllvm4.0 (= 1:4.0.1-10), 123 | libllvm6.0 (= 1:6.0.1-9.1), 124 | liblsan0 (= 8.2.0-8), 125 | liblz4-1 (= 1.8.2-1), 126 | liblzma5 (= 5.2.2-1.3), 127 | libmagic-mgc (= 1:5.34-2), 128 | libmagic1 (= 1:5.34-2), 129 | libmount1 (= 2.32.1-0.1), 130 | libmpc3 (= 1.1.0-1), 131 | libmpdec2 (= 2.4.2-2), 132 | libmpfr6 (= 4.0.1-1), 133 | libmpx2 (= 8.2.0-8), 134 | libncurses6 (= 6.1+20181013-1), 135 | libncursesw6 (= 6.1+20181013-1), 136 | libobjc-7-dev (= 7.3.0-29), 137 | libobjc-8-dev (= 8.2.0-8), 138 | libobjc4 (= 8.2.0-8), 139 | libpam-modules (= 1.1.8-3.8), 140 | libpam-modules-bin (= 1.1.8-3.8), 141 | libpam-runtime (= 1.1.8-3.8), 142 | libpam0g (= 1.1.8-3.8), 143 | libpcre3 (= 2:8.39-11), 144 | libperl5.26 (= 5.26.2-7+b1), 145 | libpipeline1 (= 1.5.0-1), 146 | libpng16-16 (= 1.6.34-2), 147 | libpython-stdlib (= 2.7.15-3), 148 | libpython2-stdlib (= 2.7.15-3), 149 | libpython2.7-minimal (= 2.7.15-4), 150 | libpython2.7-stdlib (= 2.7.15-4), 151 | libpython3-stdlib (= 3.6.7-1), 152 | libpython3.6-minimal (= 3.6.7-1), 153 | libpython3.6-stdlib (= 3.6.7-1), 154 | libquadmath0 (= 8.2.0-8), 155 | libreadline7 (= 7.0-5), 156 | libseccomp2 (= 2.3.3-3), 157 | libselinux1 (= 2.8-1+b1), 158 | libsigsegv2 (= 2.12-2), 159 | libsmartcols1 (= 2.32.1-0.1), 160 | libsqlite3-0 (= 3.25.2-1), 161 | libssl1.1 (= 1.1.1-1), 162 | libstdc++-7-dev (= 7.3.0-29), 163 | libstdc++-8-dev (= 8.2.0-8), 164 | libstdc++6 (= 8.2.0-8), 165 | libsystemd0 (= 239-10), 166 | libtinfo5 (= 6.1+20181013-1), 167 | libtinfo6 (= 6.1+20181013-1), 168 | libtool (= 2.4.6-6), 169 | libtsan0 (= 8.2.0-8), 170 | libubsan0 (= 7.3.0-29), 171 | libubsan1 (= 8.2.0-8), 172 | libudev1 (= 239-10), 173 | libunistring2 (= 0.9.10-1), 174 | libuuid1 (= 2.32.1-0.1), 175 | libxml2 (= 2.9.4+dfsg1-7+b1), 176 | linux-libc-dev (= 4.18.10-2kali1), 177 
| login (= 1:4.5-1.1), 178 | m4 (= 1.4.18-1), 179 | make (= 4.2.1-1.2), 180 | man-db (= 2.8.4-2+b1), 181 | mawk (= 1.3.3-17+b3), 182 | mime-support (= 3.61), 183 | ncurses-base (= 6.1+20181013-1), 184 | ncurses-bin (= 6.1+20181013-1), 185 | patch (= 2.7.6-3), 186 | perl (= 5.26.2-7+b1), 187 | perl-base (= 5.26.2-7+b1), 188 | perl-modules-5.26 (= 5.26.2-7), 189 | po-debconf (= 1.0.20), 190 | python (= 2.7.15-3), 191 | python-all (= 2.7.15-3), 192 | python-minimal (= 2.7.15-3), 193 | python-pkg-resources (= 40.4.3-1), 194 | python-setuptools (= 40.4.3-1), 195 | python2 (= 2.7.15-3), 196 | python2-minimal (= 2.7.15-3), 197 | python2.7 (= 2.7.15-4), 198 | python2.7-minimal (= 2.7.15-4), 199 | python3 (= 3.6.7-1), 200 | python3-distutils (= 3.7.1-1), 201 | python3-lib2to3 (= 3.7.1-1), 202 | python3-minimal (= 3.6.7-1), 203 | python3.6 (= 3.6.7-1), 204 | python3.6-minimal (= 3.6.7-1), 205 | readline-common (= 7.0-5), 206 | sed (= 4.5-2), 207 | sysvinit-utils (= 2.88dsf-59.11), 208 | tar (= 1.30+dfsg-2), 209 | util-linux (= 2.32.1-0.1), 210 | xz-utils (= 5.2.2-1.3), 211 | zlib1g (= 1:1.2.11.dfsg-1) 212 | Environment: 213 | DEB_BUILD_OPTIONS="parallel=4" 214 | LANG="en_US.UTF-8" 215 | SOURCE_DATE_EPOCH="1540669835" 216 | -------------------------------------------------------------------------------- /modules/discovery/shodan/api.py: -------------------------------------------------------------------------------- 1 | try: 2 | from json import dumps, loads 3 | except: 4 | from simplejson import dumps, loads 5 | from urllib2 import urlopen 6 | from urllib import urlencode 7 | 8 | __all__ = ['WebAPI'] 9 | 10 | 11 | class WebAPIError(Exception): 12 | 13 | def __init__(self, value): 14 | self.value = value 15 | 16 | def __str__(self): 17 | return self.value 18 | 19 | 20 | class WebAPI: 21 | 22 | """Wrapper around the SHODAN webservices API""" 23 | 24 | class DatalossDb: 25 | 26 | def __init__(self, parent): 27 | self.parent = parent 28 | 29 | def search(self, **kwargs): 30 | 
"""Search the Dataloss DB archive. 31 | 32 | Arguments: 33 | name -- Name of the affected company/ organisation 34 | 35 | arrest -- whether the incident resulted in an arrest 36 | breaches -- the type of breach that occurred (Hack, MissingLaptop etc.) 37 | country -- country where the incident took place 38 | ext -- whether an external, third party was affected 39 | ext_names -- the name of the third party company that was affected 40 | lawsuit -- whether the incident resulted in a lawsuit 41 | records -- the number of records that were lost/ stolen 42 | recovered -- whether the affected items were recovered 43 | sub_types -- the sub-categorization of the affected company/ organization 44 | source -- whether the incident occurred from inside or outside the organization 45 | stocks -- stock symbol of the affected company 46 | types -- the basic type of organization (government, business, educational) 47 | uid -- unique ID for the incident 48 | 49 | Returns: 50 | A dictionary with 2 main items: matches (list) and total (int). 51 | 52 | """ 53 | return self.parent._request('datalossdb/search', dict(**kwargs)) 54 | 55 | class Exploits: 56 | 57 | def __init__(self, parent): 58 | self.parent = parent 59 | 60 | def search(self, query, sources=[], 61 | cve=None, osvdb=None, msb=None, bid=None): 62 | """Search the entire Shodan Exploits archive using the same query syntax 63 | as the website. 64 | 65 | Arguments: 66 | query -- exploit search query; same syntax as website 67 | 68 | Optional arguments: 69 | sources -- metasploit, cve, osvdb, exploitdb, or packetstorm 70 | cve -- CVE identifier (ex. 2010-0432) 71 | osvdb -- OSVDB identifier (ex. 11666) 72 | msb -- Microsoft Security Bulletin ID (ex. MS05-030) 73 | bid -- Bugtraq identifier (ex. 
13951) 74 | 75 | """ 76 | if sources: 77 | query += ' source:' + ','.join(sources) 78 | if cve: 79 | query += ' cve:%s' % (str(cve).strip()) 80 | if osvdb: 81 | query += ' osvdb:%s' % (str(osvdb).strip()) 82 | if msb: 83 | query += ' msb:%s' % (str(msb).strip()) 84 | if bid: 85 | query += ' bid:%s' % (str(bid).strip()) 86 | return self.parent._request('search_exploits', {'q': query}) 87 | 88 | class ExploitDb: 89 | 90 | def __init__(self, parent): 91 | self.parent = parent 92 | 93 | def download(self, id): 94 | """Download the exploit code from the ExploitDB archive. 95 | 96 | Arguments: 97 | id -- ID of the ExploitDB entry 98 | 99 | Returns: 100 | A dictionary with the following fields: 101 | filename -- Name of the file 102 | content-type -- Mimetype 103 | data -- Contents of the file 104 | 105 | """ 106 | return self.parent._request('exploitdb/download', {'id': id}) 107 | 108 | def search(self, query, **kwargs): 109 | """Search the ExploitDB archive. 110 | 111 | Arguments: 112 | query -- Search terms 113 | 114 | Optional arguments: 115 | author -- Name of the exploit submitter 116 | platform -- Target platform (e.g. windows, linux, hardware etc.) 117 | port -- Service port number 118 | type -- Any, dos, local, papers, remote, shellcode and webapps 119 | 120 | Returns: 121 | A dictionary with 2 main items: matches (list) and total (int). 122 | Each item in 'matches' is a dictionary with the following elements: 123 | 124 | id 125 | author 126 | date 127 | description 128 | platform 129 | port 130 | type 131 | 132 | """ 133 | return ( 134 | self.parent._request( 135 | 'exploitdb/search', dict(q=query, **kwargs)) 136 | ) 137 | 138 | class Msf: 139 | 140 | def __init__(self, parent): 141 | self.parent = parent 142 | 143 | def download(self, id): 144 | """Download a metasploit module given the fullname (id) of it. 145 | 146 | Arguments: 147 | id -- fullname of the module (ex. 
auxiliary/admin/backupexec/dump) 148 | 149 | Returns: 150 | A dictionary with the following fields: 151 | filename -- Name of the file 152 | content-type -- Mimetype 153 | data -- File content 154 | """ 155 | return self.parent._request('msf/download', {'id': id}) 156 | 157 | def search(self, query, **kwargs): 158 | """Search for a Metasploit module. 159 | """ 160 | return self.parent._request('msf/search', dict(q=query, **kwargs)) 161 | 162 | def __init__(self, key): 163 | """Initializes the API object. 164 | 165 | Arguments: 166 | key -- your API key 167 | 168 | """ 169 | self.api_key = key 170 | self.base_url = 'http://www.shodanhq.com/api/' 171 | self.dataloss = self.DatalossDb(self) 172 | self.exploits = self.Exploits(self) 173 | self.exploitdb = self.ExploitDb(self) 174 | self.msf = self.Msf(self) 175 | 176 | def _request(self, function, params): 177 | """General-purpose function to create web requests to SHODAN. 178 | 179 | Arguments: 180 | function -- name of the function you want to execute 181 | params -- dictionary of parameters for the function 182 | 183 | Returns 184 | A JSON string containing the function's results. 185 | 186 | """ 187 | # Add the API key parameter automatically 188 | params['key'] = self.api_key 189 | 190 | # Send the request 191 | data = urlopen( 192 | self.base_url + 193 | function + 194 | '?' + 195 | urlencode( 196 | params)).read( 197 | ) 198 | 199 | # Parse the text into JSON 200 | data = loads(data) 201 | 202 | # Raise an exception if an error occurred 203 | if data.get('error', None): 204 | raise WebAPIError(data['error']) 205 | 206 | # Return the data 207 | return data 208 | 209 | def fingerprint(self, banner): 210 | """Determine the software based on the banner. 211 | 212 | Arguments: 213 | banner - HTTP banner 214 | 215 | Returns: 216 | A list of software that matched the given banner. 
217 | """ 218 | return self._request('fingerprint', {'banner': banner}) 219 | 220 | def host(self, ip): 221 | """Get all available information on an IP. 222 | 223 | Arguments: 224 | ip -- IP of the computer 225 | 226 | Returns: 227 | All available information SHODAN has on the given IP, 228 | subject to API key restrictions. 229 | 230 | """ 231 | return self._request('host', {'ip': ip}) 232 | 233 | def search(self, query): 234 | """Search the SHODAN database. 235 | 236 | Arguments: 237 | query -- search query; identical syntax to the website 238 | 239 | Returns: 240 | A dictionary with 3 main items: matches, countries and total. 241 | Visit the website for more detailed information. 242 | 243 | """ 244 | return self._request('search', {'q': query}) 245 | -------------------------------------------------------------------------------- /modules/discovery/dnssearch.py: -------------------------------------------------------------------------------- 1 | import IPy 2 | import DNS 3 | import string 4 | import socket 5 | import sys 6 | 7 | 8 | class dns_reverse(): 9 | 10 | def __init__(self, range, verbose=True): 11 | self.range = range 12 | self.iplist = '' 13 | self.results = [] 14 | self.verbose = verbose 15 | try: 16 | DNS.ParseResolvConf("/etc/resolv.conf") 17 | nameserver = DNS.defaults['server'][0] 18 | except: 19 | print "Error in DNS resolvers" 20 | sys.exit() 21 | 22 | def run(self, host): 23 | a = string.split(host, '.') 24 | a.reverse() 25 | b = string.join(a, '.') + '.in-addr.arpa' 26 | nameserver = DNS.defaults['server'][0] 27 | if self.verbose: 28 | ESC = chr(27) 29 | sys.stdout.write(ESC + '[2K' + ESC + '[G') 30 | sys.stdout.write("\r\t" + host) 31 | sys.stdout.flush() 32 | try: 33 | name = DNS.Base.DnsRequest(b, qtype='ptr').req().answers[0]['data'] 34 | return host + ":" + name 35 | except: 36 | pass 37 | 38 | def get_ip_list(self, ips): 39 | """Generates the list of ips to reverse""" 40 | try: 41 | list = IPy.IP(ips) 42 | except: 43 | print "Error in IP 
class dns_reverse():

    """Reverse (PTR) resolution of every address in an IP range.

    Uses the project-local DNS and IPy modules; `range` is a CIDR/range
    expression (e.g. 192.168.1.0/24).
    """

    def __init__(self, range, verbose=True):
        self.range = range
        self.iplist = ''
        self.results = []
        self.verbose = verbose
        try:
            DNS.ParseResolvConf("/etc/resolv.conf")
            nameserver = DNS.defaults['server'][0]
        except Exception:
            # Narrowed from a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            print("Error in DNS resolvers")
            sys.exit()

    def run(self, host):
        """PTR-resolve one address; return 'ip:name' or None on failure."""
        # str methods instead of the deprecated `string` module helpers.
        octets = host.split('.')
        octets.reverse()
        ptr_name = '.'.join(octets) + '.in-addr.arpa'
        nameserver = DNS.defaults['server'][0]
        if self.verbose:
            ESC = chr(27)
            sys.stdout.write(ESC + '[2K' + ESC + '[G')
            sys.stdout.write("\r\t" + host)
            sys.stdout.flush()
        try:
            name = DNS.Base.DnsRequest(ptr_name, qtype='ptr').req().answers[0]['data']
            return host + ":" + name
        except Exception:
            # No PTR record / lookup failure: treated as "no result".
            return None

    def get_ip_list(self, ips):
        """Generates the list of ips to reverse."""
        try:
            # Renamed from `list`, which shadowed the builtin.
            network = IPy.IP(ips)
        except Exception:
            print("Error in IP format, check the input and try again. (Eg. 192.168.1.0/24)")
            sys.exit()
        return [str(addr) for addr in network]

    def list(self):
        self.iplist = self.get_ip_list(self.range)
        return self.iplist

    def process(self):
        for x in self.iplist:
            host = self.run(x)
            if host is not None:
                self.results.append(host)
        return self.results


class dns_force():

    """Dictionary brute force of subdomains for a domain."""

    def __init__(self, domain, dnsserver, verbose=False):
        self.domain = domain
        self.nameserver = dnsserver
        self.file = "dns-names.txt"
        self.subdo = False
        self.verbose = verbose
        try:
            # with-block: the original leaked the open file handle.
            with open(self.file, "r") as wordlist:
                self.list = wordlist.readlines()
        except IOError:
            print("Error opening dns dictionary file")
            sys.exit()

    def getdns(self, domain):
        """Find an authoritative nameserver for `domain`.

        "" means: discover via SOA/NS lookup; "local" means: use the
        resolver from /etc/resolv.conf; anything else is used as-is.
        """
        DNS.ParseResolvConf("/etc/resolv.conf")
        local_ns = DNS.defaults['server'][0]
        if self.subdo:
            parts = domain.split(".")
            parts.pop(0)
            rootdom = ".".join(parts)
        else:
            rootdom = domain
        if self.nameserver == "":
            test = None
            try:
                r = DNS.Request(rootdom, qtype='SOA').req()
                primary, email, serial, refresh, retry, expire, minimum = r.answers[0]['data']
                test = DNS.Request(rootdom, qtype='NS', server=primary, aa=1).req()
            except Exception as e:
                print(e)
            # The original dereferenced `test` even when the lookup above
            # failed, raising NameError; bail out explicitly instead.
            if test is None or test.header['status'] != "NOERROR":
                print("Error")
                sys.exit()
            self.nameserver = test.answers[0]['data']
        elif self.nameserver == "local":
            self.nameserver = local_ns
        return self.nameserver

    def run(self, host):
        """Resolve one wordlist entry; return 'ip:hostname' or None."""
        if self.nameserver == "":
            self.nameserver = self.getdns(self.domain)
            print("Using DNS server: " + self.nameserver)
        hostname = str(host.split("\n")[0]) + "." + str(self.domain)
        if self.verbose:
            ESC = chr(27)
            sys.stdout.write(ESC + '[2K' + ESC + '[G')
            sys.stdout.write("\r" + hostname)
            sys.stdout.flush()
        try:
            test = DNS.Request(hostname, qtype='a', server=self.nameserver).req()
            return test.answers[0]['data'] + ":" + hostname
        except Exception:
            return None

    def process(self):
        results = []
        for candidate in self.list:
            host = self.run(candidate)
            if host is not None:
                results.append(host)
        return results


class dns_tld():

    """Check whether the domain's label exists under other TLDs."""

    def __init__(self, domain, dnsserver, verbose=False):
        self.domain = domain
        self.nameserver = dnsserver
        self.subdo = False
        self.verbose = verbose
        # Updated from http://data.iana.org/TLD/tlds-alpha-by-domain.txt
        self.tlds = [
            "ac", "academy", "ad", "ae", "aero", "af", "ag", "ai", "al", "am", "an", "ao", "aq", "ar", "arpa", "as",
            "asia", "at", "au", "aw", "ax", "az", "ba", "bb", "bd", "be", "bf", "bg", "bh", "bi", "bike", "biz", "bj",
            "bm", "bn", "bo", "br", "bs", "bt", "builders", "buzz", "bv", "bw", "by", "bz", "ca", "cab", "camera",
            "camp", "careers", "cat", "cc", "cd", "center", "ceo", "cf", "cg", "ch", "ci", "ck", "cl", "clothing",
            "cm", "cn", "co", "codes", "coffee", "com", "company", "computer", "construction", "contractors", "coop",
            "cr", "cu", "cv", "cw", "cx", "cy", "cz", "de", "diamonds", "directory", "dj", "dk", "dm", "do",
            "domains", "dz", "ec", "edu", "education", "ee", "eg", "email", "enterprises", "equipment", "er", "es",
            "estate", "et", "eu", "farm", "fi", "fj", "fk", "florist", "fm", "fo", "fr", "ga", "gallery", "gb", "gd",
            "ge", "gf", "gg", "gh", "gi", "gl", "glass", "gm", "gn", "gov", "gp", "gq", "gr", "graphics", "gs", "gt",
            "gu", "guru", "gw", "gy", "hk", "hm", "hn", "holdings", "holiday", "house", "hr", "ht", "hu", "id", "ie",
            "il", "im", "immobilien", "in", "info", "institute", "int", "international", "io", "iq", "ir", "is", "it",
            "je", "jm", "jo", "jobs", "jp", "kaufen", "ke", "kg", "kh", "ki", "kitchen", "kiwi", "km", "kn", "kp",
            "kr", "kw", "ky", "kz", "la", "land", "lb", "lc", "li", "lighting", "limo", "lk", "lr", "ls", "lt", "lu",
            "lv", "ly", "ma", "management", "mc", "md", "me", "menu", "mg", "mh", "mil", "mk", "ml", "mm", "mn", "mo",
            "mobi", "mp", "mq", "mr", "ms", "mt", "mu", "museum", "mv", "mw", "mx", "my", "mz", "na", "name", "nc",
            "ne", "net", "nf", "ng", "ni", "ninja", "nl", "no", "np", "nr", "nu", "nz", "om", "onl", "org", "pa", "pe",
            "pf", "pg", "ph", "photography", "photos", "pk", "pl", "plumbing", "pm", "pn", "post", "pr", "pro", "ps",
            "pt", "pw", "py", "qa", "re", "recipes", "repair", "ro", "rs", "ru", "ruhr", "rw", "sa", "sb", "sc", "sd",
            "se", "sexy", "sg", "sh", "shoes", "si", "singles", "sj", "sk", "sl", "sm", "sn", "so", "solar",
            "solutions", "sr", "st", "su", "support", "sv", "sx", "sy", "systems", "sz", "tattoo", "tc", "td",
            "technology", "tel", "tf", "tg", "th", "tips", "tj", "tk", "tl", "tm", "tn", "to", "today", "tp", "tr",
            "training", "travel", "tt", "tv", "tw", "tz", "ua", "ug", "uk", "uno", "us", "uy", "uz", "va", "vc",
            "ve", "ventures", "vg", "vi", "viajes", "vn", "voyage", "vu", "wang", "wf", "wien", "ws", "xxx", "ye",
            "yt", "za", "zm", "zw"]

    def getdns(self, domain):
        """Resolve which nameserver to query (see dns_force.getdns)."""
        if self.subdo:
            parts = domain.split(".")
            parts.pop(0)
            rootdom = ".".join(parts)
        else:
            rootdom = domain
        # Note: only an explicit False triggers SOA/NS discovery here
        # (unlike dns_force, which keys on ""); kept as-is for callers.
        if self.nameserver == False:
            r = DNS.Request(rootdom, qtype='SOA').req()
            primary, email, serial, refresh, retry, expire, minimum = r.answers[0]['data']
            test = DNS.Request(rootdom, qtype='NS', server=primary, aa=1).req()
            if test.header['status'] != "NOERROR":
                print("Error")
                sys.exit()
            self.nameserver = test.answers[0]['data']
        elif self.nameserver == "local":
            # The original referenced an undefined `nameserver` here (its
            # resolv.conf parsing was commented out), raising NameError;
            # resolve it explicitly instead.
            DNS.ParseResolvConf("/etc/resolv.conf")
            self.nameserver = DNS.defaults['server'][0]
        return self.nameserver

    def run(self, tld):
        """Try <label>.<tld>; return 'ip:hostname' or None."""
        self.nameserver = self.getdns(self.domain)
        hostname = self.domain.split(".")[0] + "." + tld
        if self.verbose:
            ESC = chr(27)
            sys.stdout.write(ESC + '[2K' + ESC + '[G')
            sys.stdout.write("\r\tSearching for: " + hostname)
            sys.stdout.flush()
        try:
            test = DNS.Request(hostname, qtype='a', server=self.nameserver).req()
            return test.answers[0]['data'] + ":" + hostname
        except Exception:
            return None

    def process(self):
        results = []
        for tld in self.tlds:
            host = self.run(tld)
            if host is not None:
                results.append(host)
        return results
#!/usr/bin/env python
# magnifier . Version 1.0
# magnifier - Information Gathering Tool
############################################
# Coder : Humayun Ali Khan @ Black Tiger
############################################
try:
    from urllib2 import *
except ImportError:  # Python 3 fallback (urlopen comes from urllib.request)
    from urllib.request import *
from platform import system
import sys
import os    # was missing: clear()/select() call os.system / os.getcwd
import time  # was missing: slowprint() calls time.sleep


def clear():
    """Clear the terminal screen (and set green text on Windows)."""
    if system() == 'Linux':
        os.system("clear")
    if system() == 'Windows':
        os.system('cls')
        os.system('color a')


def slowprint(s):
    """Print s one character at a time (typewriter effect), then a newline."""
    for c in s + '\n':
        sys.stdout.write(c)
        sys.stdout.flush()
        time.sleep(4. / 100)
30 | .//////:-....````````````````````````````.---.-::. 31 | -/////:....````````` ````` `.:::-:/- 32 | `:///+/-...``````` ``..:/://:` 33 | `://++:...`````` ``` .:+//+:` 34 | `://+o-...````` ` `-++++:` 35 | -/++o-...````` ` ```.-oooo- 36 | `/+++:...```` ````...:oss+` 37 | -+++/...```` ````.../oso- 38 | /+o/-..````` `````..-+oo/ 39 | `+oo:...```` ````...:oo+` 40 | -oo+-...```` ````...-+++. 41 | -o+/....```` ````..../+/- 42 | :++/....```` ````....///- 43 | -///`...```` ````...`//:- 44 | ./::.....```` ````....-::/. 45 | /:--....````` `````....:::/ 46 | ./-..-...````` `````...-:::/. 47 | ::.`.-...````` `````...-::-/: 48 | /-``.:.`.`````` ``````...::--:/ 49 | :-``.:-``.`````` ``````...-::--:: 50 | -:``.::.``.```````` ````````.``.:/---/- 51 | `:-..-/:.``````````````````````````````````.:/:--:/` 52 | .:---:/:-``````````````````````````````-:/:---:-.-.... 53 | .-:::///-..``````````````````````..-::----:/:-...-`:y: 54 | `.://+++/:-..``````````````..---------.``-//:-.`-..oy: 55 | `.-/+oosso+/::---------:::::---..` -:/::oss/.-os: 56 | `.-:/++++++++////:::--..`` ./sysoosss/.-os: 57 | ```......````` `/yhysoosso/.-os- 58 | `/yhysoosso/.-os- 59 | `/yhyooosso:.-os- 60 | ./yhyoossso:.-ss- 61 | .+yhyoossso:.:so- 62 | .+yhyoossso:.:so- 63 | .+yhyoossso:.:so- 64 | .+hhyoossso:.:so. 65 | .+hhyoossso:.:so. 66 | .+hhyoossso-.:so. 67 | .ohhyoossso-./s+. 68 | .ohhyoossso-./y+. 69 | -ohhyoosss+-./y+. 70 | -ohhsoosss+-`/y+ 71 | -ohhsoosss+-.s 72 | -shhsoossy+` \033[37m 73 | |'.... 
print(banner)


def menu():
    """Print the numbered tool menu."""
    print('''
\033[91m 1 \033[96m} \033[91m ==\033[93m> \033[92m DNS Lookup
\033[91m 2 \033[96m} \033[91m ==\033[93m> \033[92m Whois Lookup
\033[91m 3 \033[96m} \033[91m ==\033[93m> \033[92m GeoIP Lookup
\033[91m 4 \033[96m} \033[91m ==\033[93m> \033[92m Subnet Lookup
\033[91m 5 \033[96m} \033[91m ==\033[93m> \033[92m Port Scanner
\033[91m 6 \033[96m} \033[91m ==\033[93m> \033[92m Extract Links
\033[91m 7 \033[96m} \033[91m ==\033[93m> \033[92m Zone Transfer
\033[91m 8 \033[96m} \033[91m ==\033[93m> \033[92m HTTP Header
\033[91m 9 \033[96m} \033[91m ==\033[93m> \033[92m Host Finder
\033[91m 10\033[96m} \033[91m ==\033[93m> \033[92m IP-Locator
\033[91m 11\033[96m} \033[91m ==\033[93m> \033[92m Traceroute
\033[91m 12\033[96m} \033[91m ==\033[93m> \033[92m Robots.txt
\033[91m 13\033[96m} \033[91m ==\033[93m> \033[92m Host DNS Finder
\033[91m 14\033[96m} \033[91m ==\033[93m> \033[92m Reverse IP Lookup
\033[91m 15\033[96m} \033[91m ==\033[93m> \033[92m Collection Email
\033[91m 16\033[96m} \033[91m ==\033[93m> \033[92m Subdomain Finder
\033[91m 17\033[96m} \033[91m ==\033[93m> \033[92m Install & Update
\033[91m 18\033[96m} \033[91m ==\033[93m> \033[92m About Me
\033[91m 00\033[96m} \033[91m ==\033[93m> \033[92m Exit
''')

slowprint("\033[1;91mThis Is Simple Script By :\033[92m Humayun Ali Khan @ Black Tiger" + "\n \033[93m Let's Start \033[96m --> --> --> \033[91m ")

menu()


def ext():
    """Ask the user to continue or exit; on continue, redraw and re-prompt."""
    ex = raw_input('\033[92mContinue/Exit->-> ')
    # [:1] guards against empty input (the original ex[0] raised IndexError).
    if ex[:1].upper() == 'E':
        print('Good-bye!!!')
        exit()
    else:
        clear()
        print(banner)
        menu()
        select()


# Menu number -> (input prompt, query URL prefix) for the simple
# "prompt, fetch, print" lookups. Prompts and URLs are byte-identical
# to the original branches.
# NOTE(review): the menu labels 11 as Traceroute but the API called is
# shared-DNS; 13 is labeled Host DNS Finder but calls mtr (a traceroute).
# Kept as-is to preserve behavior — confirm intent with the author.
_API_LOOKUPS = {
    1: ('\033[96mEnter Your Domain :\033[96m', "http://api.hackertarget.com/dnslookup/?q="),
    2: ('\033[91mEnter IP Address : \033[91m', "http://api.hackertarget.com/whois/?q="),
    3: ('\033[91mEnter IP Address : \033[91m', "http://api.hackertarget.com/geoip/?q="),
    4: ('\033[92mEnter IP Address : \033[92m', "http://api.hackertarget.com/subnetcalc/?q="),
    5: ('\033[96mEnter IP Address : \033[96m', "http://api.hackertarget.com/nmap/?q="),
    6: ('\033[91mEnter Your Domain :\033[91m', "https://api.hackertarget.com/pagelinks/?q="),
    7: ('\033[92mEnter Your Domain :\033[92m', "http://api.hackertarget.com/zonetransfer/?q="),
    8: ('\033[96mEnter Your Domain :\033[96m', "http://api.hackertarget.com/httpheaders/?q="),
    9: ('\033[91mEnter Your Domain :\033[91m', "http://api.hackertarget.com/hostsearch/?q="),
    10: ('\033[91mEnter Your IP Address :\033[91m', "http://ip-api.com/json/"),
    11: ('\033[1;91mEnter Domain: \033[1;m', "http://api.hackertarget.com/findshareddns/?q="),
    13: ('\033[91mEnter Your Domain :\033[91m', "https://api.hackertarget.com/mtr/?q="),
    14: ('\033[92mEnter IP Address : \033[92m', "http://api.hackertarget.com/reverseiplookup/?q="),
}


def select():
    """Prompt for a menu number and dispatch the chosen action."""
    import os  # local import: the module header historically lacked `import os`
    try:
        try:
            # The original used input(), which eval()s whatever the user
            # types (arbitrary code execution); parse an int instead.
            magnifier = int(raw_input("\033[96mEnter \033[92m00/\033[91m18 => => "))
        except ValueError:
            # Non-numeric input: re-prompt instead of crashing.
            select()
            return
        if magnifier in _API_LOOKUPS:
            prompt, url = _API_LOOKUPS[magnifier]
            dz = raw_input(prompt)
            print(urlopen(url + dz).read())
            ext()
        elif magnifier == 12:
            # NOTE(review): os.system with raw user input is shell-injectable;
            # the same applies to options 15-17 below.
            dz = raw_input('\033[91mEnter Your Domain :\033[91m')
            path = os.getcwd()
            os.system('cd ' + path + '/modules && python2 goofile.py -d %s -f txt' % dz)
            ext()
        elif magnifier == 15:
            dz = raw_input('\033[91mEnter Your Domain :\033[91m')
            path = os.getcwd()
            os.system('cd ' + path + '/modules && python2 theHarvester.py -d %s -b all' % dz)
            ext()
        elif magnifier == 16:
            dz = raw_input('\033[91mEnter Your Domain :\033[91m')
            print('-+-+-+-+-+-+-+-+-+-+-+-+-+-+-')
            path = os.getcwd()
            os.system('cd ' + path + '/modules && python2 sub.py -t %s -l fr ' % dz)
            ext()
        elif magnifier == 17:
            path = os.getcwd()
            os.system('cd ' + path + ' && bash install')
            os.system('cd ' + path + ' && python2 update.py')
            ext()
        elif magnifier == 18:
            slowprint("...............")
            slowprint("Name : magnifier \033[92m")
            slowprint("...............")
            slowprint("Version : 1.0 \033[91m")
            slowprint(".............")
            slowprint("Author: Humayun Ali Khan @ magnifier \033[96m")
            slowprint("......................")
            slowprint("GitHub: https://github.com/TheEyeOfCyber/Magnifier")
            slowprint("......................")
            slowprint("YouTube: https://www.youtube.com/channel/UCEkclbUf3LMNkVOJ9_ln-mA")
            slowprint("......................")
            slowprint("Facebook: https://www.facebook.com/theeyeofcyber1")
            slowprint("......................")
            ext()
        elif magnifier == 0:
            print("Allah Hafiz!!")
    except KeyboardInterrupt:
        print("\nCtrl + C -> Exiting!!")

select()
10 | """ 11 | 12 | import socket 13 | import string 14 | import types 15 | import time 16 | import Type 17 | import Class 18 | import Opcode 19 | import asyncore 20 | 21 | 22 | class DNSError(Exception): 23 | pass 24 | 25 | defaults = {'protocol': 'udp', 'port': 53, 'opcode': Opcode.QUERY, 26 | 'qtype': Type.A, 'rd': 1, 'timing': 1, 'timeout': 30} 27 | 28 | defaults['server'] = [] 29 | 30 | 31 | def ParseResolvConf(resolv_path): 32 | global defaults 33 | try: 34 | lines = open(resolv_path).readlines() 35 | except: 36 | print "error in path" + resolv_path 37 | for line in lines: 38 | line = string.strip(line) 39 | if not line or line[0] == ';' or line[0] == '#': 40 | continue 41 | fields = string.split(line) 42 | if len(fields) < 2: 43 | continue 44 | if fields[0] == 'domain' and len(fields) > 1: 45 | defaults['domain'] = fields[1] 46 | if fields[0] == 'search': 47 | pass 48 | if fields[0] == 'options': 49 | pass 50 | if fields[0] == 'sortlist': 51 | pass 52 | if fields[0] == 'nameserver': 53 | defaults['server'].append(fields[1]) 54 | 55 | 56 | def DiscoverNameServers(): 57 | import sys 58 | if sys.platform in ('win32', 'nt'): 59 | import win32dns 60 | defaults['server'] = win32dns.RegistryResolve() 61 | else: 62 | return ParseResolvConf() 63 | 64 | 65 | class DnsRequest: 66 | 67 | """ high level Request object """ 68 | 69 | def __init__(self, *name, **args): 70 | self.donefunc = None 71 | self.async = None 72 | self.defaults = {} 73 | self.argparse(name, args) 74 | self.defaults = self.args 75 | 76 | def argparse(self, name, args): 77 | if not name and 'name' in self.defaults: 78 | args['name'] = self.defaults['name'] 79 | if isinstance(name, types.StringType): 80 | args['name'] = name 81 | else: 82 | if len(name) == 1: 83 | if name[0]: 84 | args['name'] = name[0] 85 | for i in defaults.keys(): 86 | if i not in args: 87 | if i in self.defaults: 88 | args[i] = self.defaults[i] 89 | else: 90 | args[i] = defaults[i] 91 | if isinstance(args['server'], 
class DnsRequest:

    """ high level Request object """

    def __init__(self, *name, **args):
        # donefunc: completion callback used by the async subclass.
        # NOTE(review): `async` is a reserved word in Python 3.7+; this
        # attribute name makes the module Python-2-only.
        self.donefunc = None
        self.async = None
        self.defaults = {}
        self.argparse(name, args)
        # Remember the merged arguments so later req() calls inherit them.
        self.defaults = self.args

    def argparse(self, name, args):
        # Merge positional name, explicit kwargs, per-instance defaults and
        # module-level defaults (in that priority order) into self.args.
        if not name and 'name' in self.defaults:
            args['name'] = self.defaults['name']
        if isinstance(name, types.StringType):
            args['name'] = name
        else:
            if len(name) == 1:
                if name[0]:
                    args['name'] = name[0]
        for i in defaults.keys():
            if i not in args:
                if i in self.defaults:
                    args[i] = self.defaults[i]
                else:
                    args[i] = defaults[i]
        # Normalize a single server string into a one-element list.
        if isinstance(args['server'], types.StringType):
            args['server'] = [args['server']]
        self.args = args

    def socketInit(self, a, b):
        # a = address family, b = socket type (overridden by the async subclass).
        self.s = socket.socket(a, b)

    def processUDPReply(self):
        """Wait (with timeout) for a UDP reply and parse it."""
        import time
        import select
        if self.args['timeout'] > 0:
            r, w, e = select.select([self.s], [], [], self.args['timeout'])
            if not len(r):
                raise DNSError('Timeout')
        self.reply = self.s.recv(1024)
        self.time_finish = time.time()
        # Record which nameserver actually answered.
        self.args['server'] = self.ns
        return self.processReply()

    def processTCPReply(self):
        """Read a length-prefixed TCP reply and parse it."""
        import time
        import Lib
        self.f = self.s.makefile('r')
        # TCP DNS messages are prefixed with a 16-bit length.
        header = self.f.read(2)
        if len(header) < 2:
            raise DNSError('EOF')
        count = Lib.unpack16bit(header)
        self.reply = self.f.read(count)
        if len(self.reply) != count:
            raise DNSError('incomplete reply')
        self.time_finish = time.time()
        self.args['server'] = self.ns
        return self.processReply()

    def processReply(self):
        """Unpack self.reply into a Lib.DnsResult."""
        import Lib
        self.args['elapsed'] = (self.time_finish - self.time_start) * 1000
        u = Lib.Munpacker(self.reply)
        r = Lib.DnsResult(u, self.args)
        r.args = self.args
        # self.args=None # mark this DnsRequest object as used.
        return r
    #### TODO TODO TODO ####
    # if protocol == 'tcp' and qtype == Type.AXFR:
    # while 1:
    # header = f.read(2)
    # if len(header) < 2:
    # print '========== EOF =========='
    # break
    # count = Lib.unpack16bit(header)
    # if not count:
    # print '========== ZERO COUNT =========='
    # break
    # print '========== NEXT =========='
    # reply = f.read(count)
    # if len(reply) != count:
    # print '*** Incomplete reply ***'
    # break
    # u = Lib.Munpacker(reply)
    # Lib.dumpM(u)

    def conn(self):
        self.s.connect((self.ns, self.port))

    def req(self, *name, **args):
        " needs a refactoring "
        # Build the query packet and send it over UDP or TCP; returns the
        # parsed response (or None when async).
        import time
        import Lib
        self.argparse(name, args)
        # if not self.args:
        # raise DNSError,'reinitialize request before reuse'
        protocol = self.args['protocol']
        self.port = self.args['port']
        opcode = self.args['opcode']
        rd = self.args['rd']
        server = self.args['server']
        # Accept qtype as a string name (e.g. 'soa') or a numeric constant.
        if isinstance(self.args['qtype'], types.StringType):
            try:
                qtype = getattr(Type, string.upper(self.args['qtype']))
            except AttributeError:
                raise DNSError('unknown query type')
        else:
            qtype = self.args['qtype']
        if 'name' not in self.args:
            print self.args
            raise DNSError('nothing to lookup')
        qname = self.args['name']
        if qtype == Type.AXFR:
            # Zone transfers do not fit in a UDP datagram.
            print 'Query type AXFR, protocol forced to TCP'
            protocol = 'tcp'
        # print 'QTYPE %d(%s)' % (qtype, Type.typestr(qtype))
        m = Lib.Mpacker()
        # jesus. keywords and default args would be good. TODO.
        m.addHeader(0,
                    0, opcode, 0, 0, rd, 0, 0, 0,
                    1, 0, 0, 0)
        m.addQuestion(qname, qtype, Class.IN)
        self.request = m.getbuf()
        try:
            if protocol == 'udp':
                self.sendUDPRequest(server)
            else:
                self.sendTCPRequest(server)
        except socket.error as reason:
            raise DNSError(reason)
        if self.async:
            return None
        else:
            return self.response

    def sendUDPRequest(self, server):
        "refactor me"
        # Try each nameserver in turn until one responds.
        self.response = None
        self.socketInit(socket.AF_INET, socket.SOCK_DGRAM)
        for self.ns in server:
            try:
                # TODO. Handle timeouts &c correctly (RFC)
                #self.s.connect((self.ns, self.port))
                self.conn()
                self.time_start = time.time()
                if not self.async:
                    self.s.send(self.request)
                    self.response = self.processUDPReply()
            # except socket.error:
            # NOTE(review): `except None:` can never match an exception, so
            # any socket error aborts instead of trying the next server —
            # the commented-out `except socket.error:` above looks like the
            # intended behavior. Left unchanged here (doc-only pass).
            except None:
                continue
            break
        if not self.response:
            if not self.async:
                raise DNSError('no working nameservers found')

    def sendTCPRequest(self, server):
        " do the work of sending a TCP request "
        # Same failover loop as UDP, but with a fresh stream socket per
        # server and a 16-bit length prefix on the request.
        import time
        import Lib
        self.response = None
        for self.ns in server:
            try:
                self.socketInit(socket.AF_INET, socket.SOCK_STREAM)
                self.time_start = time.time()
                self.conn()
                self.s.send(Lib.pack16bit(len(self.request)) + self.request)
                self.s.shutdown(1)
                self.response = self.processTCPReply()
            except socket.error:
                continue
            break
        if not self.response:
            raise DNSError('no working nameservers found')

# class DnsAsyncRequest(DnsRequest):


class DnsAsyncRequest(DnsRequest, asyncore.dispatcher_with_send):

    " an asynchronous request object. out of date, probably broken "

    def __init__(self, *name, **args):
        DnsRequest.__init__(self, *name, **args)
        # XXX todo
        if 'done' in args and args['done']:
            self.donefunc = args['done']
        else:
            self.donefunc = self.showResult
        # self.realinit(name,args) # XXX todo
        self.async = 1

    def conn(self):
        import time
        # Non-blocking connect via asyncore; the reply arrives in handle_read.
        self.connect((self.ns, self.port))
        self.time_start = time.time()
        if 'start' in self.args and self.args['start']:
            asyncore.dispatcher.go(self)

    def socketInit(self, a, b):
        self.create_socket(a, b)
        asyncore.dispatcher.__init__(self)
        # The dispatcher itself plays the role of self.s used by the base class.
        self.s = self

    def handle_read(self):
        if self.args['protocol'] == 'udp':
            self.response = self.processUDPReply()
            if self.donefunc:
                self.donefunc(*(self,))

    def handle_connect(self):
        self.send(self.request)

    def handle_write(self):
        pass

    def showResult(self, *s):
        self.response.show()

#
# $Log: Base.py,v $
# Revision 1.12.2.4 2007/05/22 20:28:31 customdesigned
# Missing import Lib
#
# Revision 1.12.2.3 2007/05/22 20:25:52 customdesigned
# Use socket.inetntoa,inetaton.
#
# Revision 1.12.2.2 2007/05/22 20:21:46 customdesigned
# Trap socket error
#
# Revision 1.12.2.1 2007/05/22 20:19:35 customdesigned
# Skip bogus but non-empty lines in resolv.conf
#
# Revision 1.12 2002/04/23 06:04:27 anthonybaxter
# attempt to refactor the DNSRequest.req method a little. after doing a bit
# of this, I've decided to bite the bullet and just rewrite the puppy. will
# be checkin in some design notes, then unit tests and then writing the sod.
303 | # 304 | # Revision 1.11 2002/03/19 13:05:02 anthonybaxter 305 | # converted to class based exceptions (there goes the python1.4 compatibility :) 306 | # 307 | # removed a quite gross use of 'eval()'. 308 | # 309 | # Revision 1.10 2002/03/19 12:41:33 anthonybaxter 310 | # tabnannied and reindented everything. 4 space indent, no tabs. 311 | # yay. 312 | # 313 | # Revision 1.9 2002/03/19 12:26:13 anthonybaxter 314 | # death to leading tabs. 315 | # 316 | # Revision 1.8 2002/03/19 10:30:33 anthonybaxter 317 | # first round of major bits and pieces. The major stuff here (summarised 318 | # from my local, off-net CVS server :/ this will cause some oddities with 319 | # the 320 | # 321 | # tests/testPackers.py: 322 | # a large slab of unit tests for the packer and unpacker code in DNS.Lib 323 | # 324 | # DNS/Lib.py: 325 | # placeholder for addSRV. 326 | # added 'klass' to addA, make it the same as the other A* records. 327 | # made addTXT check for being passed a string, turn it into a length 1 list. 328 | # explicitly check for adding a string of length > 255 (prohibited). 329 | # a bunch of cleanups from a first pass with pychecker 330 | # new code for pack/unpack. the bitwise stuff uses struct, for a smallish 331 | # (disappointly small, actually) improvement, while addr2bin is much 332 | # much faster now. 333 | # 334 | # DNS/Base.py: 335 | # added DiscoverNameServers. This automatically does the right thing 336 | # on unix/ win32. No idea how MacOS handles this. *sigh* 337 | # Incompatible change: Don't use ParseResolvConf on non-unix, use this 338 | # function, instead! 339 | # a bunch of cleanups from a first pass with pychecker 340 | # 341 | # Revision 1.5 2001/08/09 09:22:28 anthonybaxter 342 | # added what I hope is win32 resolver lookup support. 
I'll need to try 343 | # and figure out how to get the CVS checkout onto my windows machine to 344 | # make sure it works (wow, doing something other than games on the 345 | # windows machine :) 346 | # 347 | # Code from Wolfgang.Strobl@gmd.de 348 | # win32dns.py from 349 | # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66260 350 | # 351 | # Really, ParseResolvConf() should be renamed "FindNameServers" or 352 | # some such. 353 | # 354 | # Revision 1.4 2001/08/09 09:08:55 anthonybaxter 355 | # added identifying header to top of each file 356 | # 357 | # Revision 1.3 2001/07/19 07:20:12 anthony 358 | # Handle blank resolv.conf lines. 359 | # Patch from Bastian Kleineidam 360 | # 361 | # Revision 1.2 2001/07/19 06:57:07 anthony 362 | # cvs keywords added 363 | # 364 | # 365 | -------------------------------------------------------------------------------- /modules/lib/markup.py: -------------------------------------------------------------------------------- 1 | # This code is in the public domain, it comes 2 | # with absolutely no warranty and you can do 3 | # absolutely whatever you want with it. 4 | 5 | __date__ = '17 May 2007' 6 | __version__ = '1.7' 7 | __doc__ = """ 8 | This is markup.py - a Python module that attempts to 9 | make it easier to generate HTML/XML from a Python program 10 | in an intuitive, lightweight, customizable and pythonic way. 11 | 12 | The code is in the public domain. 13 | 14 | Version: %s as of %s. 15 | 16 | Documentation and further info is at http://markup.sourceforge.net/ 17 | 18 | Please send bug reports, feature requests, enhancement 19 | ideas or questions to nogradi at gmail dot com. 20 | 21 | Installation: drop markup.py somewhere into your Python path. 
# (The module docstring and header of markup.py precede this point.)

import string

# Python 2/3 compatibility: keep the Python 2 names this module was written
# against available on Python 3 without changing Python 2 behaviour.
try:
    basestring
except NameError:  # Python 3
    basestring = str


class element:

    """This class handles the addition of a new element."""

    def __init__(self, tag, case='lower', parent=None):
        self.parent = parent

        if case == 'lower':
            self.tag = tag.lower()
        else:
            self.tag = tag.upper()

    def __call__(self, *args, **kwargs):
        """Render the element; append it to the parent page's content, or
        return it as a string when there is no parent (oneliner mode).

        Raises ArgumentError for more than one positional argument,
        ClosingError for content given to a no-closing-tag element,
        DeprecationError / InvalidElementError per the parent's mode.
        """
        if len(args) > 1:
            raise ArgumentError(self.tag)

        # if class_ was defined in parent it should be added to every element
        if self.parent is not None and self.parent.class_ is not None:
            if 'class_' not in kwargs:
                kwargs['class_'] = self.parent.class_

        if self.parent is None and len(args) == 1:
            x = [self.render(self.tag, False, myarg, mydict)
                 for myarg, mydict in _argsdicts(args, kwargs)]
            return '\n'.join(x)
        elif self.parent is None and len(args) == 0:
            x = [self.render(self.tag, True, myarg, mydict)
                 for myarg, mydict in _argsdicts(args, kwargs)]
            return '\n'.join(x)

        if self.tag in self.parent.twotags:
            for myarg, mydict in _argsdicts(args, kwargs):
                self.render(self.tag, False, myarg, mydict)
        elif self.tag in self.parent.onetags:
            if len(args) == 0:
                for myarg, mydict in _argsdicts(args, kwargs):
                    # here myarg is always None, because len( args ) = 0
                    self.render(self.tag, True, myarg, mydict)
            else:
                raise ClosingError(self.tag)
        elif self.parent.mode == 'strict_html' and self.tag in self.parent.deptags:
            raise DeprecationError(self.tag)
        else:
            raise InvalidElementError(self.tag, self.parent.mode)

    def render(self, tag, single, between, kwargs):
        """Append the actual tags to content (or return them when parentless)."""

        out = "<%s" % tag
        # .iteritems() in the original; .items() behaves identically on 2 and 3
        for key, value in kwargs.items():
            # when value is None that means stuff like <... checked>
            if value is not None:
                # strip this so class_ will mean class, etc.
                key = key.strip('_')
                # special cases, maybe change _ to - overall?
                if key == 'http_equiv':
                    key = 'http-equiv'
                elif key == 'accept_charset':
                    key = 'accept-charset'
                out = "%s %s=\"%s\"" % (out, key, escape(value))
            else:
                out = "%s %s" % (out, key)
        if between is not None:
            # BUGFIX: the closing-tag part of this format string was stripped
            # by the HTML dump, leaving 2 placeholders for 3 arguments
            # (TypeError at runtime); reconstructed as <tag>between</tag>.
            out = "%s>%s</%s>" % (out, between, tag)
        else:
            if single:
                out = "%s />" % out
            else:
                out = "%s>" % out
        if self.parent is not None:
            self.parent.content.append(out)
        else:
            return out

    def close(self):
        """Append a closing tag unless element has only opening tag."""

        if self.tag in self.parent.twotags:
            # reconstructed: the "</%s>" literal was stripped in the dump
            self.parent.content.append("</%s>" % self.tag)
        elif self.tag in self.parent.onetags:
            raise ClosingError(self.tag)
        elif self.parent.mode == 'strict_html' and self.tag in self.parent.deptags:
            raise DeprecationError(self.tag)

    def open(self, **kwargs):
        """Append an opening tag."""

        if self.tag in self.parent.twotags or self.tag in self.parent.onetags:
            self.render(self.tag, False, None, kwargs)
        elif self.parent.mode == 'strict_html' and self.tag in self.parent.deptags:
            # BUGFIX: was "self.mode" — element has no mode attribute
            # (AttributeError); every sibling branch reads self.parent.mode.
            raise DeprecationError(self.tag)


class page:

    """This is our main class representing a document. Elements are added
    as attributes of an instance of this class."""

    def __init__(self, mode='strict_html', case='lower',
                 onetags=None, twotags=None, separator='\n', class_=None):
        """Stuff that effects the whole document.

        mode -- 'strict_html' for HTML 4.01 (default)
                'html' alias for 'strict_html'
                'loose_html' to allow some deprecated elements
                'xml' to allow arbitrary elements

        case -- 'lower' element names will be printed in lower case (default)
                'upper' they will be printed in upper case

        onetags -- list or tuple of valid elements with opening tags only
        twotags -- list or tuple of valid elements with both opening and
                   closing tags; these two keyword arguments may be used to
                   select the set of valid elements in 'xml' mode, invalid
                   elements will raise appropriate exceptions

        separator -- string to place between added elements, defaults to newline

        class_ -- a class that will be added to every element if defined"""

        valid_onetags = [
            "AREA",
            "BASE",
            "BR",
            "COL",
            "FRAME",
            "HR",
            "IMG",
            "INPUT",
            "LINK",
            "META",
            "PARAM"]
        valid_twotags = [
            "A", "ABBR", "ACRONYM", "ADDRESS", "B", "BDO", "BIG", "BLOCKQUOTE", "BODY", "BUTTON",
            "CAPTION", "CITE", "CODE", "COLGROUP", "DD", "DEL", "DFN", "DIV", "DL", "DT", "EM", "FIELDSET",
            "FORM", "FRAMESET", "H1", "H2", "H3", "H4", "H5", "H6", "HEAD", "HTML", "I", "IFRAME", "INS",
            "KBD", "LABEL", "LEGEND", "LI", "MAP", "NOFRAMES", "NOSCRIPT", "OBJECT", "OL", "OPTGROUP",
            "OPTION", "P", "PRE", "Q", "SAMP", "SCRIPT", "SELECT", "SMALL", "SPAN", "STRONG", "STYLE",
            "SUB", "SUP", "TABLE", "TBODY", "TD", "TEXTAREA", "TFOOT", "TH", "THEAD", "TITLE", "TR",
            "TT", "UL", "VAR"]
        deprecated_onetags = ["BASEFONT", "ISINDEX"]
        deprecated_twotags = [
            "APPLET",
            "CENTER",
            "DIR",
            "FONT",
            "MENU",
            "S",
            "STRIKE",
            "U"]

        self.header = []
        self.content = []
        self.footer = []
        self.case = case
        self.separator = separator

        # init( ) sets it to True so we know that </body></html> has to be
        # printed at the end
        self._full = False
        self.class_ = class_

        # map(string.lower, ...) in the original; string.lower is gone in
        # Python 3 — the list comprehension is behaviour-identical.
        if mode == 'strict_html' or mode == 'html':
            self.onetags = valid_onetags
            self.onetags += [t.lower() for t in self.onetags]
            self.twotags = valid_twotags
            self.twotags += [t.lower() for t in self.twotags]
            self.deptags = deprecated_onetags + deprecated_twotags
            self.deptags += [t.lower() for t in self.deptags]
            self.mode = 'strict_html'
        elif mode == 'loose_html':
            self.onetags = valid_onetags + deprecated_onetags
            self.onetags += [t.lower() for t in self.onetags]
            self.twotags = valid_twotags + deprecated_twotags
            self.twotags += [t.lower() for t in self.twotags]
            self.mode = mode
        elif mode == 'xml':
            if onetags and twotags:
                self.onetags = onetags
                self.twotags = twotags
            elif (onetags and not twotags) or (twotags and not onetags):
                raise CustomizationError()
            else:
                # russell() contains everything: arbitrary elements allowed
                self.onetags = russell()
                self.twotags = russell()
            self.mode = mode
        else:
            raise ModeError(mode)

    def __getattr__(self, attr):
        # Don't manufacture elements for dunder lookups (copy/pickle etc.)
        if attr.startswith("__") and attr.endswith("__"):
            raise AttributeError(attr)
        return element(attr, case=self.case, parent=self)

    def __str__(self):

        # BUGFIX: end tags were stripped to ['', ''] in the dump;
        # reconstructed as the closing body/html pair written by init().
        if self._full and (self.mode == 'strict_html' or self.mode == 'loose_html'):
            end = ['</body>', '</html>']
        else:
            end = []

        return (
            self.separator.join(
                self.header +
                self.content +
                self.footer +
                end)
        )

    def __call__(self, escape=False):
        """Return the document as a string.

        escape -- False print normally
                  True replace < and > by &lt; and &gt;
                  the default escape sequences in most browsers"""

        if escape:
            return _escape(self.__str__())
        else:
            return self.__str__()

    def add(self, text):
        """This is an alias to addcontent."""
        self.addcontent(text)

    def addfooter(self, text):
        """Add some text to the bottom of the document"""
        self.footer.append(text)

    def addheader(self, text):
        """Add some text to the top of the document"""
        self.header.append(text)

    def addcontent(self, text):
        """Add some text to the main part of the document"""
        self.content.append(text)

    def init(self, lang='en', css=None, metainfo=None, title=None, header=None,
             footer=None, charset=None, encoding=None, doctype=None, bodyattrs=None, script=None):
        """This method is used for complete documents with appropriate
        doctype, encoding, title, etc information. For an HTML/XML snippet
        omit this method.

        lang -- language, usually a two character string, will appear as the
                lang attribute of the html element (ignored in xml mode)

        css -- Cascading Style Sheet filename as a string or a list of
               strings for multiple css files (ignored in xml mode)

        metainfo -- a dictionary in the form { 'name':'content' } to be
                    inserted into meta element(s) (ignored in xml mode)

        bodyattrs -- a dictionary in the form { 'key':'value', ... } which
                     will be added as attributes of the body element
                     (ignored in xml mode)

        script -- dictionary containing src:type pairs rendered as script
                  elements inside head

        title -- the title of the document as a string to be inserted into
                 a title element (ignored in xml mode)

        header -- some text to be inserted right after the opening body
                  element (ignored in xml mode)

        footer -- some text to be inserted right before the closing body
                  element (ignored in xml mode)

        charset -- a string defining the character set, will be inserted
                   into a meta http-equiv element (ignored in xml mode)

        encoding -- a string defining the encoding, will be put into the
                    first line of the document in xml mode (ignored in html
                    mode)

        doctype -- the document type string; defaults to the HTML 4.01
                   declaration in html mode (ignored in xml mode)"""

        self._full = True

        if self.mode == 'strict_html' or self.mode == 'loose_html':
            if doctype is None:
                # NOTE(review): the default DOCTYPE literal was stripped by
                # the dump; reconstructed as HTML 4.01 Strict — confirm
                # against upstream markup.py 1.7.
                doctype = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">"""
            self.header.append(doctype)
            self.html(lang=lang)
            self.head()
            if charset is not None:
                self.meta(
                    http_equiv='Content-Type',
                    content="text/html; charset=%s" %
                    charset)
            if metainfo is not None:
                self.metainfo(metainfo)
            if css is not None:
                self.css(css)
            if title is not None:
                self.title(title)
            if script is not None:
                self.scripts(script)
            self.head.close()
            if bodyattrs is not None:
                self.body(**bodyattrs)
            else:
                self.body()
            if header is not None:
                self.content.append(header)
            if footer is not None:
                self.footer.append(footer)

        elif self.mode == 'xml':
            if doctype is None:
                # BUGFIX: the XML declaration literals were stripped to "",
                # making '"" % encoding' raise TypeError; reconstructed.
                if encoding is not None:
                    doctype = "<?xml version='1.0' encoding='%s' ?>" % encoding
                else:
                    doctype = "<?xml version='1.0' ?>"
            self.header.append(doctype)

    def css(self, filelist):
        """This convenience function is only useful for html.
        It adds css stylesheet(s) to the document via link elements."""

        if isinstance(filelist, basestring):
            self.link(
                href=filelist,
                rel='stylesheet',
                type='text/css',
                media='all')
        else:
            for file in filelist:
                self.link(
                    href=file,
                    rel='stylesheet',
                    type='text/css',
                    media='all')

    def metainfo(self, mydict):
        """This convenience function is only useful for html.
        It adds meta information via meta elements, the argument is
        a dictionary of the form { 'name':'content' }."""

        if isinstance(mydict, dict):
            for name, content in mydict.items():
                self.meta(name=name, content=content)
        else:
            raise TypeError(
                "Metainfo should be called with a dictionary argument of name:content pairs.")

    def scripts(self, mydict):
        """Only useful in html; mydict is a dictionary of src:type pairs that
        will be rendered as script elements inside head."""

        if isinstance(mydict, dict):
            for src, type in mydict.items():
                self.script('', src=src, type='text/%s' % type)
        else:
            raise TypeError(
                "Script should be given a dictionary of src:type pairs.")


class _oneliner:

    """An instance of oneliner returns a string corresponding to one element.
    This class can be used to write 'oneliners' that return a string
    immediately so there is no need to instantiate the page class."""

    def __init__(self, case='lower'):
        self.case = case

    def __getattr__(self, attr):
        if attr.startswith("__") and attr.endswith("__"):
            raise AttributeError(attr)
        return element(attr, case=self.case, parent=None)

oneliner = _oneliner(case='lower')
upper_oneliner = _oneliner(case='upper')


def _argsdicts(args, mydict):
    """A utility generator that pads argument list and dictionary values, will only be called with len( args ) = 0, 1."""

    if len(args) == 0:
        args = None,
    elif len(args) == 1:
        args = _totuple(args[0])
    else:
        raise Exception("We should have never gotten here.")

    mykeys = list(mydict.keys())
    # map(_totuple, ...) in the original; listed so it can be concatenated
    # below on Python 3 as well.
    myvalues = [_totuple(v) for v in mydict.values()]

    maxlength = max(map(len, [args] + myvalues))

    for i in range(maxlength):
        thisdict = {}
        for key, value in zip(mykeys, myvalues):
            try:
                thisdict[key] = value[i]
            except IndexError:
                # shorter value tuples are padded with their last element
                thisdict[key] = value[-1]
        try:
            thisarg = args[i]
        except IndexError:
            thisarg = args[-1]

        yield thisarg, thisdict


def _totuple(x):
    """Utility stuff to convert string, int, float, None or anything to a usable tuple."""

    if isinstance(x, basestring):
        out = x,
    elif isinstance(x, (int, float)):
        out = str(x),
    elif x is None:
        out = None,
    else:
        out = tuple(x)

    return out


def escape(text, newline=False):
    """Escape special html characters.

    BUGFIX: the entity names were stripped by the dump, leaving no-op
    replacements; reconstructed (&amp; &gt; &lt; &quot;).
    """

    if isinstance(text, basestring):
        if '&' in text:
            text = text.replace('&', '&amp;')
        if '>' in text:
            text = text.replace('>', '&gt;')
        if '<' in text:
            text = text.replace('<', '&lt;')
        if '\"' in text:
            text = text.replace('\"', '&quot;')
        if '\'' in text:
            text = text.replace('\'', '&quot;')
        if newline:
            if '\n' in text:
                # NOTE(review): replacement tag stripped in the dump;
                # reconstructed as a line break — confirm exact form.
                text = text.replace('\n', '<br/>')

    return text

_escape = escape


def unescape(text):
    """Inverse of escape."""

    if isinstance(text, basestring):
        if '&amp;' in text:
            text = text.replace('&amp;', '&')
        if '&gt;' in text:
            text = text.replace('&gt;', '>')
        if '&lt;' in text:
            text = text.replace('&lt;', '<')
        if '&quot;' in text:
            text = text.replace('&quot;', '\"')

    return text


class dummy:

    """A dummy class for attaching attributes."""
    pass

doctype = dummy()
# NOTE(review): the three DOCTYPE strings were stripped to "" by the dump;
# reconstructed from the W3C HTML 4.01 declarations.
doctype.frameset = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Frameset//EN" "http://www.w3.org/TR/html4/frameset.dtd">"""
doctype.strict = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">"""
doctype.loose = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">"""


class russell:

    """A dummy class that contains anything."""

    def __contains__(self, item):
        return True


class MarkupError(Exception):

    """All our exceptions subclass this."""

    def __str__(self):
        return self.message


class ClosingError(MarkupError):

    def __init__(self, tag):
        self.message = "The element '%s' does not accept non-keyword arguments (has no closing tag)." % tag


class OpeningError(MarkupError):

    def __init__(self, tag):
        self.message = "The element '%s' can not be opened." % tag


class ArgumentError(MarkupError):

    def __init__(self, tag):
        self.message = "The element '%s' was called with more than one non-keyword argument." % tag


class InvalidElementError(MarkupError):

    def __init__(self, tag, mode):
        self.message = "The element '%s' is not valid for your mode '%s'." % (
            tag,
            mode)


class DeprecationError(MarkupError):

    def __init__(self, tag):
        self.message = "The element '%s' is deprecated, instantiate markup.page with mode='loose_html' to allow it." % tag


class ModeError(MarkupError):

    def __init__(self, mode):
        self.message = "Mode '%s' is invalid, possible values: strict_html, loose_html, xml." % mode


class CustomizationError(MarkupError):

    def __init__(self):
        self.message = "If you customize the allowed elements, you must define both types 'onetags' and 'twotags'."

if __name__ == '__main__':
    print(__doc__)

# --------------------------------------------------------------------------------
# /magnifier_1.0-1_source.build:
# --------------------------------------------------------------------------------
# dpkg-buildpackage -rfakeroot -us -uc -ui -S
# dpkg-buildpackage: warning: using a gain-root-command while being root
# dpkg-buildpackage: info: source package magnifier
# dpkg-buildpackage: info: source version 1.0-1
# dpkg-buildpackage: info: source distribution stable
# dpkg-buildpackage: info: source changed by Humayun Ali Khan
#  dpkg-source --before-build .
#  fakeroot debian/rules clean
# dh clean --with python2 --system=pybuild
#    dh_auto_clean -O--system=pybuild
#    dh_autoreconf_clean -O--system=pybuild
#    dh_clean -O--system=pybuild
#  dpkg-source -b .
14 | dpkg-source: info: using source format '3.0 (quilt)' 15 | dpkg-source: info: building magnifier using existing ./magnifier_1.0.orig.tar.gz 16 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0 17 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/README.md, use --include-removal to override 18 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/update.py, use --include-removal to override 19 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/LICENSE, use --include-removal to override 20 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier.py, use --include-removal to override 21 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/install, use --include-removal to override 22 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/Dev 23 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/Dev/magnifier.png, use --include-removal to override 24 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/Dev/magnifier.desktop, use --include-removal to override 25 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules 26 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/theHarvester.py, use --include-removal to override 27 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/myparser.py, use --include-removal to override 28 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/sub.py, use --include-removal to override 29 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/goofile.py, use --include-removal to override 30 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/tests 31 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/tests/myparser_test.py, use --include-removal to override 32 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/discovery 33 | dpkg-source: warning: 
ignoring deletion of file magnifier_1.0/modules/discovery/twittersearch.pyc, use --include-removal to override 34 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleCSE.pyc, use --include-removal to override 35 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/netcraft.py, use --include-removal to override 36 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodansearch.pyc, use --include-removal to override 37 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/jigsaw.pyc, use --include-removal to override 38 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesets.pyc, use --include-removal to override 39 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yandexsearch.py, use --include-removal to override 40 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dogpilesearch.py, use --include-removal to override 41 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/pgpsearch.py, use --include-removal to override 42 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dogpilesearch.pyc, use --include-removal to override 43 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/__init__.py, use --include-removal to override 44 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/__init__.pyc, use --include-removal to override 45 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dnssearch-threads.py, use --include-removal to override 46 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/virustotal.py, use --include-removal to override 47 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/crtsh.py, use --include-removal to override 48 | 
dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/pgpsearch.pyc, use --include-removal to override 49 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/baidusearch.pyc, use --include-removal to override 50 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesearch.py, use --include-removal to override 51 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/bingsearch.pyc, use --include-removal to override 52 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleplussearch.pyc, use --include-removal to override 53 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/baidusearch.py, use --include-removal to override 54 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dnssearch.pyc, use --include-removal to override 55 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yahoosearch.py, use --include-removal to override 56 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/jigsaw.py, use --include-removal to override 57 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/IPy.pyc, use --include-removal to override 58 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/linkedinsearch.py, use --include-removal to override 59 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/exaleadsearch.pyc, use --include-removal to override 60 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/bingsearch.py, use --include-removal to override 61 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yandexsearch.pyc, use --include-removal to override 62 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yahoosearch.pyc, use 
--include-removal to override 63 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleplussearch.py, use --include-removal to override 64 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/netcraft.pyc, use --include-removal to override 65 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleCSE.py, use --include-removal to override 66 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dnssearch.py, use --include-removal to override 67 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesets.py, use --include-removal to override 68 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/linkedinsearch.pyc, use --include-removal to override 69 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/virustotal.pyc, use --include-removal to override 70 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/exaleadsearch.py, use --include-removal to override 71 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodansearch.py, use --include-removal to override 72 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesearch.pyc, use --include-removal to override 73 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/twittersearch.py, use --include-removal to override 74 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/asksearch.py, use --include-removal to override 75 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/crtsh.pyc, use --include-removal to override 76 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/IPy.py, use --include-removal to override 77 | dpkg-source: warning: ignoring deletion of directory 
magnifier_1.0/modules/discovery/shodan 78 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/api.pyc, use --include-removal to override 79 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/__init__.py, use --include-removal to override 80 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/api.py, use --include-removal to override 81 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/__init__.pyc, use --include-removal to override 82 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/wps.py, use --include-removal to override 83 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/discovery/DNS 84 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Status.py, use --include-removal to override 85 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Class.pyc, use --include-removal to override 86 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Base.pyc, use --include-removal to override 87 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Status.pyc, use --include-removal to override 88 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Type.py, use --include-removal to override 89 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/__init__.py, use --include-removal to override 90 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/__init__.pyc, use --include-removal to override 91 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Class.py, use --include-removal to override 92 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Base.py, use 
--include-removal to override 93 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/lazy.py, use --include-removal to override 94 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Lib.py, use --include-removal to override 95 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/lazy.pyc, use --include-removal to override 96 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Opcode.py, use --include-removal to override 97 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Opcode.pyc, use --include-removal to override 98 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Type.pyc, use --include-removal to override 99 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Lib.pyc, use --include-removal to override 100 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/win32dns.py, use --include-removal to override 101 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/lib 102 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/markup.pyc, use --include-removal to override 103 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/__init__.py, use --include-removal to override 104 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/__init__.pyc, use --include-removal to override 105 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/htmlExport.py, use --include-removal to override 106 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/graphs.py, use --include-removal to override 107 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/hostchecker.pyc, use --include-removal to override 108 | dpkg-source: warning: ignoring deletion of 
file magnifier_1.0/modules/lib/htmlExport.pyc, use --include-removal to override 109 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/graphs.pyc, use --include-removal to override 110 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/markup.py, use --include-removal to override 111 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/hostchecker.py, use --include-removal to override 112 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/snap 113 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/snap/snapcraft.yaml, use --include-removal to override 114 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/usr 115 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/usr/share 116 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/usr/share/applications 117 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/usr/share/applications/magnifier.png, use --include-removal to override 118 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/usr/share/applications/magnifier.desktop, use --include-removal to override 119 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier 120 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/README.md, use --include-removal to override 121 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/update.py, use --include-removal to override 122 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/LICENSE, use --include-removal to override 123 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/magnifier.py, use --include-removal to override 124 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/install, use --include-removal to override 125 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/Dev 126 | 
dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/Dev/magnifier.png, use --include-removal to override 127 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/Dev/magnifier.desktop, use --include-removal to override 128 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules 129 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/theHarvester.py, use --include-removal to override 130 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/myparser.py, use --include-removal to override 131 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/sub.py, use --include-removal to override 132 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/goofile.py, use --include-removal to override 133 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/tests 134 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/tests/myparser_test.py, use --include-removal to override 135 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/discovery 136 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/twittersearch.pyc, use --include-removal to override 137 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googleCSE.pyc, use --include-removal to override 138 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/netcraft.py, use --include-removal to override 139 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodansearch.pyc, use --include-removal to override 140 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/jigsaw.pyc, use --include-removal to override 141 | dpkg-source: warning: ignoring deletion of 
file magnifier_1.0/magnifier/modules/discovery/googlesets.pyc, use --include-removal to override 142 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yandexsearch.py, use --include-removal to override 143 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dogpilesearch.py, use --include-removal to override 144 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/pgpsearch.py, use --include-removal to override 145 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dogpilesearch.pyc, use --include-removal to override 146 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/__init__.py, use --include-removal to override 147 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/__init__.pyc, use --include-removal to override 148 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dnssearch-threads.py, use --include-removal to override 149 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/virustotal.py, use --include-removal to override 150 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/crtsh.py, use --include-removal to override 151 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/pgpsearch.pyc, use --include-removal to override 152 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/baidusearch.pyc, use --include-removal to override 153 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googlesearch.py, use --include-removal to override 154 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/bingsearch.pyc, use --include-removal to override 155 | 
dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googleplussearch.pyc, use --include-removal to override 156 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/baidusearch.py, use --include-removal to override 157 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dnssearch.pyc, use --include-removal to override 158 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yahoosearch.py, use --include-removal to override 159 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/jigsaw.py, use --include-removal to override 160 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/IPy.pyc, use --include-removal to override 161 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/linkedinsearch.py, use --include-removal to override 162 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/exaleadsearch.pyc, use --include-removal to override 163 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/bingsearch.py, use --include-removal to override 164 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yandexsearch.pyc, use --include-removal to override 165 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yahoosearch.pyc, use --include-removal to override 166 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googleplussearch.py, use --include-removal to override 167 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/netcraft.pyc, use --include-removal to override 168 | dpkg-source: warning: ignoring deletion of file 
magnifier_1.0/magnifier/modules/discovery/googleCSE.py, use --include-removal to override 169 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dnssearch.py, use --include-removal to override 170 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googlesets.py, use --include-removal to override 171 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/linkedinsearch.pyc, use --include-removal to override 172 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/virustotal.pyc, use --include-removal to override 173 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/exaleadsearch.py, use --include-removal to override 174 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodansearch.py, use --include-removal to override 175 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googlesearch.pyc, use --include-removal to override 176 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/twittersearch.py, use --include-removal to override 177 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/asksearch.py, use --include-removal to override 178 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/crtsh.pyc, use --include-removal to override 179 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/IPy.py, use --include-removal to override 180 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/discovery/shodan 181 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/api.pyc, use --include-removal to override 182 | dpkg-source: warning: ignoring deletion of file 
magnifier_1.0/magnifier/modules/discovery/shodan/__init__.py, use --include-removal to override 183 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/api.py, use --include-removal to override 184 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/__init__.pyc, use --include-removal to override 185 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/wps.py, use --include-removal to override 186 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/discovery/DNS 187 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Status.py, use --include-removal to override 188 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Class.pyc, use --include-removal to override 189 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Base.pyc, use --include-removal to override 190 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Status.pyc, use --include-removal to override 191 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Type.py, use --include-removal to override 192 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/__init__.py, use --include-removal to override 193 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/__init__.pyc, use --include-removal to override 194 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Class.py, use --include-removal to override 195 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Base.py, use --include-removal to override 196 | dpkg-source: warning: ignoring deletion of 
file magnifier_1.0/magnifier/modules/discovery/DNS/lazy.py, use --include-removal to override 197 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Lib.py, use --include-removal to override 198 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/lazy.pyc, use --include-removal to override 199 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Opcode.py, use --include-removal to override 200 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Opcode.pyc, use --include-removal to override 201 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Type.pyc, use --include-removal to override 202 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Lib.pyc, use --include-removal to override 203 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/win32dns.py, use --include-removal to override 204 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/lib 205 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/markup.pyc, use --include-removal to override 206 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/__init__.py, use --include-removal to override 207 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/__init__.pyc, use --include-removal to override 208 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/htmlExport.py, use --include-removal to override 209 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/graphs.py, use --include-removal to override 210 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/hostchecker.pyc, use 
--include-removal to override 211 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/htmlExport.pyc, use --include-removal to override 212 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/graphs.pyc, use --include-removal to override 213 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/markup.py, use --include-removal to override 214 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/hostchecker.py, use --include-removal to override 215 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/snap 216 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/snap/snapcraft.yaml, use --include-removal to override 217 | dpkg-source: info: building magnifier in magnifier_1.0-1.debian.tar.xz 218 | dpkg-source: info: building magnifier in magnifier_1.0-1.dsc 219 | dpkg-genbuildinfo --build=source 220 | dpkg-genchanges --build=source >../magnifier_1.0-1_source.changes 221 | dpkg-genchanges: error: badly formed line in files list file, line 1 222 | dpkg-buildpackage: error: dpkg-genchanges subprocess returned exit status 25 223 | -------------------------------------------------------------------------------- /magnifier_1.0-1_amd64.build: -------------------------------------------------------------------------------- 1 | dpkg-buildpackage -rfakeroot -us -uc -ui 2 | dpkg-buildpackage: warning: using a gain-root-command while being root 3 | dpkg-buildpackage: info: source package magnifier 4 | dpkg-buildpackage: info: source version 1.0-1 5 | dpkg-buildpackage: info: source distribution stable 6 | dpkg-buildpackage: info: source changed by Humayun Ali Khan 7 | dpkg-source --before-build . 
8 | dpkg-buildpackage: info: host architecture amd64 9 | fakeroot debian/rules clean 10 | dh clean --with python2 --system=pybuild 11 | dh_auto_clean -O--system=pybuild 12 | dh_autoreconf_clean -O--system=pybuild 13 | dh_clean -O--system=pybuild 14 | dpkg-source -b . 15 | dpkg-source: info: using source format '3.0 (quilt)' 16 | dpkg-source: info: building magnifier using existing ./magnifier_1.0.orig.tar.gz 17 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0 18 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/README.md, use --include-removal to override 19 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/update.py, use --include-removal to override 20 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/LICENSE, use --include-removal to override 21 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier.py, use --include-removal to override 22 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/install, use --include-removal to override 23 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/Dev 24 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/Dev/magnifier.png, use --include-removal to override 25 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/Dev/magnifier.desktop, use --include-removal to override 26 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules 27 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/theHarvester.py, use --include-removal to override 28 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/myparser.py, use --include-removal to override 29 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/sub.py, use --include-removal to override 30 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/goofile.py, use --include-removal to override 31 | dpkg-source: warning: ignoring deletion of directory 
magnifier_1.0/modules/tests 32 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/tests/myparser_test.py, use --include-removal to override 33 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/discovery 34 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/twittersearch.pyc, use --include-removal to override 35 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleCSE.pyc, use --include-removal to override 36 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/netcraft.py, use --include-removal to override 37 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodansearch.pyc, use --include-removal to override 38 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/jigsaw.pyc, use --include-removal to override 39 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesets.pyc, use --include-removal to override 40 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yandexsearch.py, use --include-removal to override 41 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dogpilesearch.py, use --include-removal to override 42 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/pgpsearch.py, use --include-removal to override 43 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dogpilesearch.pyc, use --include-removal to override 44 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/__init__.py, use --include-removal to override 45 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/__init__.pyc, use --include-removal to override 46 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dnssearch-threads.py, use --include-removal to 
override 47 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/virustotal.py, use --include-removal to override 48 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/crtsh.py, use --include-removal to override 49 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/pgpsearch.pyc, use --include-removal to override 50 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/baidusearch.pyc, use --include-removal to override 51 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesearch.py, use --include-removal to override 52 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/bingsearch.pyc, use --include-removal to override 53 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleplussearch.pyc, use --include-removal to override 54 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/baidusearch.py, use --include-removal to override 55 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dnssearch.pyc, use --include-removal to override 56 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yahoosearch.py, use --include-removal to override 57 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/jigsaw.py, use --include-removal to override 58 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/IPy.pyc, use --include-removal to override 59 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/linkedinsearch.py, use --include-removal to override 60 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/exaleadsearch.pyc, use --include-removal to override 61 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/bingsearch.py, use 
--include-removal to override 62 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yandexsearch.pyc, use --include-removal to override 63 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/yahoosearch.pyc, use --include-removal to override 64 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleplussearch.py, use --include-removal to override 65 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/netcraft.pyc, use --include-removal to override 66 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googleCSE.py, use --include-removal to override 67 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/dnssearch.py, use --include-removal to override 68 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesets.py, use --include-removal to override 69 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/linkedinsearch.pyc, use --include-removal to override 70 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/virustotal.pyc, use --include-removal to override 71 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/exaleadsearch.py, use --include-removal to override 72 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodansearch.py, use --include-removal to override 73 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/googlesearch.pyc, use --include-removal to override 74 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/twittersearch.py, use --include-removal to override 75 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/asksearch.py, use --include-removal to override 76 | dpkg-source: warning: ignoring deletion of file 
magnifier_1.0/modules/discovery/crtsh.pyc, use --include-removal to override 77 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/IPy.py, use --include-removal to override 78 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/discovery/shodan 79 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/api.pyc, use --include-removal to override 80 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/__init__.py, use --include-removal to override 81 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/api.py, use --include-removal to override 82 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/__init__.pyc, use --include-removal to override 83 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/shodan/wps.py, use --include-removal to override 84 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/discovery/DNS 85 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Status.py, use --include-removal to override 86 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Class.pyc, use --include-removal to override 87 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Base.pyc, use --include-removal to override 88 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Status.pyc, use --include-removal to override 89 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Type.py, use --include-removal to override 90 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/__init__.py, use --include-removal to override 91 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/__init__.pyc, use 
--include-removal to override 92 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Class.py, use --include-removal to override 93 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Base.py, use --include-removal to override 94 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/lazy.py, use --include-removal to override 95 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Lib.py, use --include-removal to override 96 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/lazy.pyc, use --include-removal to override 97 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Opcode.py, use --include-removal to override 98 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Opcode.pyc, use --include-removal to override 99 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Type.pyc, use --include-removal to override 100 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/Lib.pyc, use --include-removal to override 101 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/discovery/DNS/win32dns.py, use --include-removal to override 102 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/modules/lib 103 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/markup.pyc, use --include-removal to override 104 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/__init__.py, use --include-removal to override 105 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/__init__.pyc, use --include-removal to override 106 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/htmlExport.py, use --include-removal to override 107 | dpkg-source: warning: ignoring 
deletion of file magnifier_1.0/modules/lib/graphs.py, use --include-removal to override 108 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/hostchecker.pyc, use --include-removal to override 109 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/htmlExport.pyc, use --include-removal to override 110 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/graphs.pyc, use --include-removal to override 111 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/markup.py, use --include-removal to override 112 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/modules/lib/hostchecker.py, use --include-removal to override 113 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/snap 114 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/snap/snapcraft.yaml, use --include-removal to override 115 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/usr 116 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/usr/share 117 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/usr/share/applications 118 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/usr/share/applications/magnifier.png, use --include-removal to override 119 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/usr/share/applications/magnifier.desktop, use --include-removal to override 120 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier 121 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/README.md, use --include-removal to override 122 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/update.py, use --include-removal to override 123 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/LICENSE, use --include-removal to override 124 | dpkg-source: warning: ignoring deletion of file 
magnifier_1.0/magnifier/magnifier.py, use --include-removal to override 125 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/install, use --include-removal to override 126 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/Dev 127 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/Dev/magnifier.png, use --include-removal to override 128 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/Dev/magnifier.desktop, use --include-removal to override 129 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules 130 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/theHarvester.py, use --include-removal to override 131 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/myparser.py, use --include-removal to override 132 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/sub.py, use --include-removal to override 133 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/goofile.py, use --include-removal to override 134 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/tests 135 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/tests/myparser_test.py, use --include-removal to override 136 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/discovery 137 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/twittersearch.pyc, use --include-removal to override 138 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googleCSE.pyc, use --include-removal to override 139 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/netcraft.py, use --include-removal to override 140 | dpkg-source: warning: ignoring deletion of file 
magnifier_1.0/magnifier/modules/discovery/shodansearch.pyc, use --include-removal to override 141 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/jigsaw.pyc, use --include-removal to override 142 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googlesets.pyc, use --include-removal to override 143 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yandexsearch.py, use --include-removal to override 144 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dogpilesearch.py, use --include-removal to override 145 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/pgpsearch.py, use --include-removal to override 146 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dogpilesearch.pyc, use --include-removal to override 147 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/__init__.py, use --include-removal to override 148 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/__init__.pyc, use --include-removal to override 149 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dnssearch-threads.py, use --include-removal to override 150 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/virustotal.py, use --include-removal to override 151 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/crtsh.py, use --include-removal to override 152 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/pgpsearch.pyc, use --include-removal to override 153 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/baidusearch.pyc, use --include-removal to override 154 | 
dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googlesearch.py, use --include-removal to override 155 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/bingsearch.pyc, use --include-removal to override 156 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googleplussearch.pyc, use --include-removal to override 157 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/baidusearch.py, use --include-removal to override 158 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dnssearch.pyc, use --include-removal to override 159 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yahoosearch.py, use --include-removal to override 160 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/jigsaw.py, use --include-removal to override 161 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/IPy.pyc, use --include-removal to override 162 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/linkedinsearch.py, use --include-removal to override 163 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/exaleadsearch.pyc, use --include-removal to override 164 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/bingsearch.py, use --include-removal to override 165 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yandexsearch.pyc, use --include-removal to override 166 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/yahoosearch.pyc, use --include-removal to override 167 | dpkg-source: warning: ignoring deletion of file 
magnifier_1.0/magnifier/modules/discovery/googleplussearch.py, use --include-removal to override 168 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/netcraft.pyc, use --include-removal to override 169 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googleCSE.py, use --include-removal to override 170 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/dnssearch.py, use --include-removal to override 171 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googlesets.py, use --include-removal to override 172 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/linkedinsearch.pyc, use --include-removal to override 173 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/virustotal.pyc, use --include-removal to override 174 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/exaleadsearch.py, use --include-removal to override 175 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodansearch.py, use --include-removal to override 176 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/googlesearch.pyc, use --include-removal to override 177 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/twittersearch.py, use --include-removal to override 178 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/asksearch.py, use --include-removal to override 179 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/crtsh.pyc, use --include-removal to override 180 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/IPy.py, use --include-removal to override 181 | 
dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/discovery/shodan 182 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/api.pyc, use --include-removal to override 183 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/__init__.py, use --include-removal to override 184 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/api.py, use --include-removal to override 185 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/__init__.pyc, use --include-removal to override 186 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/shodan/wps.py, use --include-removal to override 187 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/discovery/DNS 188 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Status.py, use --include-removal to override 189 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Class.pyc, use --include-removal to override 190 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Base.pyc, use --include-removal to override 191 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Status.pyc, use --include-removal to override 192 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Type.py, use --include-removal to override 193 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/__init__.py, use --include-removal to override 194 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/__init__.pyc, use --include-removal to override 195 | dpkg-source: warning: ignoring 
deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Class.py, use --include-removal to override 196 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Base.py, use --include-removal to override 197 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/lazy.py, use --include-removal to override 198 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Lib.py, use --include-removal to override 199 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/lazy.pyc, use --include-removal to override 200 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Opcode.py, use --include-removal to override 201 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Opcode.pyc, use --include-removal to override 202 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Type.pyc, use --include-removal to override 203 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/Lib.pyc, use --include-removal to override 204 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/discovery/DNS/win32dns.py, use --include-removal to override 205 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/modules/lib 206 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/markup.pyc, use --include-removal to override 207 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/__init__.py, use --include-removal to override 208 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/__init__.pyc, use --include-removal to override 209 | dpkg-source: warning: ignoring deletion of file 
magnifier_1.0/magnifier/modules/lib/htmlExport.py, use --include-removal to override 210 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/graphs.py, use --include-removal to override 211 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/hostchecker.pyc, use --include-removal to override 212 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/htmlExport.pyc, use --include-removal to override 213 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/graphs.pyc, use --include-removal to override 214 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/markup.py, use --include-removal to override 215 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/modules/lib/hostchecker.py, use --include-removal to override 216 | dpkg-source: warning: ignoring deletion of directory magnifier_1.0/magnifier/snap 217 | dpkg-source: warning: ignoring deletion of file magnifier_1.0/magnifier/snap/snapcraft.yaml, use --include-removal to override 218 | dpkg-source: info: building magnifier in magnifier_1.0-1.debian.tar.xz 219 | dpkg-source: info: building magnifier in magnifier_1.0-1.dsc 220 | debian/rules build 221 | dh build --with python2 --system=pybuild 222 | dh_update_autotools_config -O--system=pybuild 223 | dh_autoreconf -O--system=pybuild 224 | dh_auto_configure -O--system=pybuild 225 | dh_auto_build -O--system=pybuild 226 | dh_auto_test -O--system=pybuild 227 | create-stamp debian/debhelper-build-stamp 228 | fakeroot debian/rules binary 229 | dh binary --with python2 --system=pybuild 230 | dh_testroot -O--system=pybuild 231 | dh_prep -O--system=pybuild 232 | dh_installdirs -O--system=pybuild 233 | dh_auto_install -O--system=pybuild 234 | dh_install -O--system=pybuild 235 | dh_installdocs -O--system=pybuild 236 | dh_installchangelogs -O--system=pybuild 237 | dh_installexamples 
-O--system=pybuild 238 | dh_installman -O--system=pybuild 239 | dh_installcatalogs -O--system=pybuild 240 | dh_installcron -O--system=pybuild 241 | dh_installdebconf -O--system=pybuild 242 | dh_installemacsen -O--system=pybuild 243 | dh_installifupdown -O--system=pybuild 244 | dh_installinfo -O--system=pybuild 245 | dh_systemd_enable -O--system=pybuild 246 | dh_python2 -O--system=pybuild 247 | E: dh_python2 dh_python2:408: no package to act on (python-foo or one with ${python:Depends} in Depends) 248 | dh_installinit -O--system=pybuild 249 | dh_systemd_start -O--system=pybuild 250 | dh_installmenu -O--system=pybuild 251 | dh_installmime -O--system=pybuild 252 | dh_installmodules -O--system=pybuild 253 | dh_installlogcheck -O--system=pybuild 254 | dh_installlogrotate -O--system=pybuild 255 | dh_installpam -O--system=pybuild 256 | dh_installppp -O--system=pybuild 257 | dh_installudev -O--system=pybuild 258 | dh_installgsettings -O--system=pybuild 259 | dh_bugfiles -O--system=pybuild 260 | dh_ucf -O--system=pybuild 261 | dh_lintian -O--system=pybuild 262 | dh_gconf -O--system=pybuild 263 | dh_icons -O--system=pybuild 264 | dh_perl -O--system=pybuild 265 | dh_usrlocal -O--system=pybuild 266 | dh_link -O--system=pybuild 267 | dh_installwm -O--system=pybuild 268 | dh_installxfonts -O--system=pybuild 269 | dh_strip_nondeterminism -O--system=pybuild 270 | dh_compress -O--system=pybuild 271 | dh_fixperms -O--system=pybuild 272 | dh_missing -O--system=pybuild 273 | dh_strip -O--system=pybuild 274 | dh_makeshlibs -O--system=pybuild 275 | dh_shlibdeps -O--system=pybuild 276 | dh_installdeb -O--system=pybuild 277 | dh_gencontrol -O--system=pybuild 278 | dh_md5sums -O--system=pybuild 279 | dh_builddeb -O--system=pybuild 280 | dpkg-deb: building package 'magnifier' in '../magnifier_1.0-1_amd64.deb'. 
281 | dpkg-genbuildinfo 282 | dpkg-genbuildinfo: error: badly formed line in files list file, line 1 283 | dpkg-buildpackage: error: dpkg-genbuildinfo subprocess returned exit status 25 284 | --------------------------------------------------------------------------------