├── anubis
│   ├── scanners
│   │   ├── __init__.py
│   │   ├── dnssec.py
│   │   ├── censys.py
│   │   ├── shodan.py
│   │   ├── hudson_rock.py
│   │   ├── hackertarget.py
│   │   ├── anubis_db.py
│   │   ├── spyse.py
│   │   ├── zonetransfer.py
│   │   ├── ssl.py
│   │   ├── recursive.py
│   │   ├── netcraft.py
│   │   ├── pkey.py
│   │   ├── dnsdumpster.py
│   │   ├── nmap.py
│   │   └── crt.py
│   ├── utils
│   │   ├── __init__.py
│   │   ├── color_print.py
│   │   ├── signal_handler.py
│   │   └── search_worker.py
│   ├── __init__.py
│   ├── commands
│   │   ├── __init__.py
│   │   ├── base.py
│   │   └── target.py
│   ├── API.py
│   └── cli.py
├── .dockerignore
├── tests
│   ├── domains.txt
│   ├── test_cli.py
│   └── commands
│       └── test_target.py
├── renovate.json
├── setup.cfg
├── requirements.txt
├── .travis.yml
├── MANIFEST.in
├── Dockerfile
├── ISSUE_TEMPLATE.md
├── coverage.svg
├── LICENSE
├── .gitignore
├── CONTRIBUTING.md
├── setup.py
├── CODE_OF_CONDUCT.md
└── README.md
/anubis/scanners/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/anubis/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .gitignore
2 | tests/
3 |
--------------------------------------------------------------------------------
/anubis/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = '1.3.1'
2 |
--------------------------------------------------------------------------------
/anubis/commands/__init__.py:
--------------------------------------------------------------------------------
1 | from .target import *
2 |
--------------------------------------------------------------------------------
/tests/domains.txt:
--------------------------------------------------------------------------------
1 | example.com
2 | https://example.com
3 | https://jonlu.ca
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": [
3 | "config:recommended"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_wheel]
2 | universal = 1
3 | [metadata]
4 | description_file = README.md
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | setuptools
2 | python_nmap==0.7.1
3 | shodan>=1.31.0
4 | docopt==0.6.2
5 | requests>=2.32.3
6 | censys==2.2.16
7 | dnspython>=2.7.0
--------------------------------------------------------------------------------
/anubis/API.py:
--------------------------------------------------------------------------------
1 | # Censys API ID and secret (from your Censys account settings)
2 | CENSYS_ID = ""
3 | CENSYS_SECRET = ""
4 | # Spyse Token - https://spyse.com/user
5 | SPYSE_TOKEN = ""
6 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - "3.7"
4 |
5 | # command to install dependencies
6 | install:
7 | - pip3 install -r requirements.txt
8 | - pip3 install .
9 | # command to run tests
10 | script:
11 | - pytest # or py.test for Python versions 3.5 and below
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | exclude .gitignore
2 | exclude .coverage
3 | exclude .travis.yml
4 | include README.md
5 | include setup.cfg
6 | include requirements.txt
7 | prune .cache
8 | prune .git
9 | prune build
10 | prune dist
11 | recursive-exclude *.egg-info *
12 | recursive-include tests *
13 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.13-slim
2 |
3 | RUN apt-get update && apt-get install -y --no-install-recommends \
4 | build-essential \
5 | libssl-dev \
6 | libffi-dev \
7 | python3-dev \
8 | && rm -rf /var/lib/apt/lists/*
9 |
10 | WORKDIR /Anubis/
11 | COPY . /Anubis/
12 |
13 | RUN pip3 install .
14 |
15 | ENTRYPOINT ["anubis"]
16 |
--------------------------------------------------------------------------------
/anubis/commands/base.py:
--------------------------------------------------------------------------------
1 | """The base command."""
2 |
3 |
4 | class Base(object):
5 | """A base command."""
6 |
7 | def __init__(self, options, *args, **kwargs):
8 | self.options = options
9 | self.args = args
10 | self.kwargs = kwargs
11 |
12 | def run(self):
13 | raise NotImplementedError(
14 | 'run() method must be implemented by the overloading class')
15 |
--------------------------------------------------------------------------------
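
Every CLI command subclasses `Base` and overrides `run()`; `cli.py` instantiates the command with the parsed docopt options dictionary (the real implementation is `Target` in `anubis/commands/target.py` below). A minimal sketch of the pattern, using a hypothetical `Hello` command that is not part of this repo:

```python
from anubis.commands.base import Base


class Hello(Base):
  """Toy command illustrating the Base contract."""

  def run(self):
    # self.options is the docopt dictionary handed over by cli.py
    print("Target option:", self.options.get("TARGET"))
```
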
/anubis/scanners/dnssec.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from nmap import nmap
4 |
5 | from anubis.utils.color_print import ColorPrint
6 |
7 |
8 | def dnssecc_subdomain_enum(self, target):
9 | # Must run as root
10 | if os.getuid() == 0:
11 | print("Starting DNSSEC Enum")
12 | nm = nmap.PortScanner()
13 | arguments = '-sSU -p 53 --script dns-nsec-enum --script-args dns-nsec-enum.domains=' + target
14 |
15 | nm.scan(hosts=target, arguments=arguments)
16 | for host in nm.all_hosts():
17 | try:
18 | print(nm[host]['udp'][53]['script']['dns-nsec-enum'])
19 | except KeyError:
20 | pass  # this host returned no dns-nsec-enum script output
21 | else:
22 | ColorPrint.red(
23 | "To run a DNSSEC subdomain enumeration, Anubis must be run as root")
24 |
--------------------------------------------------------------------------------
/anubis/scanners/censys.py:
--------------------------------------------------------------------------------
1 | import censys.certificates
2 |
3 | from anubis.utils.color_print import ColorPrint
4 |
5 |
6 | def search_censys(self, target):
7 | print("Searching Censys")
8 | try:
9 | from anubis.API import CENSYS_ID, CENSYS_SECRET
10 | except ImportError:
11 | ColorPrint.red(
12 | "To run a Censys scan, you must add your API keys to anubis/API.py")
13 | return
14 | if not CENSYS_SECRET or not CENSYS_ID:
15 | ColorPrint.red(
16 | "To run a Censys scan, you must add your API keys to anubis/API.py")
17 | return
18 | # Print certificate information for domains
19 | c = censys.certificates.CensysCertificates(CENSYS_ID, CENSYS_SECRET)
20 | for cert in c.search("." + target):
21 | print(cert)
22 |
--------------------------------------------------------------------------------
/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | # Please follow the general troubleshooting steps first:
2 |
3 | - [ ] Check that you are using Python 3.11 or newer
4 | - [ ] Make sure you are using pip3 (or any instance of pip associated with Python 3.x)
5 |
6 | ### Bug reports:
7 |
8 | Make sure you include the following:
9 |
10 | * Anubis Version
11 | * Python Version
12 | * Full stack trace if crash or installation error
13 | * Machine details (OS, build number, distro, etc.)
14 |
15 | Also, please assign @jonluca to all issues at first.
16 |
17 | Please replace this line with a brief summary of your issue.
18 |
19 | ### Features:
20 |
21 | **Please note by far the quickest way to get a new feature is to make a Pull Request.**
22 |
23 | Otherwise, open an issue and tag it with "feature-request"
24 |
--------------------------------------------------------------------------------
/anubis/utils/color_print.py:
--------------------------------------------------------------------------------
1 | class ColorPrint:
2 | RED = '\033[91m'
3 | GREEN = '\033[92m'
4 | YELLOW = '\033[93m'
5 | LIGHT_PURPLE = '\033[94m'
6 | PURPLE = '\033[95m'
7 | END = '\033[0m'
8 |
9 | @classmethod
10 | def red(cls, s, **kwargs):
11 | print(cls.RED + s + cls.END, **kwargs)
12 |
13 | @classmethod
14 | def green(cls, s, **kwargs):
15 | print(cls.GREEN + s + cls.END, **kwargs)
16 |
17 | @classmethod
18 | def yellow(cls, s, **kwargs):
19 | print(cls.YELLOW + s + cls.END, **kwargs)
20 |
21 | @classmethod
22 | def light_purple(cls, s, **kwargs):
23 | print(cls.LIGHT_PURPLE + s + cls.END, **kwargs)
24 |
25 | @classmethod
26 | def purple(cls, s, **kwargs):
27 | print(cls.PURPLE + s + cls.END, **kwargs)
28 |
--------------------------------------------------------------------------------
/anubis/utils/signal_handler.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 |
4 | class SignalHandler:
5 | """
6 | The object that will handle signals and stop the worker threads.
7 | """
8 |
9 | #: The stop event that's shared by this handler and threads.
10 | stopper = None
11 |
12 | #: The pool of worker threads
13 | workers = None
14 |
15 | def __init__(self, stopper, workers):
16 | self.stopper = stopper
17 | self.workers = workers
18 |
19 | def __call__(self, signum, frame):
20 | """
21 | This will be called by the python signal module
22 |
23 | https://docs.python.org/3/library/signal.html#signal.signal
24 | """
25 | self.stopper.set()
26 |
27 | for worker in self.workers:
28 | worker.join()
29 |
30 | sys.__stdout__.write("Quitting...")
31 | sys.__stdout__.flush()
32 | sys.exit(0)
33 |
--------------------------------------------------------------------------------
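
As a sketch of how this handler is wired up (the same pattern appears in `anubis/scanners/recursive.py` further down), assuming a pool of worker threads that share a stop event:

```python
import signal
import threading

from anubis.utils.signal_handler import SignalHandler

stopper = threading.Event()
workers = []  # e.g. SearchWorker threads that check `stopper` in their run loop

# On Ctrl-C the handler sets the stop event, joins the workers and exits
signal.signal(signal.SIGINT, SignalHandler(stopper, workers))
```
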
/anubis/scanners/shodan.py:
--------------------------------------------------------------------------------
1 | import os
2 | import socket
3 | import shodan
4 |
5 | def search_shodan(self):
6 | print("Searching Shodan.io for additional information")
7 | api_key = os.environ.get("SHODAN_API_KEY", None)
8 | if api_key is None:
9 | return  # no SHODAN_API_KEY in the environment; skip the Shodan lookup
10 |
11 | api = shodan.Shodan(api_key)
12 | for i in range(len(self.options["TARGET"])):
13 | try:
14 | results = api.host(socket.gethostbyname(self.options["TARGET"][i]))
15 |
16 | print('Server Location: ' + str(results['city']) + ", " + str(
17 | results['country_code']) + ' - ' + str(results['postal_code']))
18 |
19 | print("ISP or Hosting Company: %s" % str(results['isp']))
20 |
21 | if results['os'] is not None:
22 | print("Possible OS: %s" % str(results['os']))
23 | except Exception as e:
24 | self.handle_exception(e, "Error retrieving additional info")
25 |
--------------------------------------------------------------------------------
/anubis/scanners/hudson_rock.py:
--------------------------------------------------------------------------------
1 | from json import loads
2 |
3 | import requests
4 |
5 | from anubis.utils.color_print import ColorPrint
6 |
7 |
8 | def search_hudsonrock(self, target):
9 | try:
10 | print("Searching HudsonRock")
11 | res = requests.get("https://cavalier.hudsonrock.com/api/json/v2/osint-tools/search-by-domain?domain=" + target)
12 | data = res.json()
13 | if 'data' in data and 'all_urls' in data['data']:
14 | urls = data['data']['all_urls']
15 | for url_entry in urls:
16 | if 'url' in url_entry:
17 | url = url_entry['url']
18 | if url not in self.domains:
19 | self.domains.append(url)
20 | if self.options["--verbose"]:
21 | print("HudsonRock Found Domain:", url)
22 |
23 | except Exception as e:
24 | print("Error searching HudsonRock:", e)
25 | return
--------------------------------------------------------------------------------
/anubis/scanners/hackertarget.py:
--------------------------------------------------------------------------------
1 | import requests
2 |
3 |
4 | def subdomain_hackertarget(self, target):
5 | print("Searching HackerTarget")
6 | headers = {
7 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36', }
8 | params = (('q', target),)
9 |
10 | results = requests.get('http://api.hackertarget.com/hostsearch/',
11 | headers=headers, params=params)
12 | results = results.text.split('\n')
13 | for res in results:
14 | try:
15 | if res.split(",")[0] != "":
16 | domain = res.split(",")[0]
17 | domain = domain.strip()
18 | if domain not in self.domains and domain.endswith("." + target):
19 | self.domains.append(domain)
20 | if self.options["--verbose"]:
21 | print("HackerTarget Found Domain:", domain.strip())
22 | except Exception:
23 | # Malformed line in the HackerTarget response; skip it
24 | pass
25 |
--------------------------------------------------------------------------------
/coverage.svg:
--------------------------------------------------------------------------------
[SVG content not captured in this dump - code coverage badge]
--------------------------------------------------------------------------------
/anubis/scanners/anubis_db.py:
--------------------------------------------------------------------------------
1 | from json import loads
2 |
3 | import requests
4 |
5 | from anubis.utils.color_print import ColorPrint
6 |
7 |
8 | def search_anubisdb(self, target):
9 | print("Searching Anubis-DB")
10 | res = requests.get("https://anubisdb.com/subdomains/" + target)
11 | if res.status_code == 200 and res.text:
12 | subdomains = loads(res.text)
13 | for subdomain in subdomains:
14 | if subdomain not in self.domains:
15 | self.domains.append(subdomain)
16 |
17 |
18 | def send_to_anubisdb(self, target):
19 | if len(target) == 1:
20 | print("Sending to AnubisDB")
21 | data = {'subdomains': self.domains}
22 | # Sends found subdomains to Anubis (max 10,000/post)
23 | res = requests.post("https://anubisdb.com/subdomains/" + target[0],
24 | json=data)
25 | if res.status_code != 200:
26 | ColorPrint.red("Error sending results to AnubisDB - Status Code: " + str(
27 | res.status_code))
28 | else:
29 | print("Cannot send multiple domains to AnubisDB")
30 |
--------------------------------------------------------------------------------
/anubis/scanners/spyse.py:
--------------------------------------------------------------------------------
1 | from json import loads
2 |
3 | import requests
4 |
5 | from anubis.API import SPYSE_TOKEN
6 |
7 |
8 | def search_spyse(self, target):
9 | if SPYSE_TOKEN:
10 | print("Searching Spyse")
11 | headers = {
12 | 'accept': 'application/json',
13 | 'Authorization': f"Bearer {SPYSE_TOKEN}",
14 | 'Content-Type': 'application/json',
15 | }
16 |
17 | data = {"limit": 100, "offset": 0, "search_params": [], "query": target}
18 |
19 | domains = []
20 | try:
21 | response = requests.post('https://api.spyse.com/v4/data/domain/search', headers=headers, json=data)
22 | list_results = loads(response.text)
23 | if 'data' in list_results:
24 | for item in list_results['data']['items']:
25 | domains.append(item['name'])
26 | if domains:
27 | self.domains.extend(domains)
28 | except Exception as e:
29 | print("Exception when searching spyse")
30 | return
31 | if domains and self.options["--verbose"]:
32 | for res in domains:
33 | print("Spyse Found Domain:", res)
34 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 JonLuca De Caro
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/anubis/scanners/zonetransfer.py:
--------------------------------------------------------------------------------
1 | import socket
2 |
3 | import dns.query
4 | import dns.resolver
5 | import dns.zone
6 |
7 |
8 | # Checks for zone transfers on that domain. Very rare to succeed, but when you
9 | # do, it's a gold mine
10 | def dns_zonetransfer(self, target):
11 | print("Testing for zone transfers")
12 |
13 | zonetransfers = []
14 | resolver = dns.resolver.Resolver()
15 |
16 | try:
17 | answers = resolver.resolve(target, 'NS')
18 | except Exception as e:
19 | self.handle_exception(e, "Error checking for Zone Transfers")
20 | return
21 |
22 | resolved_ips = []
23 |
24 | for ns in answers:
25 | ns = str(ns).rstrip('.')
26 | resolved_ips.append(socket.gethostbyname(ns))
27 |
28 | for ip in resolved_ips:
29 | try:
30 | zone = dns.zone.from_xfr(dns.query.xfr(ip, target))
31 | for name, node in zone.nodes.items():
32 | name = str(name)
33 | if name not in ["@", "*"]:
34 | zonetransfers.append(name + '.' + target)
35 | except Exception:
36 | pass  # zone transfer refused or failed for this nameserver
37 |
38 | if zonetransfers:
39 | print("\tZone transfers possible:")
40 | for zone in zonetransfers:
41 | print(zone)
42 |
--------------------------------------------------------------------------------
/anubis/scanners/ssl.py:
--------------------------------------------------------------------------------
1 | from collections import defaultdict
2 | import socket
3 | import ssl
4 | from socket import gaierror
5 |
6 |
7 | def search_subject_alt_name(self, target):
8 | print("Searching for Subject Alt Names")
9 | try:
10 | context = ssl.create_default_context()
11 |
12 | # Connect to the host and pull its certificate to read the Subject Alt Names
13 | try:
14 | with socket.create_connection((target, 443)) as sock:
15 | with context.wrap_socket(sock, server_hostname=target) as ssock:
16 | # https://docs.python.org/3/library/ssl.html#ssl.SSLSocket.getpeercert
17 | cert = ssock.getpeercert()
18 |
19 | subjectAltName = defaultdict(set)
20 | for type_, san in cert['subjectAltName']:
21 | subjectAltName[type_].add(san)
22 |
23 | dns_domains = list(subjectAltName['DNS'])
24 | for domain in dns_domains:
25 | if domain:
26 | self.domains.append(domain.strip())
27 | except gaierror as e:
28 | # Could not connect to the server; abort
29 | print(f"Error connecting to {target}: {e}")
30 | return
31 |
32 | except Exception as e:
33 | self.handle_exception(e)
34 |
--------------------------------------------------------------------------------
/anubis/scanners/recursive.py:
--------------------------------------------------------------------------------
1 | """The target command."""
2 | import queue
3 | import signal
4 | import sys
5 | import threading
6 | from io import StringIO
7 |
8 | from anubis.utils.search_worker import SearchWorker
9 | from anubis.utils.signal_handler import SignalHandler
10 |
11 |
12 | def recursive_search(self):
13 | print("Starting recursive search - warning, might take a long time")
14 | domains = self.clean_domains(self.domains)
15 | domains_unique = set(domains)
16 | num_workers = 10
17 |
18 | if self.options["--queue-workers"]:
19 | num_workers = int(self.options["--queue-workers"])
20 |
21 | stopper = threading.Event()
22 | url_queue = queue.Queue()
23 | for domain in domains_unique:
24 | url_queue.put(domain)
25 |
26 | # we need to keep track of the workers but not start them yet
27 | workers = [SearchWorker(url_queue, self.domains, stopper, self) for _ in
28 | range(num_workers)]
29 |
30 | # create our signal handler and connect it
31 | handler = SignalHandler(stopper, workers)
32 | signal.signal(signal.SIGINT, handler)
33 |
34 | if not self.options["--verbose"]:
35 | # catch stdout and replace it with our own
36 | self.stdout, sys.stdout = sys.stdout, StringIO()
37 |
38 | # start the threads!
39 | for worker in workers:
40 | worker.start()
41 |
42 | # wait for the queue to empty
43 | url_queue.join()
44 |
45 | sys.stdout = self.stdout
46 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 | .pytest_cache/
8 |
9 | # C extensions
10 | *.so
11 |
12 | # Distribution / packaging
13 | .Python
14 | build/
15 | develop-eggs/
16 | dist/
17 | downloads/
18 | eggs/
19 | .eggs/
20 | lib/
21 | lib64/
22 | parts/
23 | sdist/
24 | var/
25 | wheels/
26 | *.egg-info/
27 | .idea/*
28 | .installed.cfg
29 | *.egg
30 | MANIFEST
31 |
32 | # PyInstaller
33 | # Usually these files are written by a python script from a template
34 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
35 | *.manifest
36 | *.spec
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *.cover
46 | .hypothesis/
47 |
48 | # Environments
49 | .env
50 | .venv
51 | env/
52 | venv/
53 | ENV/
54 | env.bak/
55 | venv.bak/
56 |
57 | .idea_modules/
58 |
59 | docs/reference.md
60 | demos/*/parts/
61 | demos/*/prime/
62 | demos/*/stage/
63 | demos/*/snap/.snapcraft/
64 | demos/**/*.snap
65 | snap/.snapcraft/
66 | tests/unit/parts/
67 | tests/unit/snap/
68 | tests/unit/stage/
69 | build
70 | dist
71 | *.egg-info
72 | .eggs/
73 | *.pyc
74 | .coverage**
75 | htmlcov
76 | __pycache__
77 | docs/**.html
78 | Cargo.lock
79 | target
80 | *.swp
81 | *.snap
82 | parts
83 | stage
84 | prime
85 | .DS_Store
--------------------------------------------------------------------------------
/anubis/scanners/netcraft.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | import requests
4 |
5 |
6 | def search_netcraft(self, target):
7 | print("Searching NetCraft.com")
8 | headers = {'Pragma': 'no-cache', 'DNT': '1',
9 | 'Accept-Encoding': 'gzip, deflate, br',
10 | 'Accept-Language': 'en-US,en;q=0.9,it;q=0.8',
11 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36',
12 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
13 | 'Cache-Control': 'no-cache',
14 | 'Referer': 'https://searchdns.netcraft.com/?restriction=site+ends+with&host=',
15 | 'Connection': 'keep-alive', }
16 |
17 | params = (('restriction', 'site contains'), ('host', target))
18 | try:
19 | res = requests.get('https://searchdns.netcraft.com/', headers=headers,
20 | params=params)
21 | scraped = res.text
22 | # NOTE: the HTML marker strings and link pattern on the next two lines were
23 | # stripped from this capture; the values shown here are assumed placeholders.
24 | trimmed = scraped[scraped.find('<table'):scraped.rfind('</table>')]
25 | subdomain_finder = re.compile(r'https?://([^/"]*\.' + target + ')')
26 | links = subdomain_finder.findall(trimmed)
27 | for domain in links:
28 | if domain.strip() not in self.domains and domain.endswith("." + target):
29 | self.domains.append(domain.strip())
30 | if self.options["--verbose"]:
31 | print("Netcraft Found Domain:", domain.strip())
32 | except Exception as e:
33 | self.handle_exception(e, "Error searching NetCraft")
34 | pass
35 |
--------------------------------------------------------------------------------
/anubis/scanners/pkey.py:
--------------------------------------------------------------------------------
1 | import requests
2 |
3 |
4 | def search_pkey(self, target):
5 | print("Searching Pkey.in")
6 | headers = {'Pragma': 'no-cache', 'Origin': 'https://www.pkey.in',
7 | 'Accept-Encoding': 'gzip, deflate, br',
8 | 'Accept-Language': 'en-US,en;q=0.9,it;q=0.8',
9 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36',
10 | 'Content-Type': 'application/x-www-form-urlencoded',
11 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
12 | 'Cache-Control': 'no-cache',
13 | 'Referer': 'http://www.pkey.in/tools-i/search-subdomains',
14 | 'Connection': 'keep-alive', 'DNT': '1', }
15 |
16 | data = [('zone', target), ('submit', ''), ]
17 | try:
18 | res = requests.post('https://www.pkey.in/tools-i/search-subdomains',
19 | headers=headers, data=data, verify=False, timeout=10.0)
20 | scraped = res.text
21 | trimmed = scraped[scraped.find(''):scraped.rfind(
22 | '')].split('\n')
23 | for entry in trimmed:
24 | if entry.strip().startswith('| '):
25 | domain = entry.replace(' | ', '')
26 | domain = domain.replace(' | ', '')
27 | if target in domain:
28 | if domain.strip() not in self.domains:
29 | self.domains.append(domain.strip())
30 | if self.options["--verbose"]:
31 | print("Pkey Found Domain:", domain.strip())
32 |
33 | except Exception as e:
34 | self.handle_exception(e, "Error parsing pkey")
35 | pass
36 |
--------------------------------------------------------------------------------
/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | """Tests for our main anubis CLI module."""
2 |
3 | import os
4 | import shutil
5 | import tempfile
6 | from subprocess import PIPE, Popen as popen
7 | from unittest import TestCase
8 |
9 | from anubis import __version__ as VERSION
10 | from anubis.cli import StdOutHook
11 |
12 |
13 | class TestHelp(TestCase):
14 |
15 | def test_returns_usage_information(self):
16 | output = popen(['anubis', '-h'], stdout=PIPE).communicate()[0]
17 | output = output.decode("utf-8")
18 | self.assertTrue('Usage:' in output)
19 |
20 | output = popen(['anubis', '--help'], stdout=PIPE).communicate()[0]
21 | output = output.decode("utf-8")
22 | self.assertTrue('Usage:' in output)
23 |
24 |
25 | class TestVersion(TestCase):
26 |
27 | def test_returns_version_information(self):
28 | output = popen(['anubis', '--version'], stdout=PIPE).communicate()[0]
29 | output = output.decode("utf-8")
30 | self.assertEqual(output.strip(), VERSION)
31 |
32 |
33 | class TestOptions(TestCase):
34 |
35 | def setUp(self):
36 | # Create a temporary directory
37 | self.test_dir = tempfile.mkdtemp()
38 |
39 | def tearDown(self):
40 | # Remove the directory after the test
41 | shutil.rmtree(self.test_dir)
42 |
43 | def test_stdouthook(self):
44 | temp = StdOutHook(os.path.join(self.test_dir, 'test.txt'), silent=True, output=True)
45 | temp.write("test")
46 | temp.write_out()
47 | temp.flush()
48 | f = open(os.path.join(self.test_dir, 'test.txt'), 'r')
49 | self.assertIn("test", f.readlines())
50 |
51 |
52 | class TestFullRun(TestCase):
53 |
54 | def test_run(self):
55 | output = popen(
56 | ['anubis', '-tip', 'jonlu.ca', '--with-nmap', '--overwrite-nmap-scan',
57 | '"-F"'], stdout=PIPE).communicate()[0]
58 | output = output.decode("utf-8")
59 | self.assertIn("www.jonlu.ca", output)
60 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Thanks for being interested in contributing to Anubis!
4 |
5 | The project layout is currently fairly straightforward - Anubis uses docopt to parse the CLI parameters in cli.py, and uses the docstring at the top of that file to define any new options or flags.
6 |
7 | docopt will automatically use the long form of a parameter as its key, if one is provided.
8 |
9 | So, for example, to add a new parameter `x` (to be invoked with `anubis -t example.com -x`), we'd change the header definition to add x, like so
10 |
11 | ``` anubis -t TARGET [-o FILENAME] [-noispbdvx] [-w SCAN]```
12 |
13 | And then add the explanation of what it does below, like so
14 |
15 | ```-x sample new command```
16 |
17 | If we want a long form, we supply it with two dashes
18 |
19 | ```-x --example sample new command```
20 |
21 | And now to reference it within the code, we'd do `options["--example"]`, which is either True or False, or the contents of the supplied parameter if we're passing something along with the flag.
22 |
23 |
24 | ## Adding new sources
25 |
26 | The bulk of the code is in `anubis/commands/target.py`, starting with the `run()` method.
27 |
28 | The target URL is in `self.options["TARGET"]`. Feel free to write any additional functions, and then add them to the thread pool in run.
29 |
30 | Your function should not return anything - rather, if it finds any subdomains it should add them to `self.domains`. Make sure each one has not already been added, and that it is a valid subdomain (see the example scanner sketch below).
31 |
32 | Handle exceptions with `self.handle_exception(e,"stdout message")`
33 |
34 | Print to stdout with either `print()` or `ColorPrint.color("message")`
35 |
36 | If you have any questions or this is unclear, feel free to open an issue or contact @JonLuca.
37 |
38 | ## Style
39 |
40 | Use 2 spaces for indentation. Follow the Google and PEP 8 style guidelines for the most part; the main requirement is that the style stays consistent throughout the project.
--------------------------------------------------------------------------------
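
To make the scanner contract described in CONTRIBUTING.md concrete, here is a minimal sketch of a new source; the `search_example` function and the `api.example.invalid` endpoint are hypothetical and not part of the repo:

```python
import requests


def search_example(self, target):
  """Hypothetical scanner: query a JSON API and collect subdomains."""
  print("Searching Example")
  try:
    res = requests.get("https://api.example.invalid/subdomains/" + target,
                       timeout=10)
    for domain in res.json():
      domain = domain.strip()
      # Only keep valid subdomains of the target that we haven't seen yet
      if domain.endswith("." + target) and domain not in self.domains:
        self.domains.append(domain)
        if self.options["--verbose"]:
          print("Example Found Domain:", domain)
  except Exception as e:
    self.handle_exception(e, "Error searching Example")
```

A function like this would then be added to the thread list in `Target.run()` with `threading.Thread(target=search_example, args=(self, target))`.
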
/anubis/scanners/dnsdumpster.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | import requests
4 |
5 |
6 | def search_dnsdumpster(self, target):
7 | print("Searching DNSDumpster")
8 | headers = {'Pragma': 'no-cache', 'Origin': 'https://dnsdumpster.com',
9 | 'Accept-Encoding': 'gzip, deflate, br',
10 | 'Accept-Language': 'en-US,en;q=0.9,it;q=0.8',
11 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36',
12 | 'Content-Type': 'application/x-www-form-urlencoded',
13 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
14 | 'Cache-Control': 'no-cache',
15 | 'Referer': 'https://dnsdumpster.com/',
16 | 'Connection': 'keep-alive', 'DNT': '1', }
17 |
18 | get_csrf_res = requests.get('https://dnsdumpster.com', headers=headers)
19 |
20 | try:
21 | csrf_token = get_csrf_res.headers['Set-Cookie']
22 | csrf_token = csrf_token[10:]
23 | csrf_token = csrf_token.split(";")[0]
24 | except Exception as e:
25 | self.handle_exception(e, "Retrieving CSRF Token for DNSDumpster failed")
26 | return
27 |
28 | cookies = {'csrftoken': csrf_token, }
29 |
30 | data = [('csrfmiddlewaretoken', csrf_token), ('targetip', target), ('user', 'free')]
31 |
32 | res = requests.post('https://dnsdumpster.com/', headers=headers,
33 | cookies=cookies, data=data)
34 | try:
35 | scraped = res.text
36 | subdomain_finder = re.compile('\">(.*\.' + target + ')<br>')  # the trailing '<br>' was stripped in this capture and is assumed
37 | links = subdomain_finder.findall(scraped)
38 | for domain in links:
39 | if domain.strip() not in self.domains and domain.endswith("." + target):
40 | self.domains.append(domain.strip())
41 | if self.options["--verbose"]:
42 | print("DNSDumpster Found Domain:", domain.strip())
43 | except Exception as e:
44 | self.handle_exception(e, "Error searching DNS Dumpster")
45 | pass
46 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """Packaging settings."""
2 |
3 | from codecs import open
4 | from os.path import abspath, dirname, join
5 | from subprocess import call
6 |
7 | from anubis import __version__
8 | from setuptools import Command, find_packages, setup
9 |
10 | this_dir = abspath(dirname(__file__))
11 | with open(join(this_dir, 'README.md'), encoding='utf-8') as file:
12 | long_description = file.read()
13 |
14 |
15 | class RunTests(Command):
16 | """Run all tests."""
17 | description = 'run tests'
18 | user_options = []
19 |
20 | def initialize_options(self):
21 | pass
22 |
23 | def finalize_options(self):
24 | pass
25 |
26 | def run(self):
27 | """Run all tests!"""
28 | errno = call(['py.test', '--cov=anubis', '--cov-report=term-missing'])
29 | raise SystemExit(errno)
30 |
31 | with open(join(this_dir, 'requirements.txt'), encoding='utf-8') as file:
32 | reqs = [req.strip() for req in file.read().split('\n') if req.strip()]
33 |
34 | setup(name='anubis-netsec', version=__version__,
35 | description='Modern and efficient subdomain enumeration and information gathering',
36 | long_description=long_description,
37 | long_description_content_type="text/markdown",
38 | url='https://github.com/jonluca/anubis', author='JonLuca DeCaro',
39 | author_email='jonluca.decaro@gmail.com', license='MIT',
40 | classifiers=['Intended Audience :: Developers',
41 | 'Development Status :: 3 - Alpha', 'Topic :: Utilities',
42 | 'License :: OSI Approved :: MIT License', 'Natural Language :: English',
43 | 'Operating System :: OS Independent',
44 | 'Programming Language :: Python :: 3.4',
45 | 'Programming Language :: Python :: 3.5',
46 | 'Programming Language :: Python :: 3.6', ], keywords='cli',
47 | packages=find_packages(exclude=['docs', 'tests*']), python_requires='>=3',
48 | install_requires=reqs,
49 | extras_require={'test': ['coverage', 'pytest', 'pytest-cov'], },
50 | entry_points={'console_scripts': ['anubis=anubis.cli:main', ], },
51 | cmdclass={'test': RunTests},
52 | py_modules=["anubis.utils", "anubis.scanners"], )
53 |
--------------------------------------------------------------------------------
/anubis/scanners/nmap.py:
--------------------------------------------------------------------------------
1 | import shutil
2 | from json import dumps
3 |
4 | import nmap
5 |
6 |
7 | # Performs an nmap scan of a target, and outputs interesting services/ssl information
8 | def scan_host(self, target):
9 | if shutil.which("nmap") is None:
10 | print("Nmap must be installed to run --with-nmap!")
11 | return
12 |
13 | print("Starting Nmap Scan")
14 | nm = nmap.PortScanner()
15 | arguments = self.options["--overwrite-nmap-scan"] or '-nPn -sV -sC'
16 | nm.scan(hosts=target, arguments=arguments)
17 | # Could be extended to scan multiple hosts in the future
18 | for host in nm.all_hosts():
19 | if self.options["--verbose"]:
20 | print(dumps(nm[host], indent=2, sort_keys=True))
21 | print('Host : %s (%s)' % (host, nm[host].hostname()))
22 | for proto in nm[host].all_protocols():
23 | print('----------')
24 | print('Protocol: %s' % proto)
25 | lport = nm[host][proto].keys()
26 | for port in lport:
27 | print('port: %s\tstate: %s' % (port, nm[host][proto][port]['state']))
28 | try:
29 | if nm[host][proto][port]['product']:
30 | print('\tservice: %s' % nm[host][proto][port]['product'], end='')
31 | if nm[host][proto][port]['version']:
32 | print('\tversion: %s' % nm[host][proto][port]['version'])
33 | else:
34 | print('')
35 | except Exception as e:
36 | self.handle_exception(e)
37 | try:
38 | fix_newline = nm[host][proto][port]['script']['ssl-cert'].split('\n')
39 | print('\tCertificate:')
40 | for i in range(len(fix_newline)):
41 | print('\t', fix_newline[i])
42 | if fix_newline[i].startswith("Subject Alternative Name: "):
43 | content = fix_newline[i].replace("Subject Alternative Name: ", '')
44 | content = content.replace("DNS:", '')
45 | new_domains = content.split(",")
46 | for domain in new_domains:
47 | domain = domain.strip()
48 | if domain not in self.domains:
49 | self.domains.append(domain)
50 | if self.options["--verbose"]:
51 | print("Nmap Found Domain:", domain.strip())
52 | except Exception as e:
53 | self.handle_exception(e)
54 |
--------------------------------------------------------------------------------
/anubis/utils/search_worker.py:
--------------------------------------------------------------------------------
1 | """Search worker """
2 |
3 | import queue
4 | import sys
5 | import threading
6 | from threading import Thread
7 |
8 | from anubis.scanners.crt import search_crtsh
9 | from anubis.scanners.dnsdumpster import search_dnsdumpster
10 | from anubis.scanners.hackertarget import subdomain_hackertarget
11 | from anubis.scanners.netcraft import search_netcraft
12 | from anubis.scanners.pkey import search_pkey
13 | from anubis.scanners.zonetransfer import dns_zonetransfer
14 |
15 |
16 | class SearchWorker(threading.Thread):
17 | """
18 | Worker thread that runs the recursive subdomain scanners on queued domains.
19 | """
20 |
21 | #: The queue of urls
22 | domain_queue = None
23 |
24 | #: An event that tells the thread to stop
25 | stopper = None
26 |
27 | domains = list()
28 | master_domains = None
29 | parent = None
30 |
31 | def __init__(self, domain_queue, domains, stopper, parent):
32 | super().__init__()
33 | self.domain_queue = domain_queue
34 | self.stopper = stopper
35 | self.master_domains = domains
36 | self.parent = parent
37 |
38 | def run(self):
39 | while not self.stopper.is_set():
40 | try:
41 | target = self.domain_queue.get_nowait()
42 | except queue.Empty:
43 | break
44 | else:
45 | sys.__stdout__.write("Starting recursive search on " + target + "\n")
46 | self.parent.stdout.flush()
47 | # Default scans that run every time
48 | threads = [Thread(target=dns_zonetransfer, args=(self.parent, target)),
49 | Thread(target=subdomain_hackertarget, args=(self.parent, target)),
50 | Thread(target=search_pkey, args=(self.parent, target)),
51 | Thread(target=search_netcraft, args=(self.parent, target)),
52 | Thread(target=search_crtsh, args=(self.parent, target)),
53 | Thread(target=search_dnsdumpster, args=(self.parent, target))]
54 |
55 | # Start all threads
56 | for x in threads:
57 | x.start()
58 |
59 | # Wait for all of them to finish
60 | for x in threads:
61 | x.join()
62 |
63 | self.domains = self.parent.clean_domains(self.domains)
64 | for domain in self.domains:
65 | if domain not in self.master_domains:
66 | sys.__stdout__.write("Found new domain: " + domain + "\n")
67 | self.master_domains.append(domain)
68 | self.domain_queue.put(domain)
69 |
70 | self.domain_queue.task_done()
71 |
--------------------------------------------------------------------------------
/anubis/scanners/crt.py:
--------------------------------------------------------------------------------
1 | import re
2 | import requests
3 |
4 |
5 | def search_crtsh(self, target):
6 | """
7 | Search crt.sh for certificates related to the target and extract both
8 | subdomains and root domains.
9 | """
10 | print("Searching crt.sh")
11 | headers = {
12 | 'authority': 'crt.sh',
13 | 'cache-control': 'max-age=0',
14 | 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.28 Safari/537.36',
15 | 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
16 | 'accept-encoding': 'gzip, deflate, br',
17 | 'accept-language': 'en-US,en;q=0.9',
18 | }
19 |
20 | # Clean target input to ensure we have just the domain
21 | target = clean_domain(target)
22 |
23 | # Search for both wildcard and explicit domain
24 | params = (('q', '%.' + target),)
25 | try:
26 | res = requests.get('https://crt.sh/', headers=headers, params=params)
27 | scraped = res.text
28 |
29 | # Extract domains from response
30 | domains = extract_domains(scraped, target)
31 |
32 | # Add all found domains to our list
33 | for domain in domains:
34 | if domain not in self.domains:
35 | self.domains.append(domain)
36 | if self.options["--verbose"]:
37 | print("Crt.sh Found Domain:", domain)
38 |
39 | except Exception as e:
40 | self.handle_exception(e, "Error searching crt.sh")
41 |
42 |
43 | def clean_domain(domain):
44 | """Clean input to get the base domain without protocol or path."""
45 | # Remove protocol if present
46 | domain = re.sub(r'^(http|https)://', '', domain)
47 | # Remove path, query params, etc.
48 | domain = domain.split('/')[0].strip()
49 | return domain
50 |
51 |
52 | def extract_domains(html_content, target):
53 | """Extract all domains and subdomains from HTML content."""
54 | # Match domains in TD tags (crt.sh specific)
55 | domain_pattern = re.compile(r'((?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+' +
56 | re.escape(target) + r')</TD>')  # closing '</TD>' assumed; the original markup was stripped in this capture
57 | domains = set()
58 |
59 | # Find all matches
60 | matches = domain_pattern.findall(html_content)
61 |
62 | # Process matches
63 | for domain in matches:
64 | # Handle '<br>'-separated values (the tag was stripped in this capture and is assumed)
65 | for subdomain in domain.lower().split('<br>'):
66 | clean_domain = subdomain.strip()
67 | if clean_domain:
68 | domains.add(clean_domain)
69 |
70 | # Also add the root domain
71 | if target not in domains:
72 | domains.add(target)
73 |
74 | return list(domains)
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6 |
7 | ## Our Standards
8 |
9 | Examples of behavior that contributes to creating a positive environment include:
10 |
11 | * Using welcoming and inclusive language
12 | * Being respectful of differing viewpoints and experiences
13 | * Gracefully accepting constructive criticism
14 | * Focusing on what is best for the community
15 | * Showing empathy towards other community members
16 |
17 | Examples of unacceptable behavior by participants include:
18 |
19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances
20 | * Trolling, insulting/derogatory comments, and personal or political attacks
21 | * Public or private harassment
22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission
23 | * Other conduct which could reasonably be considered inappropriate in a professional setting
24 |
25 | ## Our Responsibilities
26 |
27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
28 |
29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
30 |
31 | ## Scope
32 |
33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
34 |
35 | ## Enforcement
36 |
37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at jdecaro@usc.edu. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
38 |
39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
40 |
41 | ## Attribution
42 |
43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
44 |
45 | [homepage]: http://contributor-covenant.org
46 | [version]: http://contributor-covenant.org/version/1/4/
47 |
--------------------------------------------------------------------------------
/anubis/cli.py:
--------------------------------------------------------------------------------
1 | """
2 | Usage:
3 | anubis (-t TARGET | -f FILE) [-o FILENAME] [-bdinoprsSv] [-w SCAN] [-q NUM]
4 | anubis -h
5 | anubis (--version | -V)
6 |
7 | Options:
8 | -h --help show this help message and exit
9 | -t --target set target (comma separated, no spaces, if multiple)
10 | -f --file set target (reads from file, one domain per line)
11 | -n --with-nmap perform an nmap service/script scan
12 | -o --output save to filename
13 | -i --additional-info show additional information about the host from Shodan (requires API key)
14 | -p --ip outputs the resolved IPs for each subdomain, and a full list of unique ips
15 | -d --send-to-anubis-db send results to Anubis-DB
16 | -r --recursive recursively search over all subdomains
17 | -s --ssl run an ssl scan and output cipher + chain info
18 | -S --silent only output subdomains, one per line
19 | -w --overwrite-nmap-scan SCAN overwrite default nmap scan (default -nPn -sV -sC)
20 | -v --verbose print debug info and full request output
21 | -q --queue-workers NUM override number of queue workers (default: 10, max: 100)
22 | -V --version show version and exit
23 |
24 | Help:
25 | For help using this tool, please open an issue on the Github repository:
26 | https://github.com/jonluca/anubis
27 | """
28 |
29 | import sys
30 | import time
31 | from functools import reduce
32 |
33 | from docopt import docopt
34 |
35 | import anubis.commands
36 | # noinspection PyPep8Naming
37 | from . import __version__ as VERSION
38 |
39 |
40 | # Overload stdout to save output and change colors on filewrite
41 | class StdOutHook:
42 | lines = []
43 | filename = ""
44 |
45 | def __init__(self, filename, silent, output):
46 | self.filename = filename
47 | self.silent = silent
48 | self.output = output
49 |
50 | def write(self, text, override=False, **kwargs):
51 | if not self.silent or override:
52 | sys.__stdout__.write(text)
53 | self.lines.append(text)
54 |
55 | def write_out(self):
56 | if self.output:
57 | with open(self.filename, "w") as file:
58 | for line in self.lines:
59 | # remove stdout colors
60 | line = line.replace('\033[91m', '')
61 | line = line.replace('\033[92m', '')
62 | line = line.replace('\033[93m', '')
63 | line = line.replace('\033[94m', '')
64 | line = line.replace('\033[95m', '')
65 | line = line.replace('\033[0m', '')
66 | file.write(line)
67 |
68 | def flush(self):
69 | # python3 compatibility, does nothing
70 | pass
71 |
72 |
73 | # Format elapsed seconds as H:MM:SS.mmm - credit to https://stackoverflow.com/questions/1557571/how-do-i-get-time-of-a-python-programs-execution
74 | def seconds_to_str(t):
75 | return "%d:%02d:%02d.%03d" % reduce(lambda ll, b: divmod(ll[0], b) + ll[1:],
76 | [(t * 1000,), 1000, 60, 60])
77 |
78 |
79 | def main():
80 | try:
81 | if sys.version_info < (3, 0):
82 | sys.stdout.write("Sorry, requires Python 3.x, not Python 2.x\n")
83 | sys.exit(1)
84 |
85 | start_time = time.time()
86 |
87 | options = docopt(__doc__, version=VERSION)
88 |
89 | if options["--output"] or options['--silent']:
90 | sys.stdout = StdOutHook(options["FILENAME"], options['--silent'],
91 | options['--output'])
92 |
93 | if options["--queue-workers"]:
94 | if not options["--recursive"]:
95 | print("Queue workers override only works with recursive option!")
96 | sys.exit(1)
97 | if int(options["--queue-workers"]) > 100:
98 | print("Max queue worker override is 100!")
99 | sys.exit(1)
100 | if int(options["--queue-workers"]) < 1:
101 | print("Queue workers can't be negative!")
102 | sys.exit(1)
103 |
104 | if not options["--target"] and not options['--file']:
105 | print(
106 | "Target required! Run with -h for usage instructions. Either -t target.host or -f file.txt required")
107 | return
108 |
109 | if options["--target"] and options['--file']:
110 | print(
111 | "Please only supply one target method - either read by file with -f or as an argument to -t, not both.")
112 | return
113 |
114 | print("""
115 | d8888 888 d8b
116 | d88888 888 Y8P
117 | d88P888 888
118 | d88P 888 88888b. 888 888 88888b. 888 .d8888b
119 | d88P 888 888 "88b 888 888 888 "88b 888 88K
120 | d88P 888 888 888 888 888 888 888 888 "Y8888b.
121 | d8888888888 888 888 Y88b 888 888 d88P 888 X88
122 | d88P 888 888 888 "Y88888 88888P" 888 88888P'
123 | """)
124 |
125 | command = anubis.commands.Target(options)
126 | command.run()
127 | print("Subdomain search took %s" % seconds_to_str(time.time() - start_time))
128 | if options["--output"]:
129 | sys.stdout.write_out()
130 | except KeyboardInterrupt:
131 | print("\nQuitting...")
132 | sys.exit(0)
133 |
--------------------------------------------------------------------------------
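
For reference, docopt turns the usage string at the top of `cli.py` into a plain dictionary that is handed to `Target`. A rough illustration of the mapping, assuming the package and its dependencies are installed (keys abbreviated):

```python
from docopt import docopt

import anubis.cli

# e.g. the equivalent of running `anubis -t example.com -v`
options = docopt(anubis.cli.__doc__, argv=["-t", "example.com", "-v"],
                 version="1.3.1")
# options now looks roughly like:
# {"--target": True, "TARGET": "example.com", "--verbose": True,
#  "--recursive": False, "--output": False, "FILENAME": None, ...}
```
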
/tests/commands/test_target.py:
--------------------------------------------------------------------------------
1 | """Tests for our main anubis CLI module."""
2 |
3 | import os
4 | import signal
5 | import sys
6 | from io import StringIO
7 | from subprocess import PIPE, Popen as popen
8 | from threading import Timer
9 | from time import sleep
10 | from unittest import TestCase
11 |
12 | from anubis.API import SPYSE_TOKEN
13 | from anubis.commands.target import Target
14 | from anubis.scanners.anubis_db import search_anubisdb, send_to_anubisdb
15 | from anubis.scanners.crt import search_crtsh
16 | from anubis.scanners.dnsdumpster import search_dnsdumpster
17 | from anubis.scanners.dnssec import dnssecc_subdomain_enum
18 | from anubis.scanners.hackertarget import subdomain_hackertarget
19 | from anubis.scanners.netcraft import search_netcraft
20 | from anubis.scanners.pkey import search_pkey
21 | from anubis.scanners.recursive import recursive_search
22 | from anubis.scanners.shodan import search_shodan
23 | from anubis.scanners.spyse import search_spyse
24 | from anubis.scanners.zonetransfer import dns_zonetransfer
25 | from anubis.utils.color_print import ColorPrint
26 |
27 |
28 | class TestScanners(TestCase):
29 | domains = list()
30 | options = {"--verbose": True}
31 |
32 | def handle_exception(self, e, message=""):
33 | print(e)
34 | if message:
35 | print(message)
36 |
37 | def setUp(self):
38 | # catch stdout
39 | self.held, sys.stdout = sys.stdout, StringIO()
40 | self.stdout = sys.stdout
41 | # reset domains
42 | self.domains = list()
43 |
44 | def tearDown(self):
45 | self.held.write(sys.stdout.getvalue())
46 |
47 | def test_anubis_db(self):
48 | search_anubisdb(self, "example.com")
49 | self.assertIn("m.example.com", self.domains)
50 |
51 | def test_send_to_anubis(self):
52 | self.domains.append("www.example.com")
53 | send_to_anubisdb(self,
54 | ["example.com"]) # Send to anubis db takes in an array
55 | self.assertTrue("Error" not in sys.stdout.getvalue())
56 |
57 | send_to_anubisdb(self, ["example.com", "www.example.com"])
58 | self.assertTrue("multiple" in sys.stdout.getvalue())
59 |
60 | send_to_anubisdb(self, ["example"])
61 | self.assertTrue("Error" in sys.stdout.getvalue())
62 |
63 | # def test_crt(self):
64 | # search_crtsh(self, "jonlu.ca")
65 | #
66 | # if 'TRAVIS' in os.environ:
67 | # self.assertTrue(True) # crt.sh times out on Travis
68 | # return
69 | # self.assertIn("secure.jonlu.ca", self.domains)
70 |
71 | def test_zonetransfer(self):
72 | dns_zonetransfer(self, "jonlu.ca")
73 | self.assertTrue("Error" not in sys.stdout.getvalue())
74 | dns_zonetransfer(self, "$FALSE$")
75 | self.assertTrue("Error" in sys.stdout.getvalue())
76 |
77 | def test_dnsdumpster(self):
78 | search_dnsdumpster(self, "jonlu.ca")
79 | self.assertIn("assets.jonlu.ca", self.domains)
80 |
81 | def test_dnssec(self):
82 | if os.getuid() == 0:
83 | dnssecc_subdomain_enum(self, "google.com")
84 | self.assertTrue("google" in sys.stdout.getvalue())
85 | else:
86 | print("To run DNSSEC test, run as root")
87 |
88 | def test_hackertarget(self):
89 | subdomain_hackertarget(self, "example.com")
90 | self.assertIn("www.example.com", self.domains)
91 |
92 | def test_netcraft(self):
93 | search_netcraft(self, "example.com")
94 | self.assertTrue(True)  # placeholder since netcraft no longer returns valid results, 1/6/19
95 | # self.assertIn("http://www.example.com", self.domains)
96 |
97 | # As of 1/18/18, Pkey currently constantly times out
98 | def test_pkey(self):
99 | search_pkey(self, "google.com")
100 | # self.assertIn("google.com", self.domains)
101 | self.assertTrue(True)
102 |
103 | def test_spyse(self):
104 | search_spyse(self, "jonlu.ca")
105 | if SPYSE_TOKEN:
106 | self.assertIn("blog.jonlu.ca", self.domains)
107 | else:
108 | self.assertTrue(True)
109 |
110 | # def test_shodan(self):
111 | # self.ip = "138.197.125.24"
112 | # self.options = {}
113 | # self.options["TARGET"] = ["jonlu.ca"]
114 | # self.options["--verbose"] = True
115 | # search_shodan(self)
116 | # self.assertIn("ISP", sys.stdout.getvalue())
117 |
118 | def test_recursive(self):
119 | self.options = {}
120 | # Set target to a domain we know only has 1 subdomain, for speed's sake
121 | self.options["TARGET"] = ["neverssl.com"]
122 | self.domains.append("neverssl.com")
123 | self.options["--verbose"] = True
124 | self.options["--queue-workers"] = False
125 | recursive_search(self)
126 | self.domains = self.clean_domains(self.domains)
127 | self.assertIn("neverssl.com", self.domains)
128 |
129 | # Pass through function for recursive search
130 | def clean_domains(self, domains):
131 | return Target.clean_domains(domains)
132 |
133 | def test_sigints(self):
134 | # Declare function to send sigint, after timer
135 |
136 | proc1 = popen(['anubis', '-tr', 'neverssl.com'], stdout=PIPE)
137 |
138 | # Function to send SIGINT to our process; make sure that it outputs "Quitting" then ends
139 | def send_siginit():
140 | popen.send_signal(proc1, signal.SIGINT)
141 | self.assertTrue("Quitting" in sys.stdout.getvalue())
142 |
143 | t = Timer(3.0, send_siginit)
144 | t.start()
145 | sleep(5)
146 |
147 | def test_exception(self):
148 | self.options["--verbose"] = True
149 | try:
150 | raise Exception("Oh No")
151 | except Exception as e:
152 | Target.handle_exception(self, e, "Test Exception")
153 | self.assertTrue("Test" in sys.stdout.getvalue())
154 |
155 |
156 | class TestColorPrint(TestCase):
157 |
158 | def setUp(self):
159 | # catch stdout
160 | self.held, sys.stdout = sys.stdout, StringIO()
161 | # reset domains
162 | self.domains = list()
163 |
164 | def tearDown(self):
165 | # Kind of hacky as it'll dump all of stdout at the end, but better than losing it entirely
166 | self.held.write(sys.stdout.getvalue())
167 |
168 | def test_color_print(self):
169 | ColorPrint.red("red")
170 | self.assertIn("91m", sys.stdout.getvalue())
171 | ColorPrint.green("green")
172 | self.assertIn("92m", sys.stdout.getvalue())
173 | ColorPrint.light_purple("light_purple")
174 | self.assertIn("94m", sys.stdout.getvalue())
175 | ColorPrint.purple("purple")
176 | self.assertIn("95m", sys.stdout.getvalue())
177 | ColorPrint.yellow("yellow")
178 | self.assertIn("93m", sys.stdout.getvalue())
179 |
--------------------------------------------------------------------------------
/anubis/commands/target.py:
--------------------------------------------------------------------------------
1 | """The target command."""
2 | import os
3 | import re
4 | import socket
5 | import sys
6 | import threading
7 | from urllib.parse import urlsplit
8 |
9 | from anubis.scanners.anubis_db import search_anubisdb, send_to_anubisdb
10 | from anubis.scanners.crt import search_crtsh
11 | from anubis.scanners.hudson_rock import search_hudsonrock
12 | from anubis.scanners.dnsdumpster import search_dnsdumpster
13 | from anubis.scanners.dnssec import dnssecc_subdomain_enum
14 | from anubis.scanners.hackertarget import subdomain_hackertarget
15 | from anubis.scanners.netcraft import search_netcraft
16 | from anubis.scanners.nmap import scan_host
17 | from anubis.scanners.recursive import recursive_search
18 | from anubis.scanners.shodan import search_shodan
19 | from anubis.scanners.spyse import search_spyse
20 | from anubis.scanners.ssl import search_subject_alt_name
21 | from anubis.scanners.zonetransfer import dns_zonetransfer
22 | from anubis.utils.color_print import ColorPrint
23 | from .base import Base
24 |
25 |
26 | class Target(Base):
27 | """Main enumeration module"""
28 | domains = list()
29 | dedupe = set()
30 |
31 | stdout = sys.stdout
32 |
33 | def handle_exception(self, e, message=""):
34 | if self.options["--verbose"]:
35 | print(e)
36 | if message:
37 | ColorPrint.red(message)
38 |
39 | def init(self):
40 | if self.options["FILE"]:
41 | full_path = os.path.join(os.getcwd(), self.options["FILE"])
42 | with open(full_path) as file:
43 | self.options["TARGET"] = list(filter(None, file.read().split('\n')))
44 | else:
45 | self.options["TARGET"] = list(
46 | filter(None, self.options["TARGET"].split(",")))
47 | # Clean up targets
48 | for i in range(len(self.options["TARGET"])):
49 | url = self.options["TARGET"][i]
50 | # Inject protocol if not there
51 | if not re.match(r'http(s?):', url):
52 | url = 'http://' + url
53 |
54 | parsed = urlsplit(url)
55 | host = parsed.netloc
56 |
57 | self.options["TARGET"][i] = host
58 |
59 | try:
60 | ip_str = socket.gethostbyname(host)
61 | ColorPrint.green(f"Searching for subdomains for {ip_str} ({host})")
62 | except Exception as e:
63 | self.handle_exception(e,
64 | "Error connecting to target! Make sure you spelled it correctly and it is a resolvable address")
65 |
66 | def run(self):
67 | # Retrieve IP of target and run initial configurations
68 | self.init()
69 | # If multiple targets, create scans for each
70 | for i in range(len(self.options["TARGET"])):
71 | # Default scans that run every time
72 | target = self.options["TARGET"][i]
73 | ColorPrint.green(f"Working on target: {target}")
74 | threads = [threading.Thread(target=dns_zonetransfer, args=(self, target)),
75 | threading.Thread(target=subdomain_hackertarget,
76 | args=(self, target)),
77 | threading.Thread(target=search_subject_alt_name,
78 | args=(self, target)),
79 | threading.Thread(target=search_netcraft, args=(self, target)),
80 | threading.Thread(target=search_hudsonrock, args=(self, target)),
81 | threading.Thread(target=search_crtsh, args=(self, target)),
82 | threading.Thread(target=search_dnsdumpster,
83 | args=(self, target)),
84 | threading.Thread(target=search_spyse,
85 | args=(self, target)),
86 | threading.Thread(target=search_anubisdb, args=(self, target))]
87 |
88 | # Additional options - shodan.io scan
89 | if self.options["--additional-info"]:
90 | threads.append(threading.Thread(target=search_shodan, args=(self,)))
91 |
92 | # Additional options - nmap scan of dnssec script and a host/port scan
93 | if self.options["--with-nmap"]:
94 | threads.append(
95 | threading.Thread(target=dnssecc_subdomain_enum, args=(self, target)))
96 | threads.append(threading.Thread(target=scan_host, args=(self, target)))
97 |
98 | # Start all threads and wait for them to finish
99 | for x in threads:
100 | x.start()
101 |
102 | for x in threads:
103 | x.join()
104 |
105 | # Run a recursive search on each subdomain - rarely useful, but nice to have
106 | # just in case
107 | if self.options["--recursive"]:
108 | recursive_search(self)
109 |
110 | # remove duplicates and clean up
111 | self.domains = self.clean_domains(self.domains)
112 | self.dedupe = set(self.domains)
113 |
114 | print("Found", len(self.dedupe), "subdomains")
115 | print("----------------")
116 |
117 | if self.options["--ip"]:
118 | self.resolve_ips()
119 | else:
120 | for domain in self.dedupe:
121 | cleaned_domain = domain.strip()
122 | ColorPrint.green(cleaned_domain)
123 | if self.options['--silent']:
124 | sys.stdout.write(cleaned_domain + '\n', override=True)
125 |
126 | if self.options["--send-to-anubis-db"]:
127 | send_to_anubisdb(self, [target])
128 | # reset per domain
129 | self.domains = list()
130 |
131 | def resolve_ips(self):
132 | unique_ips = set()
133 | for domain in self.dedupe:
134 | try:
135 | # Attempt to get IP
136 | resolved_ip = socket.gethostbyname(domain)
137 | except Exception as e:
138 | # If getting IP fails, fallback to empty string
139 | resolved_ip = ""
140 | # TODO - Align domains and ips in stdout
141 | ColorPrint.green(domain + ": " + resolved_ip)
142 | if self.options['--silent']:
143 | sys.stdout.write(domain + '\n', override=True)
144 |
145 | if resolved_ip:
146 | unique_ips.add(resolved_ip)
147 | print("Found %s unique IPs" % len(unique_ips))
148 | for ip in unique_ips:
149 | # Ignore empty strings, final sanity check
150 | if ip:
151 | ColorPrint.green(ip)
152 |
153 | @staticmethod
154 | def clean_domains(domains):
155 | cleaned = []
156 | for subdomain in domains:
157 | subdomain = subdomain.lower()
158 | if subdomain.find("//") != -1:
159 | subdomain = subdomain[subdomain.find("//") + 2:]
160 | # Some pkey return instances like example.com. - remove the final .
161 | if subdomain.endswith('.'):
162 | subdomain = subdomain[:-1]
163 | # sometimes we'll get something like /www.example.com
164 | if subdomain and subdomain[0] in ["\\", ".", "/", "#", "$", "%"]:
165 | subdomain = subdomain[1:]
166 | # If it's an email address, only take the domain part
167 | if "@" in subdomain:
168 | subdomain = subdomain.split("@")
169 | # If it's an actual email like mail@example.com, take example.com
170 | if len(subdomain) > 1:
171 | subdomain = subdomain[1]
172 | else:
173 | # If for some reason it's example.com@, take example.com
174 | subdomain = subdomain[0]
175 |
176 | cleaned.append(subdomain.strip())
177 | return cleaned
178 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Anubis
2 |
3 | [](https://travis-ci.org/jonluca/Anubis)  [](https://github.com/jonluca/Anubis/issues) [](https://github.com/jonluca/Anubis/blob/master/LICENSE)
4 |
5 | ```
6 | d8888 888 d8b
7 | d88888 888 Y8P
8 | d88P888 888
9 | d88P 888 88888b. 888 888 88888b. 888 .d8888b
10 | d88P 888 888 "88b 888 888 888 "88b 888 88K
11 | d88P 888 888 888 888 888 888 888 888 "Y8888b.
12 | d8888888888 888 888 Y88b 888 888 d88P 888 X88
13 | d88P 888 888 888 "Y88888 88888P" 888 88888P'
14 | ```
15 |
16 | Anubis is a subdomain enumeration and information gathering tool. Anubis collates data from a variety of sources,
17 | including HackerTarget, DNSDumpster, x509 certs, VirusTotal, Google, Pkey, Shodan, Spyse, and NetCraft.
18 | Anubis also has a sister project, [AnubisDB](https://github.com/jonluca/Anubis-DB), which serves as a centralized
19 | repository of subdomains.
20 |
21 | [Original Medium article release](https://medium.com/@jonluca/introducing-anubis-a-new-subdomain-enumerator-and-information-gathering-tool-d25b39ad98f2)
22 |
23 | ## Getting Started
24 |
25 | ### Prerequisites
26 |
27 | * Nmap (required for port scans and certain certificate scans)
28 |
29 | If you are running Linux, the following are also required:
30 |
31 | `sudo apt-get install python3-pip python-dev libssl-dev libffi-dev`
32 |
33 | ### Installing
34 |
35 | Note: Python 3 is required
36 |
37 | `pip3 install anubis-netsec`
38 |
39 | ### Install From Source
40 |
41 | Please note Anubis is still in beta.
42 |
43 | ```
44 | git clone https://github.com/jonluca/Anubis.git
45 | cd Anubis
46 | pip3 install -r requirements.txt
47 | pip3 install .
48 | ```
49 |
50 | ## Usage
51 |
52 | Usage:
53 | anubis (-t TARGET | -f FILE) [-o FILENAME] [-bdinoprsSv] [-w SCAN] [-q NUM]
54 | anubis -h
55 | anubis (--version | -V)
56 |
57 | Options:
58 | -h --help show this help message and exit
59 | -t --target set target (comma separated, no spaces, if multiple)
60 | -f --file set target (reads from file, one domain per line)
61 | -n --with-nmap perform an nmap service/script scan
62 | -o --output save to filename
63 | -i --additional-info show additional information about the host from Shodan (requires API key)
64 | -p --ip outputs the resolved IPs for each subdomain, and a full list of unique ips
65 | -d --send-to-anubis-db send results to Anubis-DB
66 | -r --recursive recursively search over all subdomains
67 | -s --ssl run an ssl scan and output cipher + chain info
68 | -S --silent only output subdomains, one per line
69 | -w --overwrite-nmap-scan SCAN overwrite default nmap scan (default -nPn -sV -sC)
70 | -v --verbose print debug info and full request output
71 | -q --queue-workers NUM override number of queue workers (default: 10, max: 100)
72 | -V --version show version and exit
73 |
74 | Help:
75 | For help using this tool, please open an issue on the Github repository:
76 | https://github.com/jonluca/anubis
77 |
78 | Note: If you'd like to use the shodan.io API, make sure to prefix the command with `SHODAN_API_KEY=yourkey`
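
For example, with a hypothetical key value, a scan that pulls additional Shodan information (`-i`) could be invoked as:

```
SHODAN_API_KEY=yourkey anubis -t example.com -i
```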
79 |
80 | ### Basic
81 |
82 | #### Common Use Case
83 |
84 | `anubis -tip domain.com -o out.txt`
85 |
86 | Sets the target to `domain.com` (`t`), outputs additional information (`i`) such as the ISP or hosting provider from Shodan,
87 | resolves all subdomains to IPs and prints the list of unique IPs (`p`), and finally writes all
88 | results to out.txt (`o`).
89 |
90 | #### Other
91 |
92 | ```anubis -t reddit.com``` Simplest use of Anubis, just runs subdomain enumeration
93 |
94 | ```
95 | Searching for subdomains for 151.101.65.140 (reddit.com)
96 |
97 | Testing for zone transfers
98 | Searching for Subject Alt Names
99 | Searching HackerTarget
100 | Searching VirusTotal
101 | Searching Pkey.in
102 | Searching NetCraft.com
103 | Searching crt.sh
104 | Searching DNSDumpster
105 | Searching Anubis-DB
106 | Found 193 subdomains
107 | ----------------
108 | fj.reddit.com
109 | se.reddit.com
110 | gateway.reddit.com
111 | beta.reddit.com
112 | ww.reddit.com
113 | ... (truncated for readability)
114 | Sending to AnubisDB
115 | Subdomain search took 0:00:20.390
116 | ```
117 |
118 | `anubis -t reddit.com -ip` (equivalent to `anubis -t reddit.com --additional-info --ip`) - resolves IPs and outputs a list
119 | of unique ones, and provides additional information through https://shodan.io
120 |
121 | ```
122 | Searching for subdomains for 151.101.65.140
123 | Server Location: San Francisco US - 94107
124 | ISP: Fastly
125 | Found 27 domains
126 | ----------------
127 | http://www.np.reddit.com: 151.101.193.140
128 | http://nm.reddit.com: 151.101.193.140
129 | http://ww.reddit.com: 151.101.193.140
130 | http://dg.reddit.com: 151.101.193.140
131 | http://en.reddit.com: 151.101.193.140
132 | http://ads.reddit.com: 151.101.193.140
133 | http://zz.reddit.com: 151.101.193.140
134 | out.reddit.com: 107.23.11.190
135 | origin.reddit.com: 54.172.97.226
136 | http://blog.reddit.com: 151.101.193.140
137 | alb.reddit.com: 52.201.172.48
138 | http://m.reddit.com: 151.101.193.140
139 | http://rr.reddit.com: 151.101.193.140
140 | reddit.com: 151.101.65.140
141 | http://www.reddit.com: 151.101.193.140
142 | mx03.reddit.com: 151.101.193.140
143 | http://fr.reddit.com: 151.101.193.140
144 | rhs.reddit.com: 54.172.97.229
145 | http://np.reddit.com: 151.101.193.140
146 | http://nj.reddit.com: 151.101.193.140
147 | http://re.reddit.com: 151.101.193.140
148 | http://iy.reddit.com: 151.101.193.140
149 | mx02.reddit.com: 151.101.193.140
150 | mailp236.reddit.com: 151.101.193.140
151 | Found 6 unique IPs
152 | 52.201.172.48
153 | 151.101.193.140
154 | 107.23.11.190
155 | 151.101.65.140
156 | 54.172.97.226
157 | 54.172.97.229
158 | Execution took 0:00:04.604
159 | ```
160 |
161 | ### Advanced
162 |
163 | ```anubis -t reddit.com --with-nmap -o temp.txt -i --overwrite-nmap-scan "-F -T5"```
164 |
165 | ```
166 | Searching for subdomains for 151.101.65.140 (reddit.com)
167 |
168 | Testing for zone transfers
169 | Searching for Subject Alt Names
170 | Searching HackerTarget
171 | Searching VirusTotal
172 | Searching Pkey.in
173 | Searching NetCraft.com
174 | Searching crt.sh
175 | Searching DNSDumpster
176 | Searching Anubis-DB
177 | Searching Shodan.io for additional information
178 | Server Location: San Francisco, US - 94107
179 | ISP or Hosting Company: Fastly
180 | To run a DNSSEC subdomain enumeration, Anubis must be run as root
181 | Starting Nmap Scan
182 | Host : 151.101.65.140 ()
183 | ----------
184 | Protocol: tcp
185 | port: 80 state: open
186 | port: 443 state: open
187 | Found 195 subdomains
188 | ----------------
189 | nm.reddit.com
190 | ne.reddit.com
191 | sonics.reddit.com
192 | aj.reddit.com
193 | fo.reddit.com
194 | f5.reddit.com
195 | ... (truncated for readability)
196 | Sending to AnubisDB
197 | Subdomain search took 0:00:26.579
198 | ```
199 |
200 | ## Running the tests
201 |
202 | Run the tests on their own, in the native pytest environment:
203 |
204 | ```pytest```
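
To run a single test module instead of the whole suite (standard pytest behaviour), pass its path, for example:

```pytest tests/commands/test_target.py```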
205 |
206 | ## Built With
207 |
208 | * CLI Boilerplate by [Skele-CLI](https://github.com/rdegges/skele-cli)
209 |
210 | ## Contributing
211 |
212 | Please read [CONTRIBUTING.md](https://github.com/jonluca/Anubis/blob/master/CONTRIBUTING.md) for details on our code of
213 | conduct, and the process for submitting pull requests to us.
214 |
215 | ## Authors
216 |
217 | * **JonLuca DeCaro** - *Initial work* - [Anubis](https://github.com/Anubis)
218 |
219 | See also the list of [contributors](https://github.com/jonluca/Anubis/contributors) who participated in this project.
220 |
221 | ## License
222 |
223 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
224 |
225 | ## Acknowledgments
226 |
227 | * [/r/netsec](https://reddit.com/r/netsec)
228 |
229 | * [BitQuark for the most common subdomains](https://github.com/bitquark/dnspop/tree/master/results)
230 |
231 |
--------------------------------------------------------------------------------