28 | 6. (Optionally) Have your own [tanner](https://github.com/mushorg/tanner) service running.
29 |
30 | > Cloner clones the whole website, to restrict to a desired depth of cloning add `--max-depth` parameter
31 |
32 | You obviously want to bind to 0.0.0.0 and port 80 when running in *production*.
33 |
34 | ## Docker build instructions
35 |
36 | 1. Change current directory to `snare` project directory
37 | 2. `docker-compose build`
38 | 3. `docker-compose up`
39 |
40 | More information about running `docker-compose` can be found [here](https://docs.docker.com/compose/gettingstarted/).
41 |
--------------------------------------------------------------------------------
/readthedocs.yml:
--------------------------------------------------------------------------------
1 | build:
2 | image: latest
3 |
4 | python:
5 | version: 3.6
6 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp==3.7.4
2 | aiohttp_jinja2==1.5.0
3 | beautifulsoup4==4.6.3
4 | cssutils==1.0.2
5 | gitpython==3.1.30
6 | pycodestyle==2.4.0
7 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from setuptools import find_packages, setup
3 |
4 |
5 | setup(
6 | name="Snare",
7 | version="0.3.0",
8 | description="Super Next generation Advanced Reactive honEypot",
9 | author="MushMush Foundation",
10 | author_email="glastopf@public.honeynet.org",
11 | url="https://github.com/mushorg/snare",
12 | packages=find_packages(exclude=["*.pyc"]),
13 | scripts=["./bin/snare", "./bin/clone"],
14 | )
15 |
--------------------------------------------------------------------------------
/snare/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mushorg/snare/b17fdfe7c2ba3ac540548763d73fc475cfc185c4/snare/__init__.py
--------------------------------------------------------------------------------
/snare/cloner.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import logging
4 | import asyncio
5 | import hashlib
6 | import json
7 | import re
8 | import aiohttp
9 | import cssutils
10 | import yarl
11 | from bs4 import BeautifulSoup
12 | from asyncio import Queue
13 | from collections import defaultdict
14 |
15 | animation = "|/-\\"
16 |
17 |
18 | class Cloner(object):
19 | def __init__(self, root, max_depth, css_validate, default_path="/opt/snare"):
20 | self.logger = logging.getLogger(__name__)
21 | self.logger.setLevel(logging.DEBUG)
22 | self.visited_urls = []
23 | self.root, self.error_page = self.add_scheme(root)
24 | self.max_depth = max_depth
25 | self.moved_root = None
26 | self.default_path = default_path
27 | if (self.root.host is None) or (len(self.root.host) < 4):
28 | sys.exit("invalid target {}".format(self.root.host))
29 | self.target_path = "{}/pages/{}".format(self.default_path, self.root.host)
30 |
31 | if not os.path.exists(self.target_path):
32 | os.makedirs(self.target_path)
33 | self.css_validate = css_validate
34 | self.new_urls = Queue()
35 | self.meta = defaultdict(dict)
36 |
37 | self.counter = 0
38 | self.itr = 0
39 |
40 | @staticmethod
41 | def add_scheme(url):
42 | new_url = yarl.URL(url)
43 | if not new_url.scheme:
44 | new_url = yarl.URL("http://" + url)
45 | err_url = new_url.with_path("/status_404").with_query(None).with_fragment(None)
46 | return new_url, err_url
47 |
48 | @staticmethod
49 | def get_headers(response):
50 | ignored_headers_lowercase = [
51 | "age",
52 | "cache-control",
53 | "connection",
54 | "content-encoding",
55 | "content-length",
56 | "date",
57 | "etag",
58 | "expires",
59 | "x-cache",
60 | ]
61 |
62 | headers = []
63 | for key, value in response.headers.items():
64 | if key.lower() not in ignored_headers_lowercase:
65 | headers.append({key: value})
66 | return headers
67 |
68 | async def process_link(self, url, level, check_host=False):
69 | try:
70 | url = yarl.URL(url)
71 | except UnicodeError:
72 | return None
73 | if url.scheme in ["data", "javascript", "file"]:
74 | return url.human_repr()
75 | if not url.is_absolute():
76 | if self.moved_root is None:
77 | url = self.root.join(url)
78 | else:
79 | url = self.moved_root.join(url)
80 |
81 | host = url.host
82 |
83 | if check_host:
84 | if (
85 | (host != self.root.host and self.moved_root is None)
86 | or url.fragment
87 | or (self.moved_root is not None and host != self.moved_root.host)
88 | ):
89 | return None
90 | if url.human_repr() not in self.visited_urls and (level + 1) <= self.max_depth:
91 | await self.new_urls.put((url, level + 1))
92 |
93 | res = None
94 | try:
95 | res = url.relative().human_repr()
96 | except ValueError:
97 | self.logger.error("ValueError while processing the %s link", url)
98 | return res
99 |
100 | async def replace_links(self, data, level):
101 | soup = BeautifulSoup(data, "html.parser")
102 |
103 | # find all relative links
104 | for link in soup.findAll(href=True):
105 | res = await self.process_link(link["href"], level, check_host=True)
106 | if res is not None:
107 | link["href"] = res
108 |
109 | # find all images and scripts
110 | for elem in soup.findAll(src=True):
111 | res = await self.process_link(elem["src"], level)
112 | if res is not None:
113 | elem["src"] = res
114 |
115 | # find all action elements
116 | for act_link in soup.findAll(action=True):
117 | res = await self.process_link(act_link["action"], level)
118 | if res is not None:
119 | act_link["action"] = res
120 |
121 | # prevent redirects
122 | for redir in soup.findAll(True, attrs={"name": re.compile("redirect.*")}):
123 | if redir["value"] != "":
124 | redir["value"] = yarl.URL(redir["value"]).relative().human_repr()
125 |
126 | return soup
127 |
128 | def _make_filename(self, url):
129 | host = url.host
130 | if url.is_absolute():
131 | file_name = url.relative().human_repr()
132 | else:
133 | file_name = url.human_repr()
134 | if not file_name.startswith("/"):
135 | file_name = "/" + file_name
136 |
137 | if file_name == "/" or file_name == "":
138 | if host == self.root.host or self.moved_root is not None and self.moved_root.host == host:
139 | file_name = "/index.html"
140 | else:
141 | file_name = host
142 | m = hashlib.md5()
143 | m.update(file_name.encode("utf-8"))
144 | hash_name = m.hexdigest()
145 | return file_name, hash_name
146 |
147 | async def get_body(self, session):
148 | while not self.new_urls.empty():
149 | print(animation[self.itr % len(animation)], end="\r")
150 | self.itr = self.itr + 1
151 | current_url, level = await self.new_urls.get()
152 | if current_url.human_repr() in self.visited_urls:
153 | continue
154 | self.visited_urls.append(current_url.human_repr())
155 | file_name, hash_name = self._make_filename(current_url)
156 | self.logger.debug("Cloned file: %s", file_name)
157 | data = None
158 | content_type = None
159 | try:
160 | response = await session.get(current_url, headers={"Accept": "text/html"}, timeout=10.0)
161 | headers = self.get_headers(response)
162 | content_type = response.content_type
163 | data = await response.read()
164 | except (aiohttp.ClientError, asyncio.TimeoutError) as client_error:
165 | self.logger.error(client_error)
166 | else:
167 | await response.release()
168 |
169 | if data is not None:
170 | self.meta[file_name]["hash"] = hash_name
171 | self.meta[file_name]["headers"] = headers
172 | self.counter = self.counter + 1
173 |
174 | if content_type == "text/html":
175 | soup = await self.replace_links(data, level)
176 | data = str(soup).encode()
177 | elif content_type == "text/css":
178 | css = cssutils.parseString(data, validate=self.css_validate)
179 | for carved_url in cssutils.getUrls(css):
180 | if carved_url.startswith("data"):
181 | continue
182 | carved_url = yarl.URL(carved_url)
183 | if not carved_url.is_absolute():
184 | carved_url = self.root.join(carved_url)
185 | if carved_url.human_repr() not in self.visited_urls:
186 | await self.new_urls.put((carved_url, level + 1))
187 |
188 | with open(os.path.join(self.target_path, hash_name), "wb") as index_fh:
189 | index_fh.write(data)
190 |
191 | async def get_root_host(self):
192 | try:
193 | async with aiohttp.ClientSession() as session:
194 | resp = await session.get(self.root)
195 | if resp.host != self.root.host:
196 | self.moved_root = resp.url
197 | resp.close()
198 | except aiohttp.ClientError as err:
199 | self.logger.error("Can't connect to target host: %s", err)
200 | exit(-1)
201 |
202 | async def run(self):
203 | session = aiohttp.ClientSession()
204 | try:
205 | await self.new_urls.put((self.root, 0))
206 | await self.new_urls.put((self.error_page, 0))
207 | await self.get_body(session)
208 | except KeyboardInterrupt:
209 | raise
210 | finally:
211 | with open(os.path.join(self.target_path, "meta.json"), "w") as mj:
212 | json.dump(self.meta, mj)
213 | await session.close()
214 |
--------------------------------------------------------------------------------
/snare/html_handler.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | import logging
4 | import cssutils
5 | import aiohttp
6 | from bs4 import BeautifulSoup
7 |
8 |
9 | class HtmlHandler:
10 | def __init__(self, no_dorks, tanner):
11 | self.no_dorks = no_dorks
12 | self.dorks = []
13 | self.logger = logging.getLogger(__name__)
14 | self.tanner = tanner
15 |
16 | async def get_dorks(self):
17 | dorks = None
18 | try:
19 | async with aiohttp.ClientSession() as session:
20 | r = await session.get("http://{0}:8090/dorks".format(self.tanner), timeout=10.0)
21 | try:
22 | dorks = await r.json()
23 | except json.decoder.JSONDecodeError as e:
24 | self.logger.error("Error getting dorks: %s", e)
25 | finally:
26 | await r.release()
27 | except asyncio.TimeoutError as error:
28 | self.logger.error("Dorks timeout error: %s", error)
29 | return dorks["response"]["dorks"] if dorks else []
30 |
31 | async def handle_content(self, content):
32 | soup = BeautifulSoup(content, "html.parser")
33 | if self.no_dorks is not True:
34 | for p_elem in soup.find_all("p"):
35 | if p_elem.findChildren():
36 | continue
37 | css = None
38 | if "style" in p_elem.attrs:
39 | css = cssutils.parseStyle(p_elem.attrs["style"])
40 | text_list = p_elem.text.split()
41 | p_new = soup.new_tag("p", style=css.cssText if css else None)
42 | for idx, word in enumerate(text_list):
43 | # Fetch dorks if required
44 | if len(self.dorks) <= 0:
45 | self.dorks = await self.get_dorks()
46 | word += " "
47 | if idx % 5 == 0:
48 | a_tag = soup.new_tag(
49 | "a",
50 | href=self.dorks.pop(),
51 | style="color:{color};text-decoration:none;cursor:text;".format(
52 | color=css.color if css and "color" in css.keys() else "#000000"
53 | ),
54 | )
55 | a_tag.string = word
56 | p_new.append(a_tag)
57 | else:
58 | p_new.append(soup.new_string(word))
59 | p_elem.replace_with(p_new)
60 | content = soup.encode("utf-8")
61 | return content
62 |
--------------------------------------------------------------------------------
/snare/middlewares.py:
--------------------------------------------------------------------------------
1 | import aiohttp_jinja2
2 | import multidict
3 | from aiohttp import web
4 |
5 |
6 | class SnareMiddleware:
7 | def __init__(self, error_404, error_500=None, headers=[], server_header=""):
8 | self.error_404 = error_404
9 | self.error_500 = error_500 if error_500 else "500.html"
10 |
11 | self.headers = multidict.CIMultiDict()
12 | for header in headers:
13 | for key, value in header.items():
14 | self.headers.add(key, value)
15 |
16 | if server_header:
17 | self.headers["Server"] = server_header
18 |
19 | async def handle_404(self, request):
20 | return aiohttp_jinja2.render_template(self.error_404, request, {})
21 |
22 | async def handle_500(self, request):
23 | return aiohttp_jinja2.render_template(self.error_500, request, {})
24 |
25 | def create_error_middleware(self, overrides):
26 | @web.middleware
27 | async def error_middleware(request, handler):
28 | try:
29 | response = await handler(request)
30 | status = response.status
31 | override = overrides.get(status)
32 | if override:
33 | response = await override(request)
34 | response.headers.update(self.headers)
35 | response.set_status(status)
36 | return response
37 | return response
38 | except web.HTTPException as ex:
39 | override = overrides.get(ex.status)
40 | if override:
41 | return await override(request)
42 | raise
43 |
44 | return error_middleware
45 |
46 | def setup_middlewares(self, app):
47 | error_middleware = self.create_error_middleware(
48 | {
49 | 404: self.handle_404,
50 | 500: self.handle_500,
51 | }
52 | )
53 | app.middlewares.append(error_middleware)
54 |
--------------------------------------------------------------------------------
/snare/server.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import aiohttp
3 | import aiohttp_jinja2
4 | import jinja2
5 |
6 | from aiohttp import web
7 | from aiohttp.web import StaticResource as StaticRoute
8 |
9 | from snare.middlewares import SnareMiddleware
10 | from snare.tanner_handler import TannerHandler
11 |
12 |
13 | class HttpRequestHandler:
14 | def __init__(self, meta, run_args, snare_uuid, debug=False, keep_alive=75, **kwargs):
15 | self.run_args = run_args
16 | self.dir = run_args.full_page_path
17 | self.meta = meta
18 | self.snare_uuid = snare_uuid
19 | self.logger = logging.getLogger(__name__)
20 | self.sroute = StaticRoute(name=None, prefix="/", directory=self.dir)
21 | self.tanner_handler = TannerHandler(run_args, meta, snare_uuid)
22 |
23 | async def submit_slurp(self, data):
24 | try:
25 | async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
26 | r = await session.post(
27 | "https://{0}:8080/api?auth={1}&chan=snare_test&msg={2}".format(
28 | self.run_args.slurp_host, self.run_args.slurp_auth, data
29 | ),
30 | json=data,
31 | timeout=10.0,
32 | )
33 | assert r.status == 200
34 | r.close()
35 | except Exception as e:
36 | self.logger.error("Error submitting slurp: %s", e)
37 |
38 | async def handle_request(self, request):
39 | self.logger.info("Request path: {0}".format(request.path_qs))
40 | data = self.tanner_handler.create_data(request, 200)
41 | if request.method == "POST":
42 | post_data = await request.post()
43 | self.logger.info("POST data:")
44 | for key, val in post_data.items():
45 | self.logger.info("\t- {0}: {1}".format(key, val))
46 | data["post_data"] = dict(post_data)
47 |
48 | # Submit the event to the TANNER service
49 | event_result = await self.tanner_handler.submit_data(data)
50 |
51 | # Log the event to slurp service if enabled
52 | if self.run_args.slurp_enabled:
53 | await self.submit_slurp(request.path_qs)
54 |
55 | content, headers, status_code = await self.tanner_handler.parse_tanner_response(
56 | request.path_qs, event_result["response"]["message"]["detection"]
57 | )
58 |
59 | if self.run_args.server_header:
60 | headers["Server"] = self.run_args.server_header
61 |
62 | if "cookies" in data and "sess_uuid" in data["cookies"]:
63 | previous_sess_uuid = data["cookies"]["sess_uuid"]
64 | else:
65 | previous_sess_uuid = None
66 |
67 | if event_result is not None and "sess_uuid" in event_result["response"]["message"]:
68 | cur_sess_id = event_result["response"]["message"]["sess_uuid"]
69 | if previous_sess_uuid is None or not previous_sess_uuid.strip() or previous_sess_uuid != cur_sess_id:
70 | headers.add("Set-Cookie", "sess_uuid=" + cur_sess_id)
71 |
72 | return web.Response(body=content, status=status_code, headers=headers)
73 |
74 | async def start(self):
75 | app = web.Application()
76 | app.add_routes([web.route("*", "/{tail:.*}", self.handle_request)])
77 | aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader(self.dir))
78 | middleware = SnareMiddleware(
79 | error_404=self.meta["/status_404"].get("hash"),
80 | headers=self.meta["/status_404"].get("headers", []),
81 | server_header=self.run_args.server_header,
82 | )
83 | middleware.setup_middlewares(app)
84 |
85 | self.runner = web.AppRunner(app)
86 | await self.runner.setup()
87 | site = web.TCPSite(self.runner, self.run_args.host_ip, self.run_args.port)
88 |
89 | await site.start()
90 | names = sorted(str(s.name) for s in self.runner.sites)
91 | print("======== Running on {} ========\n" "(Press CTRL+C to quit)".format(", ".join(names)))
92 |
93 | async def stop(self):
94 | await self.runner.cleanup()
95 |
--------------------------------------------------------------------------------
/snare/tanner_handler.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os
3 | import multidict
4 | import json
5 | import logging
6 | import aiohttp
7 |
8 | from urllib.parse import unquote
9 | from bs4 import BeautifulSoup
10 | from snare.html_handler import HtmlHandler
11 |
12 |
13 | class TannerHandler:
14 | def __init__(self, run_args, meta, snare_uuid):
15 | self.run_args = run_args
16 | self.meta = meta
17 | self.dir = run_args.full_page_path
18 | self.snare_uuid = snare_uuid
19 | self.html_handler = HtmlHandler(run_args.no_dorks, run_args.tanner)
20 | self.logger = logging.getLogger(__name__)
21 |
22 | def create_data(self, request, response_status):
23 | data = dict(
24 | method=None,
25 | path=None,
26 | headers=None,
27 | uuid=self.snare_uuid.decode("utf-8"),
28 | peer=None,
29 | status=response_status,
30 | )
31 | if request.transport:
32 | peer = dict(
33 | ip=request.transport.get_extra_info("peername")[0],
34 | port=request.transport.get_extra_info("peername")[1],
35 | )
36 | data["peer"] = peer
37 | if request.path:
38 | # FIXME request.headers is a CIMultiDict, so items with the same
39 | # key will be overwritten when converting to dictionary
40 | header = {key: value for (key, value) in request.headers.items()}
41 | data["method"] = request.method
42 | data["headers"] = header
43 | data["path"] = request.path_qs
44 | if "Cookie" in header:
45 | data["cookies"] = {cookie.split("=")[0]: cookie.split("=")[1] for cookie in header["Cookie"].split(";")}
46 | return data
47 |
48 | async def submit_data(self, data):
49 | event_result = None
50 | try:
51 | async with aiohttp.ClientSession() as session:
52 | r = await session.post(
53 | "http://{0}:8090/event".format(self.run_args.tanner),
54 | json=data,
55 | timeout=10.0,
56 | )
57 | try:
58 | event_result = await r.json()
59 | except (
60 | json.decoder.JSONDecodeError,
61 | aiohttp.client_exceptions.ContentTypeError,
62 | ) as e:
63 | self.logger.error("Error submitting data: {} {}".format(e, data))
64 | event_result = {
65 | "version": "0.6.0",
66 | "response": {
67 | "message": {
68 | "detection": {
69 | "name": "index",
70 | "order": 1,
71 | "type": 1,
72 | "version": "0.6.0",
73 | },
74 | "sess_uuid": data["uuid"],
75 | }
76 | },
77 | }
78 | finally:
79 | await r.release()
80 | except Exception as e:
81 | self.logger.exception("Exception: %s", e)
82 | raise e
83 | return event_result
84 |
85 | async def parse_tanner_response(self, requested_name, detection):
86 | content = None
87 | status_code = 200
88 | headers = multidict.CIMultiDict()
89 | # Creating a regex object for the pattern of multiple contiguous forward slashes
90 | p = re.compile("/+")
91 | # Substituting all occurrences of the pattern with single forward slash
92 | requested_name = p.sub("/", requested_name)
93 |
94 | if detection["type"] == 1:
95 | possible_requests = [requested_name]
96 | query_start = requested_name.find("?")
97 | if query_start != -1:
98 | possible_requests.append(requested_name[:query_start])
99 |
100 | file_name = None
101 | for requested_name in possible_requests:
102 | if requested_name == "/":
103 | requested_name = self.run_args.index_page
104 | if requested_name[-1] == "/":
105 | requested_name = requested_name[:-1]
106 | requested_name = unquote(requested_name)
107 | try:
108 | file_name = self.meta[requested_name]["hash"]
109 | for header in self.meta[requested_name].get("headers", []):
110 | for key, value in header.items():
111 | headers.add(key, value)
112 | # overwrite headers with legacy content-type if present and not none
113 | content_type = self.meta[requested_name].get("content_type")
114 | if content_type:
115 | headers["Content-Type"] = content_type
116 | except KeyError:
117 | pass
118 | else:
119 | break
120 |
121 | if not file_name:
122 | status_code = 404
123 | else:
124 | path = os.path.join(self.dir, file_name)
125 | if os.path.isfile(path):
126 | with open(path, "rb") as fh:
127 | content = fh.read()
128 | if headers.get("Content-Type", "").startswith("text/html"):
129 | content = await self.html_handler.handle_content(content)
130 |
131 | elif detection["type"] == 2:
132 | payload_content = detection["payload"]
133 | if payload_content["page"]:
134 | try:
135 | file_name = self.meta[payload_content["page"]]["hash"]
136 | for header in self.meta[payload_content["page"]].get("headers", []):
137 | for key, value in header.items():
138 | headers.add(key, value)
139 | # overwrite headers with legacy content-type if present and not none
140 | content_type = self.meta[payload_content["page"]].get("content_type")
141 | if content_type:
142 | headers["Content-Type"] = content_type
143 | page_path = os.path.join(self.dir, file_name)
144 | with open(page_path, encoding="utf-8") as p:
145 | content = p.read()
146 | except KeyError:
147 | content = ""
148 | headers["Content-Type"] = "text/html"
149 |
150 | soup = BeautifulSoup(content, "html.parser")
151 | script_tag = soup.new_tag("div")
152 | script_tag.append(BeautifulSoup(payload_content["value"], "html.parser"))
153 | soup.body.append(script_tag)
154 | content = str(soup).encode()
155 | else:
156 | content_type = "text/plain"
157 | if content_type:
158 | headers["Content-Type"] = content_type
159 | content = payload_content["value"].encode("utf-8")
160 |
161 | if "headers" in payload_content:
162 | # overwrite local headers with the tanner-provided ones
163 | headers.update(payload_content["headers"])
164 |
165 | else: # type 3
166 | payload_content = detection["payload"]
167 | status_code = payload_content["status_code"]
168 |
169 | return content, headers, status_code
170 |
--------------------------------------------------------------------------------
/snare/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mushorg/snare/b17fdfe7c2ba3ac540548763d73fc475cfc185c4/snare/tests/__init__.py
--------------------------------------------------------------------------------
/snare/tests/test_cloner_add_scheme.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import sys
3 | import os
4 | import yarl
5 | import shutil
6 | from snare.cloner import Cloner
7 | from snare.utils.page_path_generator import generate_unique_path
8 |
9 |
10 | class TestCloner(unittest.TestCase):
11 | def setUp(self):
12 | self.url = "http://example.com"
13 | self.main_page_path = generate_unique_path()
14 | os.makedirs(self.main_page_path)
15 | self.expected_new_url = yarl.URL("http://example.com")
16 | self.expected_err_url = yarl.URL("http://example.com/status_404")
17 | self.max_depth = sys.maxsize
18 | self.css_validate = "false"
19 | self.handler = Cloner(self.url, self.max_depth, self.css_validate)
20 |
21 | def test_trailing_slash(self):
22 | self.url = "http://example.com/"
23 | new_url, err_url = self.handler.add_scheme(self.url)
24 | self.assertEqual(new_url, self.expected_new_url)
25 | self.assertEqual(err_url, self.expected_err_url)
26 |
27 | def test_add_scheme(self):
28 | new_url, err_url = self.handler.add_scheme(self.url)
29 |
30 | self.assertEqual(new_url, self.expected_new_url)
31 | self.assertEqual(err_url, self.expected_err_url)
32 |
33 | def test_no_scheme(self):
34 | self.url = "example.com"
35 | new_url, err_url = self.handler.add_scheme(self.url)
36 | self.assertEqual(new_url, self.expected_new_url)
37 | self.assertEqual(err_url, self.expected_err_url)
38 |
39 | def tearDown(self):
40 | shutil.rmtree(self.main_page_path)
41 |
42 | def test_no_host(self):
43 | self.url = "http:/"
44 | with self.assertRaises(SystemExit):
45 | Cloner(self.url, self.max_depth, self.css_validate)
46 |
47 | def test_limited_length_host(self):
48 | self.url = "http://aaa"
49 | with self.assertRaises(SystemExit):
50 | Cloner(self.url, self.max_depth, self.css_validate)
51 |
--------------------------------------------------------------------------------
/snare/tests/test_cloner_get_body.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import aiohttp
3 | import yarl
4 | import sys
5 | import os
6 | import shutil
7 | import asyncio
8 | from snare.cloner import Cloner
9 | from snare.utils.asyncmock import AsyncMock
10 | from snare.utils.page_path_generator import generate_unique_path
11 |
12 |
13 | class TestGetBody(unittest.TestCase):
14 | def setUp(self):
15 | self.main_page_path = generate_unique_path()
16 | os.makedirs(self.main_page_path)
17 | self.root = "http://example.com"
18 | self.level = 0
19 | self.max_depth = sys.maxsize
20 | self.loop = asyncio.new_event_loop()
21 | self.css_validate = "false"
22 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
23 | self.target_path = "/opt/snare/pages/{}".format(yarl.URL(self.root).host)
24 | self.return_content = None
25 | self.expected_content = None
26 | self.filename = None
27 | self.hashname = None
28 | self.url = None
29 | self.content = None
30 | self.return_url = None
31 | self.return_level = None
32 | self.meta = None
33 | self.q_size = None
34 |
35 | self.session = aiohttp.ClientSession
36 | self.session.get = AsyncMock(
37 | return_value=aiohttp.ClientResponse(
38 | url=yarl.URL("http://www.example.com"),
39 | method="GET",
40 | writer=None,
41 | continue100=1,
42 | timer=None,
43 | request_info=None,
44 | traces=None,
45 | loop=self.loop,
46 | session=None,
47 | )
48 | )
49 |
50 | def test_get_body(self):
51 | self.content = b""""""
52 |
53 | aiohttp.ClientResponse._headers = {"Content-Type": "text/html"}
54 | aiohttp.ClientResponse.read = AsyncMock(return_value=self.content)
55 | self.filename, self.hashname = self.handler._make_filename(yarl.URL(self.root))
56 | self.expected_content = ''
57 |
58 | self.meta = {
59 | "/index.html": {
60 | "hash": "d1546d731a9f30cc80127d57142a482b",
61 | "headers": [{"Content-Type": "text/html"}],
62 | },
63 | "/test": {
64 | "hash": "4539330648b80f94ef3bf911f6d77ac9",
65 | "headers": [{"Content-Type": "text/html"}],
66 | },
67 | }
68 |
69 | async def test():
70 | await self.handler.new_urls.put((yarl.URL(self.root), 0))
71 | await self.handler.get_body(self.session)
72 |
73 | with self.assertLogs(level="DEBUG") as log:
74 | self.loop.run_until_complete(test())
75 | self.assertIn("DEBUG:snare.cloner:Cloned file: /test", "".join(log.output))
76 |
77 | with open(os.path.join(self.target_path, self.hashname)) as f:
78 | self.return_content = f.read()
79 |
80 | self.assertEqual(self.return_content, self.expected_content)
81 | self.assertEqual(
82 | self.handler.visited_urls[-2:],
83 | ["http://example.com/", "http://example.com/test"],
84 | )
85 | self.assertEqual(self.handler.meta, self.meta)
86 |
87 | def test_get_body_css_validate(self):
88 | aiohttp.ClientResponse._headers = {"Content-Type": "text/css"}
89 |
90 | self.css_validate = "true"
91 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
92 | self.content = b""".banner { background: url("/example.png") }"""
93 | aiohttp.ClientResponse.read = AsyncMock(return_value=self.content)
94 | self.expected_content = "http://example.com/example.png"
95 | self.return_size = 0
96 | self.meta = {
97 | "/example.png": {
98 | "hash": "5a64beebcd2a6f1cbd00b8370debaa72",
99 | "headers": [{"Content-Type": "text/css"}],
100 | },
101 | "/index.html": {
102 | "hash": "d1546d731a9f30cc80127d57142a482b",
103 | "headers": [{"Content-Type": "text/css"}],
104 | },
105 | }
106 |
107 | async def test():
108 | await self.handler.new_urls.put((yarl.URL(self.root), 0))
109 | await self.handler.get_body(self.session)
110 | self.q_size = self.handler.new_urls.qsize()
111 |
112 | self.loop.run_until_complete(test())
113 | self.assertEqual(self.handler.visited_urls[-1], self.expected_content)
114 | self.assertEqual(self.q_size, self.return_size)
115 | self.assertEqual(self.meta, self.handler.meta)
116 |
117 | def test_get_body_css_validate_scheme(self):
118 | aiohttp.ClientResponse._headers = {"Content-Type": "text/css"}
119 |
120 | self.css_validate = "true"
121 | self.return_size = 0
122 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
123 | self.content = [
124 | b""".banner { background: url("data://domain/test.txt") }""",
125 | b""".banner { background: url("file://domain/test.txt") }""",
126 | ]
127 | self.meta = {
128 | "/index.html": {
129 | "hash": "d1546d731a9f30cc80127d57142a482b",
130 | "headers": [{"Content-Type": "text/css"}],
131 | },
132 | }
133 |
134 | self.expected_content = "http://example.com/"
135 |
136 | async def test():
137 | await self.handler.new_urls.put((yarl.URL(self.root), 0))
138 | await self.handler.get_body(self.session)
139 | self.q_size = self.handler.new_urls.qsize()
140 |
141 | for content in self.content:
142 | aiohttp.ClientResponse.read = AsyncMock(return_value=content)
143 | self.loop.run_until_complete(test())
144 | self.assertEqual(self.return_size, self.q_size)
145 | self.assertEqual(self.handler.meta, self.meta)
146 | self.assertEqual(self.handler.visited_urls[-1], self.expected_content)
147 |
148 | def test_client_error(self):
149 | self.session.get = AsyncMock(side_effect=aiohttp.ClientError)
150 |
151 | async def test():
152 | await self.handler.new_urls.put((yarl.URL(self.root), 0))
153 | await self.handler.get_body(self.session)
154 |
155 | with self.assertLogs(level="ERROR") as log:
156 | self.loop.run_until_complete(test())
157 | self.assertIn("ERROR:snare.cloner:", "".join(log.output))
158 |
159 | def tearDown(self):
160 | shutil.rmtree(self.main_page_path)
161 |
--------------------------------------------------------------------------------
/snare/tests/test_cloner_get_root_host.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest import mock
3 | import sys
4 | from snare.cloner import Cloner
5 | import shutil
6 | from yarl import URL
7 | import asyncio
8 | import aiohttp
9 | from snare.utils.asyncmock import AsyncMock
10 |
11 |
12 | class TestClonerGetRootHost(unittest.TestCase):
13 | def setUp(self):
14 | self.loop = asyncio.new_event_loop()
15 |
16 | def test_moved_root(self):
17 | self.root = "http://example.com"
18 | self.max_depth = sys.maxsize
19 | self.css_validate = "false"
20 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
21 | self.expected_moved_root = URL("http://www.example.com")
22 |
23 | async def test():
24 | await self.handler.get_root_host()
25 |
26 | self.loop.run_until_complete(test())
27 |
28 | self.assertEqual(self.handler.moved_root, self.expected_moved_root)
29 |
30 | @mock.patch("aiohttp.ClientSession")
31 | def test_clienterror(self, session):
32 | self.root = "http://example.com"
33 | self.max_depth = sys.maxsize
34 | self.css_validate = "false"
35 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
36 |
37 | aiohttp.ClientSession = mock.Mock(side_effect=aiohttp.ClientError)
38 |
39 | async def test():
40 | await self.handler.get_root_host()
41 |
42 | with self.assertRaises(SystemExit):
43 | self.loop.run_until_complete(test())
44 |
--------------------------------------------------------------------------------
/snare/tests/test_cloner_init.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import sys
3 | from snare.cloner import Cloner
4 | import shutil
5 |
6 |
7 | class TestClonerInitialization(unittest.TestCase):
8 | def setUp(self):
9 | self.root = "http://example.com"
10 | self.max_depth = sys.maxsize
11 | self.css_validate = "false"
12 | self.handler = Cloner(self.root, self.max_depth, self.css_validate, default_path="/tmp")
13 |
14 | def test_cloner_init(self):
15 | self.assertIsInstance(self.handler, Cloner)
16 |
17 | def tearDown(self):
18 | shutil.rmtree(self.handler.target_path)
19 |
--------------------------------------------------------------------------------
/snare/tests/test_cloner_make_filename.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import sys
3 | import os
4 | import shutil
5 | import yarl
6 | import asyncio
7 | from snare.cloner import Cloner
8 | from snare.utils.page_path_generator import generate_unique_path
9 |
10 |
11 | class TestMakeFilename(unittest.TestCase):
12 | def setUp(self):
13 | self.main_page_path = generate_unique_path()
14 | os.makedirs(self.main_page_path)
15 | self.url = yarl.URL("http://foo.com")
16 | self.root = "http://example.com"
17 | self.max_depth = sys.maxsize
18 | self.loop = asyncio.new_event_loop()
19 | self.css_validate = "false"
20 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
21 | self.filename = None
22 | self.hashname = None
23 |
24 | def test_make_filename(self):
25 | self.filename, self.hashname = self.handler._make_filename(self.url)
26 | self.assertEqual(self.filename, "foo.com")
27 | self.assertEqual(self.hashname, "167a0418dd8ce3bf0ef00dfb6195f038")
28 |
29 | def test_make_filename_same_host(self):
30 | self.filename, self.hashname = self.handler._make_filename(yarl.URL(self.root))
31 | self.assertEqual(self.filename, "/index.html")
32 | self.assertEqual(self.hashname, "d1546d731a9f30cc80127d57142a482b")
33 |
34 | def test_make_filename_relative(self):
35 | self.url = yarl.URL("/images")
36 | self.filename, self.hashname = self.handler._make_filename(self.url)
37 | self.assertEqual(self.filename, "/images")
38 | self.assertEqual(self.hashname, "41389bcf7f7427468d8c8675db2d4f98")
39 |
40 | def tearDown(self):
41 | shutil.rmtree(self.main_page_path)
42 |
--------------------------------------------------------------------------------
/snare/tests/test_cloner_process_links.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import asyncio
3 | import sys
4 | import yarl
5 | from unittest import mock
6 | from snare.cloner import Cloner
7 |
8 |
9 | class TestProcessLinks(unittest.TestCase):
10 | def setUp(self):
11 | self.root = "http://example.com"
12 | self.level = 0
13 | self.max_depth = sys.maxsize
14 | self.loop = asyncio.new_event_loop()
15 | self.css_validate = "false"
16 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
17 | self.expected_content = None
18 | self.return_content = None
19 | self.return_url = None
20 | self.return_level = None
21 | self.qsize = None
22 |
23 | def test_process_link_scheme(self):
24 | test_urls = [
25 | "file://images/test.png",
26 | "data://images/test.txt",
27 | "javascript://alert(1)/",
28 | ]
29 |
30 | async def test(url_param):
31 | self.return_content = await self.handler.process_link(url_param, self.level)
32 | self.qsize = self.handler.new_urls.qsize()
33 |
34 | for url in test_urls:
35 |
36 | self.loop.run_until_complete(test(url))
37 | self.expected_content = url
38 | self.return_size = 0
39 | self.assertEqual(self.expected_content, self.return_content)
40 | self.assertEqual(self.qsize, self.return_size)
41 |
42 | def test_process_link_relative(self):
43 | self.url = "/foo/путь/"
44 | self.expected_content = "http://example.com/foo/путь/"
45 |
46 | async def test():
47 | self.return_content = await self.handler.process_link(self.url, self.level)
48 | self.return_url, self.return_level = await self.handler.new_urls.get()
49 |
50 | self.loop.run_until_complete(test())
51 | self.assertEqual(self.return_content, "/foo/путь/")
52 | self.assertEqual(yarl.URL(self.return_url).human_repr(), self.expected_content)
53 | self.assertEqual(self.return_level, self.level + 1)
54 |
55 | self.handler.moved_root = yarl.URL("http://example2.com")
56 | self.expected_content = "http://example2.com/foo/путь/"
57 |
58 | self.loop.run_until_complete(test())
59 | self.assertEqual(self.return_content, "/foo/путь/")
60 | self.assertEqual(yarl.URL(self.return_url).human_repr(), self.expected_content)
61 | self.assertEqual(self.return_level, self.level + 1)
62 |
63 | def test_process_link_absolute(self):
64 | self.url = "http://domain.com"
65 | self.expected_content = ""
66 |
67 | async def test():
68 | self.return_content = await self.handler.process_link(self.url, self.level)
69 | self.return_url, self.return_level = await self.handler.new_urls.get()
70 |
71 | self.loop.run_until_complete(test())
72 | self.assertEqual(self.return_content, self.expected_content)
73 | self.assertEqual(yarl.URL(self.url), self.return_url)
74 | self.assertEqual(self.return_level, self.level + 1)
75 |
76 | def test_check_host(self):
77 | self.url = "http://foo.com"
78 | self.return_size = 0
79 |
80 | async def test():
81 | self.return_content = await self.handler.process_link(self.url, self.level, check_host=True)
82 | self.qsize = self.handler.new_urls.qsize()
83 |
84 | self.loop.run_until_complete(test())
85 | self.assertEqual(self.return_content, None)
86 | self.assertEqual(self.qsize, self.return_size)
87 |
88 | @mock.patch("yarl.URL")
89 | def test_process_link_unicode_error(self, url):
90 |
91 | yarl.URL = mock.Mock(side_effect=UnicodeError)
92 |
93 | async def test():
94 | self.return_content = await self.handler.process_link(self.root, self.level)
95 |
96 | self.loop.run_until_complete(test())
97 | self.assertEqual(self.return_content, self.expected_content)
98 |
--------------------------------------------------------------------------------
/snare/tests/test_cloner_replace_links.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import sys
3 | import os
4 | import shutil
5 | import asyncio
6 | from snare.cloner import Cloner
7 | from snare.utils.page_path_generator import generate_unique_path
8 | from snare.utils.asyncmock import AsyncMock
9 |
10 |
11 | class TestReplaceLinks(unittest.TestCase):
12 | def setUp(self):
13 | self.main_page_path = generate_unique_path()
14 | os.makedirs(self.main_page_path)
15 | self.root = "http://example.com"
16 | self.level = 0
17 | self.max_depth = sys.maxsize
18 | self.loop = asyncio.new_event_loop()
19 | self.css_validate = "false"
20 | self.handler = Cloner(self.root, self.max_depth, self.css_validate)
21 | self.content = None
22 | self.expected_content = None
23 | self.return_content = None
24 |
25 | def test_replace_relative_links(self):
26 | self.handler.process_link = AsyncMock(return_value="/test")
27 | self.root = "http://example.com/test"
28 | self.content = '\n\n\n\n\n\n'
29 |
30 | self.expected_content = '\n\n\n\n\n\n'
31 |
32 | async def test():
33 | self.return_content = await self.handler.replace_links(self.content, self.level)
34 |
35 | self.loop.run_until_complete(test())
36 | self.assertEqual(str(self.return_content), self.expected_content)
37 | self.handler.process_link.assert_called_with(self.root, self.level, check_host=True)
38 |
39 | def test_replace_image_links(self):
40 | self.handler.process_link = AsyncMock(return_value="/smiley.png")
41 | self.root = "http://example.com/smiley.png"
42 | self.content = '\n\n\n
\n\n\n'
43 |
44 | self.expected_content = '\n\n\n
\n\n\n'
45 |
46 | async def test():
47 | self.return_content = await self.handler.replace_links(self.content, self.level)
48 |
49 | self.loop.run_until_complete(test())
50 | self.assertEqual(str(self.return_content), self.expected_content)
51 | self.handler.process_link.assert_called_with(self.root, self.level)
52 |
53 | def test_replace_action_links(self):
54 | self.handler.process_link = AsyncMock(return_value="/submit.php")
55 | self.root = "http://example.com/submit.php"
56 | self.content = '\n\n\n\n\n\n'
57 |
58 | self.expected_content = '\n\n\n\n\n\n'
59 |
60 | async def test():
61 | self.return_content = await self.handler.replace_links(self.content, self.level)
62 |
63 | self.loop.run_until_complete(test())
64 | self.assertEqual(str(self.return_content), self.expected_content)
65 | self.handler.process_link.assert_called_with(self.root, self.level)
66 |
67 | def test_replace_redirects(self):
68 | self.root = "http://example.com"
69 | self.content = (
70 | '\n\n\nRedirecting...
\n'
71 | "\n\n"
72 | )
73 |
74 | self.expected_content = (
75 | '\n\n\nRedirecting...
\n\n' "\n"
76 | )
77 |
78 | async def test():
79 | self.return_content = await self.handler.replace_links(self.content, self.level)
80 |
81 | self.loop.run_until_complete(test())
82 | self.assertEqual(str(self.return_content), self.expected_content)
83 |
84 | def tearDown(self):
85 | shutil.rmtree(self.main_page_path)
86 |
--------------------------------------------------------------------------------
/snare/tests/test_cloner_run.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import sys
3 | from snare.cloner import Cloner
4 | import shutil
5 | import asyncio
6 |
7 |
8 | class TestClonerRun(unittest.TestCase):
9 | def setUp(self):
10 | self.root = "http://example.com"
11 | self.max_depth = sys.maxsize
12 | self.css_validate = "false"
13 | self.handler = Cloner(self.root, self.max_depth, self.css_validate, default_path="/tmp")
14 | self.loop = asyncio.new_event_loop()
15 |
16 | def test_run(self):
17 | self.loop.run_until_complete(self.handler.run())
18 |
--------------------------------------------------------------------------------
/snare/tests/test_html_handler_get_dorks.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import asyncio
3 | import shutil
4 | import os
5 | import yarl
6 | import aiohttp
7 | from json import JSONDecodeError
8 | from snare.utils.asyncmock import AsyncMock
9 | from snare.html_handler import HtmlHandler
10 | from snare.utils.page_path_generator import generate_unique_path
11 |
12 |
13 | class TestGetDorks(unittest.TestCase):
14 | def setUp(self):
15 | self.main_page_path = generate_unique_path()
16 | os.makedirs(self.main_page_path)
17 | self.dorks = dict(response={"dorks": "test_dorks"})
18 | self.loop = asyncio.new_event_loop()
19 | aiohttp.ClientSession.get = AsyncMock(
20 | return_value=aiohttp.ClientResponse(
21 | url=yarl.URL("http://www.example.com"),
22 | method="GET",
23 | writer=None,
24 | continue100=1,
25 | timer=None,
26 | request_info=None,
27 | traces=None,
28 | loop=self.loop,
29 | session=None,
30 | )
31 | )
32 | no_dorks = True
33 | tanner = "tanner.mushmush.org"
34 | self.handler = HtmlHandler(no_dorks, tanner)
35 | self.data = None
36 |
37 | def test_get_dorks(self):
38 | aiohttp.ClientResponse.json = AsyncMock(return_value=dict(response={"dorks": "test_dorks"}))
39 |
40 | async def test():
41 | self.data = await self.handler.get_dorks()
42 |
43 | self.loop.run_until_complete(test())
44 | aiohttp.ClientSession.get.assert_called_with("http://tanner.mushmush.org:8090/dorks", timeout=10.0)
45 |
46 | def test_return_dorks(self):
47 | aiohttp.ClientResponse.json = AsyncMock(return_value=self.dorks)
48 |
49 | async def test():
50 | self.data = await self.handler.get_dorks()
51 |
52 | self.loop.run_until_complete(test())
53 | self.assertEqual(self.data, self.dorks["response"]["dorks"])
54 |
55 | def test_logging_error(self):
56 | aiohttp.ClientResponse.json = AsyncMock(side_effect=JSONDecodeError("ERROR", "", 0))
57 |
58 | async def test():
59 | self.data = await self.handler.get_dorks()
60 |
61 | with self.assertLogs(level="ERROR") as log:
62 | self.loop.run_until_complete(test())
63 | self.assertIn("Error getting dorks: ERROR: line 1 column 1 (char 0)", log.output[0])
64 |
65 | def test_logging_timeout(self):
66 | aiohttp.ClientResponse.json = AsyncMock(side_effect=asyncio.TimeoutError())
67 |
68 | async def test():
69 | self.data = await self.handler.get_dorks()
70 |
71 | with self.assertLogs(level="INFO") as log:
72 | self.loop.run_until_complete(test())
73 | self.assertIn("Dorks timeout", log.output[0])
74 |
75 | def test_return_dorks_exception(self):
76 | aiohttp.ClientResponse.json = AsyncMock(side_effect=Exception())
77 |
78 | async def test():
79 | self.data = await self.handler.get_dorks()
80 |
81 | with self.assertRaises(Exception):
82 | self.loop.run_until_complete(test())
83 |
84 | def tearDown(self):
85 | shutil.rmtree(self.main_page_path)
86 |
--------------------------------------------------------------------------------
/snare/tests/test_html_handler_handle_html_content.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import asyncio
3 | import shutil
4 | import os
5 | from bs4 import BeautifulSoup
6 | from snare.utils.asyncmock import AsyncMock
7 | from snare.html_handler import HtmlHandler
8 | from snare.utils.page_path_generator import generate_unique_path
9 |
10 |
11 | class TestHandleHtmlContent(unittest.TestCase):
12 | def setUp(self):
13 | self.main_page_path = generate_unique_path()
14 | os.makedirs(self.main_page_path)
15 | self.content = """
16 |
17 |
18 | A paragraph to be tested
19 |
20 |
21 | """
22 | self.expected_content = '\n \n \n'
23 | self.expected_content += ' \n'
24 | self.expected_content += " A\n \n paragraph to be tested\n
\n \n\n"
25 | self.no_dorks_content = '\n \n \n A paragraph to be tested\n'
26 | self.no_dorks_content += "
\n \n\n"
27 | self.loop = asyncio.new_event_loop()
28 | self.return_content = None
29 | no_dorks = True
30 | tanner = "tanner.mushmush.org"
31 | self.handler = HtmlHandler(no_dorks, tanner)
32 |
33 | def test_handle_content(self):
34 | self.handler.no_dorks = False
35 | self.handler.get_dorks = AsyncMock(return_value=["test_dork1"])
36 |
37 | async def test():
38 | self.return_content = await self.handler.handle_content(self.content)
39 |
40 | self.loop.run_until_complete(test())
41 | soup = BeautifulSoup(self.return_content, "html.parser")
42 | return_content = soup.decode("utf-8")
43 | self.assertEqual(return_content, self.expected_content)
44 |
45 | def test_handle_content_no_dorks(self):
46 | self.handler.no_dorks = True
47 |
48 | async def test():
49 | self.return_content = await self.handler.handle_content(self.content)
50 |
51 | self.loop.run_until_complete(test())
52 | soup = BeautifulSoup(self.return_content, "html.parser")
53 | self.return_content = soup.decode("utf-8")
54 | self.assertEqual(self.return_content, self.no_dorks_content)
55 |
56 | def test_handle_content_exception(self):
57 | self.handler.no_dorks = False
58 | self.handler.get_dorks = AsyncMock(return_value=[])
59 |
60 | async def test():
61 | self.return_content = await self.handler.handle_content(self.content)
62 |
63 | with self.assertRaises(IndexError):
64 | self.loop.run_until_complete(test())
65 |
66 | def tearDown(self):
67 | shutil.rmtree(self.main_page_path)
68 |
--------------------------------------------------------------------------------
/snare/tests/test_logger.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from snare.utils.logger import Logger, LevelFilter
3 | import logging
4 | import os
5 |
6 |
7 | class TestLogger(unittest.TestCase):
8 | def setUp(self):
9 | self.cloner_log_file = "/tmp/cloner.log"
10 | self.snare_log_file = "/tmp/snare.log"
11 | self.snare_err_log_file = "/tmp/snare.err"
12 | self.record_dict = {"levelno": logging.INFO}
13 | self.logger = Logger.create_logger(self.snare_log_file, self.snare_err_log_file, __name__)
14 |
15 | def test_create_clone_logger(self):
16 | self.assertIsNone(Logger.create_clone_logger(self.cloner_log_file, __name__))
17 |
18 | def test_create_logger(self):
19 | self.assertIsInstance(self.logger, logging.Logger)
20 |
21 | def test_filter(self):
22 | self.assertTrue(LevelFilter(logging.ERROR).filter(logging.makeLogRecord(self.record_dict)))
23 |
24 | def tearDown(self):
25 | try:
26 | os.remove(self.cloner_log_file)
27 | os.remove(self.snare_log_file)
28 | os.remove(self.snare_err_log_file)
29 | except FileNotFoundError:
30 | pass
31 |
--------------------------------------------------------------------------------
/snare/tests/test_middleware.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from snare.middlewares import SnareMiddleware
3 |
4 |
5 | class TestMiddleware(unittest.TestCase):
6 | def setUp(self):
7 | self.middleware = SnareMiddleware(
8 | "error_404.html",
9 | headers=[{"Content-Type": "text/html; charset=UTF-8"}],
10 | server_header="nginx",
11 | )
12 |
13 | def test_initialization(self):
14 | self.assertIsInstance(self.middleware, SnareMiddleware)
15 |
--------------------------------------------------------------------------------
/snare/tests/test_server_handle_request.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import Mock
3 | import asyncio
4 | import argparse
5 | import shutil
6 | import multidict
7 | import os
8 | import aiohttp
9 | from aiohttp.http_parser import RawRequestMessage
10 | from aiohttp import HttpVersion
11 | from aiohttp import web
12 | from yarl import URL
13 | from snare.server import HttpRequestHandler
14 | from snare.utils.asyncmock import AsyncMock
15 | from snare.utils.page_path_generator import generate_unique_path
16 |
17 |
18 | class TestHandleRequest(unittest.TestCase):
19 | def setUp(self):
20 | meta = {}
21 | run_args = argparse.ArgumentParser()
22 | run_args.add_argument("--tanner")
23 | run_args.add_argument("--page-dir")
24 | self.main_page_path = generate_unique_path()
25 | os.makedirs(self.main_page_path)
26 | self.page_dir = self.main_page_path.rsplit("/")[-1]
27 | args = run_args.parse_args(["--page-dir", self.page_dir])
28 | args_dict = vars(args)
29 | args_dict["full_page_path"] = self.main_page_path
30 | uuid = "9c10172f-7ce2-4fb4-b1c6-abc70141db56".encode("utf-8")
31 | args.tanner = "tanner.mushmush.org"
32 | args.no_dorks = True
33 | args.server_header = "test_server"
34 | args.slurp_enabled = True
35 | self.handler = HttpRequestHandler(meta, args, uuid)
36 | self.request_data = {
37 | "method": "GET",
38 | "path": "/",
39 | "headers": {
40 | "Host": "test_host",
41 | "Content-Type": "test_type",
42 | },
43 | "status": 200,
44 | "cookies": {
45 | "sess_uuid": "prev_test_uuid",
46 | },
47 | }
48 | self.loop = asyncio.new_event_loop()
49 | self.response_content = ""
50 | self.response_headers = multidict.CIMultiDict([("Content-Type", "text/html")])
51 | self.response_status = 200
52 | event_result = dict(response=dict(message=dict(detection={"type": 1}, sess_uuid="test_uuid")))
53 | RequestHandler = Mock()
54 | protocol = RequestHandler()
55 | message = RawRequestMessage(
56 | method="POST",
57 | path="/",
58 | version=HttpVersion(major=1, minor=1),
59 | headers=self.request_data["headers"],
60 | raw_headers=None,
61 | should_close=None,
62 | compression=None,
63 | upgrade=None,
64 | chunked=None,
65 | url=URL("http://test_url/"),
66 | )
67 | self.request = web.Request(
68 | message=message,
69 | payload=None,
70 | protocol=protocol,
71 | payload_writer=None,
72 | task="POST",
73 | loop=self.loop,
74 | )
75 | self.handler.tanner_handler.create_data = Mock(return_value=self.request_data)
76 | self.handler.tanner_handler.submit_data = AsyncMock(return_value=event_result)
77 | self.handler.submit_slurp = AsyncMock()
78 | web.Response.add_header = Mock()
79 | web.Response.write = Mock()
80 | web.Response.send_headers = Mock()
81 | web.Response.write_eof = AsyncMock()
82 | aiohttp.streams.EmptyStreamReader.read = AsyncMock(return_value=b"con1=test1&con2=test2")
83 | self.handler.tanner_handler.parse_tanner_response = AsyncMock(
84 | return_value=(
85 | self.response_content,
86 | self.response_headers,
87 | self.response_status,
88 | )
89 | )
90 |
91 | def test_create_request_data(self):
92 | async def test():
93 | await self.handler.handle_request(self.request)
94 |
95 | self.loop.run_until_complete(test())
96 | self.handler.tanner_handler.create_data.assert_called_with(self.request, 200)
97 |
98 | def test_submit_request_data(self):
99 | async def test():
100 | await self.handler.handle_request(self.request)
101 |
102 | self.loop.run_until_complete(test())
103 | self.handler.tanner_handler.submit_data.assert_called_with(self.request_data)
104 |
105 | def test_submit_request_slurp(self):
106 | async def test():
107 | await self.handler.handle_request(self.request)
108 |
109 | self.loop.run_until_complete(test())
110 | self.handler.submit_slurp.assert_called_with(self.request.path_qs)
111 |
112 | def test_parse_response(self):
113 | async def test():
114 | await self.handler.handle_request(self.request)
115 |
116 | self.loop.run_until_complete(test())
117 | self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path_qs, {"type": 1})
118 |
119 | def test_no_prev_sess_uuid(self):
120 | self.request_data = {
121 | "method": "GET",
122 | "path": "/",
123 | "headers": {
124 | "Host": "test_host",
125 | "Content-Type": "test_type",
126 | },
127 | "status": 200,
128 | }
129 | self.handler.tanner_handler.create_data = Mock(return_value=self.request_data)
130 |
131 | async def test():
132 | await self.handler.handle_request(self.request)
133 |
134 | self.loop.run_until_complete(test())
135 | self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path_qs, {"type": 1})
136 |
137 | def tearDown(self):
138 | shutil.rmtree(self.main_page_path)
139 |
--------------------------------------------------------------------------------
/snare/tests/test_server_stop.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import Mock
3 | import asyncio
4 | import argparse
5 | import shutil
6 | import os
7 | from snare.server import HttpRequestHandler
8 | from snare.utils.asyncmock import AsyncMock
9 | from snare.utils.page_path_generator import generate_unique_path
10 |
11 |
12 | class TestServerStop(unittest.TestCase):
13 | def setUp(self):
14 | meta = {
15 | "/status_404": {
16 | "hash": "bacfa45149ffbe8dbff34609bf56d748",
17 | "headers": [{"Content-Type": "text/html; charset=UTF-8"}],
18 | }
19 | }
20 | run_args = argparse.ArgumentParser()
21 | self.main_page_path = generate_unique_path()
22 | os.makedirs(self.main_page_path)
23 | args = run_args.parse_args([])
24 | args_dict = vars(args)
25 | args_dict["full_page_path"] = self.main_page_path
26 | uuid = "9c10172f-7ce2-4fb4-b1c6-abc70141db56".encode("utf-8")
27 | args.tanner = "tanner.mushmush.org"
28 | args.no_dorks = True
29 | args.host_ip = "127.0.0.1"
30 | args.port = "80"
31 | self.handler = HttpRequestHandler(meta, args, uuid)
32 | self.loop = asyncio.new_event_loop()
33 |
34 | def test_handler_stop(self):
35 | self.handler.runner = AsyncMock()
36 |
37 | async def test():
38 | await self.handler.stop()
39 |
40 | self.loop.run_until_complete(test())
41 |
42 | def tearDown(self):
43 | shutil.rmtree(self.main_page_path)
44 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_add_meta_tag.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import os
3 | import shutil
4 | import configparser
5 | from bs4 import BeautifulSoup
6 | from snare.utils.snare_helpers import add_meta_tag
7 | from snare.utils.page_path_generator import generate_unique_path
8 |
9 |
10 | class TestAddMetaTag(unittest.TestCase):
11 | def setUp(self):
12 | self.main_page_path = generate_unique_path()
13 | os.makedirs(self.main_page_path)
14 | self.content = "titlesample"
15 | self.page_dir = self.main_page_path.rsplit("/")[-1]
16 | self.index_page = "index.html"
17 | with open(os.path.join(self.main_page_path, "index.html"), "w") as f:
18 | f.write(self.content)
19 |
20 | def test_add_meta_tag(self):
21 | config = configparser.ConfigParser()
22 | config["WEB-TOOLS"] = dict(google="test google content", bing="test bing content")
23 | add_meta_tag(self.page_dir, self.index_page, config, base_path="/opt/snare")
24 | with open(os.path.join(self.main_page_path, "index.html")) as main:
25 | main_page = main.read()
26 | soup = BeautifulSoup(main_page, "html.parser")
27 | assert soup.find("meta", attrs={"name": "google-site-verification"}) and soup.find(
28 | "meta", attrs={"name": "msvalidate.01"}
29 | )
30 |
31 | def test_add_meta_tag_with_empty_tags(self):
32 | config = configparser.ConfigParser()
33 | config["WEB-TOOLS"] = dict(google="", bing="")
34 | assert add_meta_tag(self.page_dir, self.index_page, config, base_path="/opt/snare") is None
35 |
36 | def tearDown(self):
37 | shutil.rmtree(self.main_page_path)
38 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_check_meta_file.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from snare.utils.snare_helpers import check_meta_file
3 |
4 |
5 | class TestMetaFile(unittest.TestCase):
6 | def setUp(self):
7 | self.correct_meta = {
8 | "/index.html": {
9 | "hash": "d1546d731a9f30cc80127d57142a482b",
10 | "headers": [{"Accept-Ranges": "bytes"}],
11 | }
12 | }
13 | self.incorrect_meta = {
14 | "/index.html": {
15 | "not_hash": "d1546d731a9f30cc80127d57142a482b",
16 | "headers": [{"Accept-Ranges": "bytes"}],
17 | }
18 | }
19 |
20 | def test_check_meta_file(self):
21 | self.assertTrue(check_meta_file(self.correct_meta))
22 | self.assertFalse(check_meta_file(self.incorrect_meta))
23 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_check_privileges.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from os.path import expanduser, join
3 |
4 | from snare.utils.snare_helpers import check_privileges
5 |
6 |
7 | @unittest.skip("fails in Travis")
8 | class TestStrToBool(unittest.TestCase):
9 | def test_privileges_in_root(self):
10 | self.path = "/"
11 | try:
12 | check_privileges(self.path)
13 | except PermissionError as e:
14 | self.fail(f"failed permissions check: {e}")
15 |
16 | def test_privileges_in_home(self):
17 | self.path = expanduser("~")
18 | try:
19 | check_privileges(self.path)
20 | except PermissionError as e:
21 | self.fail(f"failed permissions check: {e}")
22 |
23 | def test_non_existent_root_path(self):
24 | self.path = "/snare"
25 | try:
26 | check_privileges(self.path)
27 | except PermissionError as e:
28 | self.fail(f"failed permissions check: {e}")
29 |
30 | def test_non_existent_home_path(self):
31 | self.path = join(expanduser("~"), "snare")
32 | try:
33 | check_privileges(self.path)
34 | except PermissionError as e:
35 | self.fail(f"failed permissions check: {e}")
36 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_converter.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import os
3 | import shutil
4 | import json
5 | from snare.utils.snare_helpers import Converter
6 |
7 |
8 | class TestConverter(unittest.TestCase):
9 | def setUp(self):
10 | self.content = ""
11 | self.page_path = "/tmp/test/"
12 | if not os.path.exists("/tmp/test/depth"):
13 | os.makedirs("/tmp/test/depth")
14 | self.hname1 = ""
15 | self.hname2 = ""
16 | with open(os.path.join(self.page_path, "index.html"), "w") as f:
17 | f.write(self.content)
18 | with open(os.path.join(self.page_path, "depth/page.html"), "w") as f:
19 | f.write(self.content)
20 | self.cnv = Converter()
21 |
22 | def test_converter(self):
23 | self.cnv.convert(self.page_path)
24 | with open(os.path.join(self.page_path, "meta.json")) as f:
25 | s = json.load(f)
26 | self.hname1 = s["index.html"]["hash"]
27 | self.hname2 = s["depth/page.html"]["hash"]
28 | assert os.path.exists(self.page_path + self.hname1) and os.path.exists(self.page_path + self.hname2)
29 |
30 | def tearDown(self):
31 | shutil.rmtree("/tmp/test")
32 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_parse_timeout.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from snare.utils.snare_helpers import parse_timeout
3 |
4 |
5 | class TestParseTimeout(unittest.TestCase):
6 | def test_parse_timeout(self):
7 | assert parse_timeout("20H") == 20 * 60 * 60
8 | assert parse_timeout("10M") == 10 * 60
9 | assert parse_timeout("1D") == 24 * 60 * 60
10 |
11 | # Default 24H format is used.
12 | assert parse_timeout("24Y") == 24 * 60 * 60
13 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_print_color.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from snare.utils.snare_helpers import print_color
3 |
4 |
5 | class TestPrintColor(unittest.TestCase):
6 | def test_print_color(self):
7 | self.assertIsNone(print_color("testing print_color()", "INFO"))
8 | self.assertIsNone(print_color("testing print_color()", "WRONG_MODE"))
9 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_str_to_bool.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from argparse import ArgumentTypeError
3 | from snare.utils.snare_helpers import str_to_bool
4 |
5 |
6 | class TestStrToBool(unittest.TestCase):
7 | def setUp(self):
8 | self.v = None
9 |
10 | def test_str_to_bool_true(self):
11 | self.v = "true"
12 | assert str_to_bool(self.v) is True
13 |
14 | def test_str_to_bool_false(self):
15 | self.v = "false"
16 | assert str_to_bool(self.v) is False
17 |
18 | def test_str_to_bool_error(self):
19 | self.v = "twz"
20 | with self.assertRaises(ArgumentTypeError):
21 | str_to_bool(self.v)
22 |
--------------------------------------------------------------------------------
/snare/tests/test_snare_helpers_versions_manager.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from snare.utils.snare_helpers import VersionManager
3 |
4 |
5 | class TestVersion(unittest.TestCase):
6 | def setUp(self):
7 | self.vm = VersionManager()
8 | self.vm.version = "0.1.0"
9 |
10 | def test_check_compatibilty_fails(self):
11 | with self.assertRaises(RuntimeError):
12 | self.vm.check_compatibility("0.0.0")
13 |
14 | def test_check_compatibilty_ok(self):
15 | self.vm.check_compatibility("0.3.0")
16 |
--------------------------------------------------------------------------------
/snare/tests/test_tanner_handler_create_data.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import Mock
3 | import shutil
4 | import os
5 | import asyncio
6 | import argparse
7 | from yarl import URL
8 | from aiohttp import HttpVersion
9 | from aiohttp import web
10 | from aiohttp.http_parser import RawRequestMessage
11 | from snare.tanner_handler import TannerHandler
12 | from snare.utils.page_path_generator import generate_unique_path
13 |
14 |
15 | class TestCreateData(unittest.TestCase):
16 | def setUp(self):
17 | meta = {}
18 | run_args = argparse.ArgumentParser()
19 | run_args.add_argument("--tanner")
20 | run_args.add_argument("--page-dir")
21 | self.main_page_path = generate_unique_path()
22 | os.makedirs(self.main_page_path)
23 | page_dir = self.main_page_path.rsplit("/")[-1]
24 | args = run_args.parse_args(["--page-dir", page_dir])
25 | args_dict = vars(args)
26 | args_dict["full_page_path"] = self.main_page_path
27 | snare_uuid = "9c10172f-7ce2-4fb4-b1c6-abc70141db56".encode("utf-8")
28 | args.no_dorks = True
29 | self.handler = TannerHandler(args, meta, snare_uuid)
30 | headers = {
31 | "Host": "test_host",
32 | "status": 200,
33 | "Cookie": "sess_uuid=prev_test_uuid; test_cookie=test",
34 | }
35 | message = RawRequestMessage(
36 | method="POST",
37 | path="/",
38 | version=HttpVersion(major=1, minor=1),
39 | headers=headers,
40 | raw_headers=None,
41 | should_close=None,
42 | compression=None,
43 | upgrade=None,
44 | chunked=None,
45 | url=URL("http://test_url/"),
46 | )
47 | loop = asyncio.get_event_loop()
48 | RequestHandler = Mock()
49 | protocol = RequestHandler()
50 | self.request = web.Request(
51 | message=message,
52 | payload=None,
53 | protocol=protocol,
54 | payload_writer=None,
55 | task="POST",
56 | loop=loop,
57 | )
58 | self.request.transport.get_extra_info = Mock(return_value=(["test_ip", "test_port"]))
59 | self.response_status = "test_status"
60 | self.data = None
61 | self.expected_data = {
62 | "method": "POST",
63 | "path": "http://test_url/",
64 | "headers": {
65 | "Host": "test_host",
66 | "status": 200,
67 | "Cookie": "sess_uuid=prev_test_uuid; test_cookie=test",
68 | },
69 | "uuid": "9c10172f-7ce2-4fb4-b1c6-abc70141db56",
70 | "peer": {"ip": "test_ip", "port": "test_port"},
71 | "status": "test_status",
72 | "cookies": {"sess_uuid": "prev_test_uuid", " test_cookie": "test"},
73 | }
74 |
75 | def test_create_data(self):
76 | self.data = self.handler.create_data(self.request, self.response_status)
77 | self.assertEqual(self.data, self.expected_data)
78 |
79 | def tearDown(self):
80 | shutil.rmtree(self.main_page_path)
81 |
--------------------------------------------------------------------------------
/snare/tests/test_tanner_handler_parse_tanner_response.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import asyncio
3 | import argparse
4 | import shutil
5 | import os
6 | import json
7 | import multidict
8 | from snare.utils.asyncmock import AsyncMock
9 | from snare.utils.page_path_generator import generate_unique_path
10 | from snare.tanner_handler import TannerHandler
11 |
12 |
13 | class TestParseTannerResponse(unittest.TestCase):
14 | def setUp(self):
15 | run_args = argparse.ArgumentParser()
16 | run_args.add_argument("--tanner")
17 | run_args.add_argument("--page-dir")
18 | self.main_page_path = generate_unique_path()
19 | os.makedirs(self.main_page_path)
20 | page_dir = self.main_page_path.rsplit("/")[-1]
21 | meta_content = {
22 | "/index.html": {
23 | "hash": "hash_name",
24 | "headers": [{"Content-Type": "text/html"}],
25 | }
26 | }
27 | self.page_content = ""
28 | self.headers = multidict.CIMultiDict([("Content-Type", "text/html")])
29 | self.status_code = 200
30 | self.content_type = "text/html"
31 | with open(os.path.join(self.main_page_path, "hash_name"), "w") as f:
32 | f.write(self.page_content)
33 | with open(os.path.join(self.main_page_path, "meta.json"), "w") as f:
34 | json.dump(meta_content, f)
35 | self.args = run_args.parse_args(["--page-dir", page_dir])
36 | args_dict = vars(self.args)
37 | args_dict["full_page_path"] = self.main_page_path
38 | self.args.index_page = "/index.html"
39 | self.args.no_dorks = True
40 | self.args.tanner = "tanner.mushmush.org"
41 | self.uuid = "test_uuid"
42 | self.handler = TannerHandler(self.args, meta_content, self.uuid)
43 | self.requested_name = "/"
44 | self.loop = asyncio.get_event_loop()
45 | self.handler.html_handler.handle_content = AsyncMock(return_value=self.page_content)
46 | self.res1 = None
47 | self.res2 = None
48 | self.res3 = None
49 | self.detection = None
50 | self.expected_content = None
51 | self.call_content = None
52 |
53 | def test_parse_type_one(self):
54 | self.detection = {"type": 1}
55 |
56 | async def test():
57 | (
58 | self.res1,
59 | self.res2,
60 | self.res3,
61 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
62 |
63 | self.loop.run_until_complete(test())
64 | real_result = [self.res1, self.res2, self.res3]
65 | expected_result = [self.page_content, self.headers, self.status_code]
66 | self.assertCountEqual(real_result, expected_result)
67 |
68 | def test_parse_type_one_query(self):
69 | self.requested_name = "/?"
70 | self.detection = {"type": 1}
71 |
72 | async def test():
73 | (
74 | self.res1,
75 | self.res2,
76 | self.res3,
77 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
78 |
79 | self.loop.run_until_complete(test())
80 | real_result = [self.res1, self.res2, self.res3]
81 | expected_result = [self.page_content, self.headers, self.status_code]
82 | self.assertCountEqual(real_result, expected_result)
83 |
84 | def test_parse_type_one_error(self):
85 | self.requested_name = "something/"
86 | self.detection = {"type": 1}
87 | self.expected_content = None
88 | self.headers = multidict.CIMultiDict()
89 | self.status_code = 404
90 |
91 | async def test():
92 | (
93 | self.res1,
94 | self.res2,
95 | self.res3,
96 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
97 |
98 | self.loop.run_until_complete(test())
99 | real_result = [self.res1, self.res2, self.res3]
100 | expected_result = [self.expected_content, self.headers, self.status_code]
101 | self.assertCountEqual(real_result, expected_result)
102 |
103 | def test_parse_type_two(self):
104 | self.detection = {
105 | "type": 2,
106 | "payload": {
107 | "page": "/index.html",
108 | "value": "test",
109 | },
110 | }
111 | self.expected_content = b"test
"
112 |
113 | async def test():
114 | (
115 | self.res1,
116 | self.res2,
117 | self.res3,
118 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
119 |
120 | self.loop.run_until_complete(test())
121 | real_result = [self.res1, self.res2, self.res3]
122 | expected_result = [self.expected_content, self.headers, self.status_code]
123 | self.assertCountEqual(real_result, expected_result)
124 |
125 | def test_parse_type_two_with_headers(self):
126 | self.detection = {
127 | "type": 2,
128 | "payload": {
129 | "page": "",
130 | "value": "test.png",
131 | "headers": {
132 | "content-type": "multipart/form-data",
133 | },
134 | },
135 | }
136 | self.expected_content = b"test.png"
137 | self.content_type = "image/png"
138 | self.headers = multidict.CIMultiDict([("Content-Type", "multipart/form-data")])
139 |
140 | async def test():
141 | (
142 | self.res1,
143 | self.res2,
144 | self.res3,
145 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
146 |
147 | self.loop.run_until_complete(test())
148 | real_result = [self.res1, self.res2, self.res3]
149 | expected_result = [self.expected_content, self.headers, self.status_code]
150 |
151 | self.assertCountEqual(real_result, expected_result)
152 |
153 | def test_parse_type_two_error(self):
154 | self.detection = {
155 | "type": 2,
156 | "payload": {
157 | "page": "/something",
158 | "value": "test",
159 | },
160 | }
161 | self.expected_content = b"test
"
162 | self.content_type = r"text/html"
163 |
164 | async def test():
165 | (
166 | self.res1,
167 | self.res2,
168 | self.res3,
169 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
170 |
171 | self.loop.run_until_complete(test())
172 | real_result = [self.res1, self.res2, self.res3]
173 | expected_result = [self.expected_content, self.headers, self.status_code]
174 | self.assertCountEqual(real_result, expected_result)
175 |
176 | def test_parse_type_three(self):
177 | self.detection = {
178 | "type": 3,
179 | "payload": {
180 | "page": "/index.html",
181 | "value": "test",
182 | "status_code": 200,
183 | },
184 | }
185 | self.expected_content = None
186 | self.headers = multidict.CIMultiDict()
187 |
188 | async def test():
189 | (
190 | self.res1,
191 | self.res2,
192 | self.res3,
193 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
194 |
195 | self.loop.run_until_complete(test())
196 | real_result = [self.res1, self.res2, self.res3]
197 | expected_result = [self.expected_content, self.headers, self.status_code]
198 | self.assertCountEqual(real_result, expected_result)
199 |
200 | def test_call_handle_html(self):
201 | self.detection = {"type": 1}
202 | self.call_content = b""
203 | self.expected_content = self.page_content
204 |
205 | async def test():
206 | (
207 | self.res1,
208 | self.res2,
209 | self.res3,
210 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
211 |
212 | self.loop.run_until_complete(test())
213 | self.handler.html_handler.handle_content.assert_called_with(self.call_content)
214 |
215 | def test_parse_exception(self):
216 | self.detection = {}
217 | self.expected_content = self.page_content
218 |
219 | async def test():
220 | (
221 | self.res1,
222 | self.res2,
223 | self.res3,
224 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection)
225 |
226 | with self.assertRaises(KeyError):
227 | self.loop.run_until_complete(test())
228 |
229 | def tearDown(self):
230 | shutil.rmtree(self.main_page_path)
231 |
--------------------------------------------------------------------------------
/snare/tests/test_tanner_handler_submit_data.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import asyncio
3 | import argparse
4 | import shutil
5 | import os
6 | import json
7 | import yarl
8 | import aiohttp
9 | from json import JSONDecodeError
10 | from snare.utils.asyncmock import AsyncMock
11 | from snare.tanner_handler import TannerHandler
12 | from snare.utils.page_path_generator import generate_unique_path
13 |
14 |
15 | class TestSubmitData(unittest.TestCase):
16 | def setUp(self):
17 | meta = {}
18 | run_args = argparse.ArgumentParser()
19 | run_args.add_argument("--tanner")
20 | run_args.add_argument("--page-dir")
21 | self.main_page_path = generate_unique_path()
22 | os.makedirs(self.main_page_path)
23 | page_dir = self.main_page_path.rsplit("/")[-1]
24 | args = run_args.parse_args(["--page-dir", page_dir])
25 | args_dict = vars(args)
26 | args_dict["full_page_path"] = self.main_page_path
27 | self.loop = asyncio.new_event_loop()
28 | self.data = {
29 | "method": "GET",
30 | "path": "/",
31 | "headers": {
32 | "Host": "test_host",
33 | "Connection": "keep-alive",
34 | "Upgrade-Insecure-Requests": "1",
35 | "User-Agent": "test_agent",
36 | "Accept": "text/html",
37 | "Accept-Encoding": "test_encoding",
38 | "Accept-Language": "test_lang",
39 | "Cookie": "test_cookie",
40 | },
41 | "uuid": "test_uuid",
42 | "peer": {"ip": "::1", "port": 80},
43 | "status": 200,
44 | "cookies": "test_cookies",
45 | "sess_uuid": "test_uuid",
46 | }
47 | aiohttp.ClientSession.post = AsyncMock(
48 | return_value=aiohttp.ClientResponse(
49 | url=yarl.URL("http://www.example.com"),
50 | method="GET",
51 | writer=None,
52 | continue100=1,
53 | timer=None,
54 | request_info=None,
55 | traces=None,
56 | loop=self.loop,
57 | session=None,
58 | )
59 | )
60 | uuid = "test_uuid"
61 | args.tanner = "tanner.mushmush.org"
62 | args.no_dorks = True
63 | self.handler = TannerHandler(args, meta, uuid)
64 | self.result = None
65 |
66 | def test_post_data(self):
67 | aiohttp.ClientResponse.json = AsyncMock(return_value=dict(detection={"type": 1}, sess_uuid="test_uuid"))
68 |
69 | async def test():
70 | self.result = await self.handler.submit_data(self.data)
71 |
72 | self.loop.run_until_complete(test())
73 | aiohttp.ClientSession.post.assert_called_with(
74 | "http://tanner.mushmush.org:8090/event", json=self.data, timeout=10.0
75 | )
76 |
77 | def test_event_result(self):
78 | aiohttp.ClientResponse.json = AsyncMock(return_value=dict(detection={"type": 1}, sess_uuid="test_uuid"))
79 |
80 | async def test():
81 | self.result = await self.handler.submit_data(self.data)
82 |
83 | self.loop.run_until_complete(test())
84 | self.assertEqual(self.result, dict(detection={"type": 1}, sess_uuid="test_uuid"))
85 |
86 | def test_submit_data_error(self):
87 | aiohttp.ClientResponse.json = AsyncMock(side_effect=JSONDecodeError("ERROR", "", 0))
88 |
89 | async def test():
90 | self.result = await self.handler.submit_data(self.data)
91 |
92 | with self.assertLogs(level="ERROR") as log:
93 | self.loop.run_until_complete(test())
94 | self.assertIn(
95 | "Error submitting data: ERROR: line 1 column 1 (char 0) {}".format(self.data),
96 | log.output[0],
97 | )
98 |
99 | def test_event_result_exception(self):
100 | aiohttp.ClientResponse.json = AsyncMock(side_effect=Exception())
101 |
102 | async def test():
103 | self.result = await self.handler.submit_data(self.data)
104 |
105 | with self.assertRaises(Exception):
106 | self.loop.run_until_complete(test())
107 |
108 | def tearDown(self):
109 | shutil.rmtree(self.main_page_path)
110 |
--------------------------------------------------------------------------------
/snare/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mushorg/snare/b17fdfe7c2ba3ac540548763d73fc475cfc185c4/snare/utils/__init__.py
--------------------------------------------------------------------------------
/snare/utils/asyncmock.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import Mock
2 |
3 |
4 | class AsyncMock(Mock): # custom function defined to mock asyncio coroutines
5 | def __call__(self, *args, **kwargs):
6 | sup = super(AsyncMock, self)
7 |
8 | async def coro():
9 | return sup.__call__(*args, **kwargs)
10 |
11 | return coro()
12 |
13 | def __await__(self):
14 | return self().__await__()
15 |
--------------------------------------------------------------------------------
/snare/utils/logger.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import logging.handlers
3 |
4 |
5 | class LevelFilter(logging.Filter):
6 | """Filters (lets through) all messages with level < LEVEL"""
7 |
8 | def __init__(self, level):
9 | self.level = level
10 |
11 | def filter(self, record):
12 | return record.levelno < self.level
13 |
14 | # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive
15 |
16 |
17 | class Logger:
18 | @staticmethod
19 | def create_logger(debug_filename, err_filename, logger_name):
20 | logger = logging.getLogger(logger_name)
21 | logger.setLevel(logging.DEBUG)
22 | logger.propagate = False
23 | formatter = logging.Formatter(
24 | fmt="%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s",
25 | datefmt="%Y-%m-%d %H:%M:%S",
26 | )
27 |
28 | # ERROR log to 'snare.err'
29 | error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding="utf-8")
30 | error_log_handler.setLevel(logging.ERROR)
31 | error_log_handler.setFormatter(formatter)
32 | logger.addHandler(error_log_handler)
33 |
34 | # DEBUG log to 'snare.log'
35 | debug_log_handler = logging.handlers.RotatingFileHandler(debug_filename, encoding="utf-8")
36 | debug_log_handler.setLevel(logging.DEBUG)
37 | debug_log_handler.setFormatter(formatter)
38 | max_level_filter = LevelFilter(logging.ERROR)
39 | debug_log_handler.addFilter(max_level_filter)
40 | logger.addHandler(debug_log_handler)
41 |
42 | return logger
43 |
44 | @staticmethod
45 | def create_clone_logger(log_filename, logger_name):
46 | logger = logging.getLogger(logger_name)
47 | formatter = logging.Formatter(
48 | fmt="%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s",
49 | datefmt="%Y-%m-%d %H:%M:%S",
50 | )
51 | # logs to 'clone.err'
52 | debug_log_handler = logging.handlers.RotatingFileHandler(log_filename, encoding="utf-8")
53 | debug_log_handler.setLevel(logging.DEBUG)
54 | debug_log_handler.setFormatter(formatter)
55 | logger.addHandler(debug_log_handler)
56 |
--------------------------------------------------------------------------------
/snare/utils/page_path_generator.py:
--------------------------------------------------------------------------------
1 | import string
2 | import random
3 | import os
4 |
5 |
6 | def directory_generator(size=9, chars=string.ascii_lowercase + string.digits):
7 | return "".join(random.choice(chars) for _ in range(size))
8 |
9 |
10 | def generate_unique_path():
11 | path = "/opt/snare/pages/" + directory_generator()
12 | while os.path.exists(path):
13 | path = "/opt/snare/pages/" + directory_generator()
14 | return path
15 |
--------------------------------------------------------------------------------
/snare/utils/snare_helpers.py:
--------------------------------------------------------------------------------
1 | import os
2 | import hashlib
3 | import mimetypes
4 | import json
5 | import shutil
6 | import argparse
7 | import logging
8 | from os import walk
9 | from distutils.version import StrictVersion
10 | from bs4 import BeautifulSoup
11 |
12 |
13 | class VersionManager:
14 | def __init__(self):
15 | self.logger = logging.getLogger(__name__)
16 | self.version = "0.3.0"
17 | self.version_mapper = {
18 | "0.1.0": ["0.1.0", "0.4.0"],
19 | "0.2.0": ["0.5.0", "0.5.0"],
20 | "0.3.0": ["0.5.0", "0.6.0"],
21 | }
22 |
23 | def check_compatibility(self, tanner_version):
24 | min_version = self.version_mapper[self.version][0]
25 | max_version = self.version_mapper[self.version][1]
26 | if not (StrictVersion(min_version) <= StrictVersion(tanner_version) <= StrictVersion(max_version)):
27 | self.logger.exception("Wrong tanner version %s", tanner_version)
28 | raise RuntimeError(
29 | "Wrong tanner version: {}. Compatible versions are {} - {}".format(
30 | tanner_version, min_version, max_version
31 | )
32 | )
33 |
34 |
35 | class Converter:
36 | def __init__(self):
37 | self.logger = logging.getLogger(__name__)
38 | self.meta = {}
39 |
40 | def convert(self, path):
41 | files_to_convert = []
42 |
43 | for (dirpath, dirnames, filenames) in walk(path):
44 | for fn in filenames:
45 | files_to_convert.append(os.path.join(dirpath, fn))
46 |
47 | for fn in files_to_convert:
48 | path_len = len(path)
49 | file_name = fn[path_len:]
50 | m = hashlib.md5()
51 | m.update(fn.encode("utf-8"))
52 | hash_name = m.hexdigest()
53 | self.meta[file_name] = {
54 | "hash": hash_name,
55 | "headers": [
56 | {"Content-Type": mimetypes.guess_type(file_name)[0]},
57 | ],
58 | }
59 | self.logger.debug("Converting the file as %s ", os.path.join(path, hash_name))
60 | shutil.copyfile(fn, os.path.join(path, hash_name))
61 | os.remove(fn)
62 |
63 | with open(os.path.join(path, "meta.json"), "w") as mj:
64 | json.dump(self.meta, mj)
65 |
66 |
67 | def add_meta_tag(page_dir, index_page, config, base_path):
68 | google_content = config["WEB-TOOLS"]["google"]
69 | bing_content = config["WEB-TOOLS"]["bing"]
70 |
71 | if not google_content and not bing_content:
72 | return
73 |
74 | main_page_path = os.path.join(os.path.join(base_path, "pages"), page_dir, index_page)
75 | with open(main_page_path) as main:
76 | main_page = main.read()
77 | soup = BeautifulSoup(main_page, "html.parser")
78 |
79 | if google_content and soup.find("meta", attrs={"name": "google-site-verification"}) is None:
80 | google_meta = soup.new_tag("meta")
81 | google_meta.attrs["name"] = "google-site-verification"
82 | google_meta.attrs["content"] = google_content
83 | soup.head.append(google_meta)
84 | if bing_content and soup.find("meta", attrs={"name": "msvalidate.01"}) is None:
85 | bing_meta = soup.new_tag("meta")
86 | bing_meta.attrs["name"] = "msvalidate.01"
87 | bing_meta.attrs["content"] = bing_content
88 | soup.head.append(bing_meta)
89 |
90 | html = soup.prettify("utf-8")
91 | with open(main_page_path, "wb") as file:
92 | file.write(html)
93 |
94 |
95 | def check_meta_file(meta_info):
96 | for key, val in meta_info.items():
97 | if "hash" in val and any(header in val for header in ["content_type", "headers"]):
98 | continue
99 | else:
100 | return False
101 | return True
102 |
103 |
104 | def parse_timeout(timeout):
105 | timeouts_coeff = {"M": 60, "H": 3600, "D": 86400}
106 |
107 | form = timeout[-1]
108 | if form not in timeouts_coeff.keys():
109 | print_color("Bad timeout format, default will be used", "WARNING")
110 | result = parse_timeout("24H")
111 | else:
112 | result = int(timeout[:-1])
113 | result *= timeouts_coeff[form]
114 | return result
115 |
116 |
117 | def str_to_bool(v):
118 | if v.lower() == "true":
119 | return True
120 | elif v.lower() == "false":
121 | return False
122 | else:
123 | raise argparse.ArgumentTypeError("Boolean value expected")
124 |
125 |
126 | def print_color(msg, mode="INFO", end="\n"):
127 | colors = {
128 | "INFO": "\033[97m", # white
129 | "ERROR": "\033[31m", # red
130 | "WARNING": "\033[33m", # yellow
131 | }
132 | try:
133 | color = colors[mode]
134 | except KeyError:
135 | color = colors["INFO"]
136 | print(color + str(msg) + "\033[0m", end=end)
137 |
138 |
139 | def check_privileges(path):
140 | """
141 | Checks if the user has privileges to the path passed as argument.
142 | """
143 | if not os.path.exists(path):
144 | try:
145 | os.makedirs(path)
146 | except PermissionError:
147 | raise PermissionError(f"Failed to create path: {os.path.abspath(path)}")
148 | if not os.access(path, os.W_OK):
149 | raise PermissionError(f"Failed to access path: {os.path.abspath(path)}")
150 |
--------------------------------------------------------------------------------