├── .coveragerc ├── .github ├── FUNDING.yml ├── codeql │ └── codeql-config.yml └── workflows │ ├── codeql-analysis.yml │ └── test-snare.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── bin ├── __init__.py ├── clone └── snare ├── docker-compose.yml ├── docs ├── Makefile ├── cloner.md ├── conf.py ├── faq.md ├── index.rst ├── parameters.md └── quick-start.md ├── readthedocs.yml ├── requirements.txt ├── setup.py └── snare ├── __init__.py ├── cloner.py ├── html_handler.py ├── middlewares.py ├── server.py ├── tanner_handler.py ├── tests ├── __init__.py ├── test_cloner_add_scheme.py ├── test_cloner_get_body.py ├── test_cloner_get_root_host.py ├── test_cloner_init.py ├── test_cloner_make_filename.py ├── test_cloner_process_links.py ├── test_cloner_replace_links.py ├── test_cloner_run.py ├── test_html_handler_get_dorks.py ├── test_html_handler_handle_html_content.py ├── test_logger.py ├── test_middleware.py ├── test_server_handle_request.py ├── test_server_stop.py ├── test_snare_helpers_add_meta_tag.py ├── test_snare_helpers_check_meta_file.py ├── test_snare_helpers_check_privileges.py ├── test_snare_helpers_converter.py ├── test_snare_helpers_parse_timeout.py ├── test_snare_helpers_print_color.py ├── test_snare_helpers_str_to_bool.py ├── test_snare_helpers_versions_manager.py ├── test_tanner_handler_create_data.py ├── test_tanner_handler_parse_tanner_response.py └── test_tanner_handler_submit_data.py └── utils ├── __init__.py ├── asyncmock.py ├── logger.py ├── page_path_generator.py └── snare_helpers.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | omit = 3 | */tests/* 4 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [mushorg,] 4 | -------------------------------------------------------------------------------- /.github/codeql/codeql-config.yml: -------------------------------------------------------------------------------- 1 | name: "Snare CodeQL config" 2 | 3 | paths: 4 | - bin 5 | - snare 6 | paths-ignore: 7 | - snare/tests 8 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ master ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ master ] 20 | schedule: 21 | - cron: '15 0 * * 1' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | 37 | steps: 38 | - name: Checkout repository 39 | uses: actions/checkout@v2 40 | 41 | # Initializes the CodeQL tools for scanning. 
42 | - name: Initialize CodeQL 43 | uses: github/codeql-action/init@v1 44 | with: 45 | languages: ${{ matrix.language }} 46 | config-file: ./.github/codeql/codeql-config.yml 47 | 48 | 49 | - name: Perform CodeQL Analysis 50 | uses: github/codeql-action/analyze@v1 51 | -------------------------------------------------------------------------------- /.github/workflows/test-snare.yml: -------------------------------------------------------------------------------- 1 | name: Unit and integration tests 2 | 3 | on: [push] 4 | 5 | jobs: 6 | test: 7 | 8 | runs-on: ubuntu-latest 9 | strategy: 10 | matrix: 11 | python-version: [3.6, 3.7, 3.8] 12 | 13 | steps: 14 | - uses: actions/checkout@v2 15 | - name: Set up Python ${{ matrix.python-version }} 16 | uses: actions/setup-python@v2 17 | with: 18 | python-version: ${{ matrix.python-version }} 19 | - name: Install dependencies 20 | run: | 21 | python -m pip install --upgrade pip 22 | pip install black nose coveralls 23 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 24 | - name: Lint with black 25 | run: | 26 | black . --line-length 120 27 | - name: Test with nose 28 | run: | 29 | nosetests -w snare/tests -v --with-coverage --cover-inclusive --cover-package=snare 30 | - name: Coveralls 31 | env: 32 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 33 | run: | 34 | coveralls --service=github 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | .pytest_cache/ 4 | *.py[cod] 5 | *$py.class 6 | .coverage 7 | 8 | # Sphinx documentation 9 | docs/_build/ 10 | 11 | # Developer 12 | .idea/ 13 | .venv/ 14 | .vscode/ 15 | 16 | Snare.egg-info/ 17 | build/ 18 | dist/ 19 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.6-alpine3.8 2 | 3 | RUN apk -U --no-cache add git build-base && \ 4 | rm -rf /root/* && \ 5 | rm -rf /tmp/* /var/tmp/* && \ 6 | rm -rf /var/cache/apk/* 7 | RUN pip3 install --no-cache-dir -U pip setuptools 8 | ADD requirements.txt . 9 | RUN pip3 install --no-cache-dir -r requirements.txt 10 | 11 | ADD . . 12 | RUN python3 setup.py install 13 | 14 | ARG PAGE_URL=example.com 15 | ENV PAGE_URL $PAGE_URL 16 | ENV PORT 80 17 | ENV TANNER tanner.mushmush.org 18 | 19 | RUN clone --target "http://$PAGE_URL" 20 | 21 | CMD snare --no-dorks true --auto-update false --host-ip 0.0.0.0 --port $PORT --page-dir "$PAGE_URL" --tanner $TANNER 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. 
We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". 
"Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. 
For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 
204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 
268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. 
But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 
387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. 
You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. 
"Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 
564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 
628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | {one line to give the program's name and a brief idea of what it does.} 635 | Copyright (C) {year} {name of author} 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | {project} Copyright (C) {year} {fullname} 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | SNARE 2 | ===== 3 | 4 | [![Documentation Status](https://readthedocs.org/projects/snare/badge/?version=latest)](http://snare.readthedocs.io/en/latest/?badge=latest) 5 | [![Build Status](https://travis-ci.org/mushorg/snare.svg?branch=master)](https://travis-ci.org/mushorg/snare) 6 | [![Coverage Status](https://coveralls.io/repos/github/mushorg/snare/badge.svg?branch=master)](https://coveralls.io/github/mushorg/snare?branch=master) 7 | 8 | _**Super Next generation Advanced Reactive honEypot**_ 9 | 10 | About 11 | ----- 12 | 13 | SNARE is a web application honeypot sensor attracting all sort of maliciousness from the Internet. 14 | 15 | Documentation 16 | -------------- 17 | 18 | The documentation can be found [here](http://snare.readthedocs.io). 19 | 20 | Basic Concepts 21 | -------------- 22 | 23 | - Surface first. Focus on the attack surface generation. 24 | - Sensors and masters. 
Lightweight collectors (SNARE) and a central decision maker (tanner).
25 |
26 | Getting started
27 | ---------------
28 |
29 | - You need Python 3.6 to run SNARE
30 | - This was tested on a recent Ubuntu-based Linux.
31 |
32 | #### Steps to set up
33 |
34 | 1. Get SNARE: `git clone https://github.com/mushorg/snare.git` and `cd snare`
35 | 2. [Optional] Create a virtual environment: `python3 -m venv venv`
36 | 3. [Optional] Activate the virtual environment: `. venv/bin/activate`
37 |
38 | **Note:** Do not use `sudo` with the commands below if you're running SNARE in a virtual environment.
39 |
40 | 4. Install the requirements: `sudo pip3 install -r requirements.txt`
41 | 5. Set up SNARE: `sudo python3 setup.py install`
42 | 6. Clone a page: `sudo clone --target http://example.com --path <path>`
43 | 7. Run SNARE: `sudo snare --port 8080 --page-dir example.com --path <path>`
44 | 8. Test: Visit http://localhost:8080/index.html
45 | 9. (Optionally) Have your own [tanner](https://github.com/mushorg/tanner) service running.
46 |
47 | [Note: Cloner clones the whole website by default; to restrict cloning to a desired depth, add the `--max-depth` parameter.]
48 |
49 | #### Docker build instructions
50 |
51 | 1. Change the current directory to the `snare` project directory
52 | 2. `docker-compose build`
53 | 3. `docker-compose up`
54 | 4. SNARE will start on 0.0.0.0, port 80.
55 | More information about running `docker-compose` can be found [here](https://docs.docker.com/compose/gettingstarted/).
56 |
57 | If an error occurs while running `docker-compose up`, check whether port 80 is available; if it is occupied, refer to the Docker documentation to change the default port.
58 |
59 | You obviously want to bind to 0.0.0.0 and port 80 when running in _production_.
60 |
61 | ## Testing
62 |
63 | To run the tests and receive a test coverage report, we recommend running `pytest`:
64 |
65 | ```
66 | pip install pytest pytest-cov
67 | sudo pytest --cov-report term-missing --cov=snare snare/tests/
68 | ```
69 |
70 | ## Sample Output
71 |
72 | ```shell
73 |
74 | # sudo snare --port 8080 --page-dir example.com
75 |
76 | _____ _ _____ ____ ______
77 | / ___// | / / | / __ \/ ____/
78 | \__ \/ |/ / /| | / /_/ / __/
79 | ___/ / /| / ___ |/ _, _/ /___
80 | /____/_/ |_/_/ |_/_/ |_/_____/
81 |
82 |
83 | privileges dropped, running as "nobody:nogroup"
84 | serving with uuid 9c10172f-7ce2-4fb4-b1c6-abc70141db56
85 | Debug logs will be stored in /opt/snare/snare.log
86 | Error logs will be stored in /opt/snare/snare.err
87 | ======== Running on http://127.0.0.1:8080 ========
88 | (Press CTRL+C to quit)
89 | you are running the latest version
90 |
91 | ```
--------------------------------------------------------------------------------
/bin/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mushorg/snare/b17fdfe7c2ba3ac540548763d73fc475cfc185c4/bin/__init__.py
--------------------------------------------------------------------------------
/bin/clone:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Copyright (C) 2015-2021 MushMush Foundation
5 |
6 | This program is free software: you can redistribute it and/or modify
7 | it under the terms of the GNU General Public License as published by
8 | the Free Software Foundation, either version 3 of the License, or
9 | (at your option) any later version.
10 | 11 | This program is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | GNU General Public License for more details. 15 | """ 16 | import argparse 17 | import asyncio 18 | import os 19 | import sys 20 | from datetime import datetime 21 | 22 | from snare.cloner import Cloner 23 | from snare.utils import logger 24 | from snare.utils.snare_helpers import (check_privileges, print_color, 25 | str_to_bool) 26 | 27 | 28 | def main(): 29 | loop = asyncio.get_event_loop() 30 | parser = argparse.ArgumentParser() 31 | parser.add_argument( 32 | "--target", help="domain of the site to be cloned", required=True 33 | ) 34 | parser.add_argument( 35 | "--max-depth", 36 | help="max depth of the cloning", 37 | required=False, 38 | default=sys.maxsize, 39 | ) 40 | parser.add_argument("--log-path", help="path to the log file") 41 | parser.add_argument( 42 | "--css-validate", 43 | help="set whether css validation is required", 44 | type=str_to_bool, 45 | default=None, 46 | ) 47 | parser.add_argument( 48 | "--path", 49 | help="path to save the page to be cloned", 50 | required=False, 51 | default="/opt/", 52 | ) 53 | args = parser.parse_args() 54 | default_path = os.path.join(args.path, "snare") 55 | 56 | if args.log_path: 57 | log_file = os.path.join(args.log_path, "clone.log") 58 | else: 59 | log_file = os.path.join(default_path, "clone.log") 60 | 61 | try: 62 | check_privileges(default_path) 63 | check_privileges(os.path.dirname(log_file)) 64 | except PermissionError as err: 65 | print_color(err, "WARNING") 66 | sys.exit(1) 67 | 68 | if not os.path.exists("{}/pages".format(default_path)): 69 | os.makedirs("{}/pages".format(default_path)) 70 | 71 | logger.Logger.create_clone_logger(log_file, __package__) 72 | print_color(" Logs will be stored in {}".format(log_file), "INFO", end="") 73 | start = datetime.now() 74 | try: 75 | cloner = Cloner( 76 | args.target, int(args.max_depth), args.css_validate, default_path 77 | ) 78 | loop.run_until_complete(cloner.get_root_host()) 79 | loop.run_until_complete(cloner.run()) 80 | end = datetime.now() - start 81 | except KeyboardInterrupt: 82 | end = datetime.now() - start 83 | finally: 84 | print("") 85 | end = datetime.now() - start 86 | print_color("-" * 36 + ">SUMMARY<" + "-" * 36, "INFO") 87 | print_color( 88 | "\tTotal number of URLs cloned: {}".format(str(cloner.counter)), "INFO" 89 | ) 90 | print_color("\tTime elapsed: {}".format(str(end)), "INFO") 91 | print_color("\tCloned directory: {}".format(cloner.target_path), "INFO") 92 | print_color("-" * 82, "INFO") 93 | 94 | 95 | if __name__ == "__main__": 96 | print( 97 | """ 98 | ______ __ ______ _ ____________ 99 | / ____// / / __ // | / / ____/ __ \\ 100 | / / / / / / / // |/ / __/ / /_/ / 101 | / /___ / /____ / /_/ // /| / /___/ _, _/ 102 | /_____//______//_____//_/ |_/_____/_/ |_| 103 | 104 | """ 105 | ) 106 | main() 107 | -------------------------------------------------------------------------------- /bin/snare: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | """ 4 | Copyright (C) 2015-2021 MushMush Foundation 5 | This program is free software: you can redistribute it and/or modify 6 | it under the terms of the GNU General Public License as published by 7 | the Free Software Foundation, either version 3 of the License, or 8 | (at your option) any later version. 
9 | This program is distributed in the hope that it will be useful, 10 | but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | GNU General Public License for more details. 13 | """ 14 | import argparse 15 | import asyncio 16 | import configparser 17 | import grp 18 | import json 19 | import multiprocessing 20 | import os 21 | import pwd 22 | import sys 23 | import time 24 | import uuid 25 | import aiohttp 26 | import pip 27 | import git 28 | from concurrent.futures import ProcessPoolExecutor 29 | 30 | from snare.server import HttpRequestHandler 31 | from snare.utils import snare_helpers 32 | from snare.utils.logger import Logger 33 | from snare.utils.snare_helpers import check_privileges, check_meta_file, print_color, str_to_bool 34 | 35 | 36 | def create_initial_config(base_path): 37 | cfg = configparser.ConfigParser() 38 | cfg['WEB-TOOLS'] = dict(google='', bing='') 39 | with open(os.path.join(base_path, 'snare.cfg'), 'w') as configfile: 40 | cfg.write(configfile) 41 | 42 | 43 | def snare_setup(base_path): 44 | try: 45 | # Create folders 46 | check_privileges(base_path) 47 | except PermissionError as err: 48 | print_color(err, 'WARNING') 49 | sys.exit(1) 50 | 51 | if not os.path.exists(os.path.join(base_path, 'pages')): 52 | os.makedirs(os.path.join(base_path, 'pages')) 53 | # Write pid to pid file 54 | with open(os.path.join(base_path, 'snare.pid'), 'wb') as pid_fh: 55 | pid_fh.write(str(os.getpid()).encode('utf-8')) 56 | # Config file 57 | if not os.path.exists(os.path.join(base_path, 'snare.cfg')): 58 | create_initial_config(base_path) 59 | # Read or create the sensor id 60 | uuid_file_path = os.path.join(base_path, 'snare.uuid') 61 | if os.path.exists(uuid_file_path): 62 | with open(uuid_file_path, 'rb') as uuid_fh: 63 | snare_uuid = uuid_fh.read() 64 | return snare_uuid 65 | else: 66 | with open(uuid_file_path, 'wb') as uuid_fh: 67 | snare_uuid = str(uuid.uuid4()).encode('utf-8') 68 | uuid_fh.write(snare_uuid) 69 | return snare_uuid 70 | 71 | 72 | def drop_privileges(): 73 | uid_name = 'nobody' 74 | wanted_user = pwd.getpwnam(uid_name) 75 | gid_name = grp.getgrgid(wanted_user.pw_gid).gr_name 76 | wanted_group = grp.getgrnam(gid_name) 77 | os.setgid(wanted_group.gr_gid) 78 | os.setuid(wanted_user.pw_uid) 79 | new_user = pwd.getpwuid(os.getuid()) 80 | new_group = grp.getgrgid(os.getgid()) 81 | print_color('privileges dropped, running as "{}:{}"'.format(new_user.pw_name, new_group.gr_name), 'INFO') 82 | 83 | 84 | def compare_version_info(timeout): 85 | while True: 86 | repo = git.Repo(os.getcwd()) 87 | try: 88 | rem = repo.remote() 89 | res = rem.fetch() 90 | diff_list = res[0].commit.diff(repo.heads.master) 91 | except TimeoutError: 92 | print_color('timeout fetching the repository version', 'ERROR') 93 | else: 94 | if diff_list: 95 | print_color('you are running an outdated version, SNARE will be updated and restarted', 'INFO') 96 | repo.git.reset('--hard') 97 | repo.heads.master.checkout() 98 | repo.git.clean('-xdf') 99 | repo.remotes.origin.pull() 100 | pip.main(['install', '-r', 'requirements.txt']) 101 | os.execv(sys.executable, [sys.executable, __file__] + sys.argv[1:]) 102 | return 103 | else: 104 | print_color('you are running the latest version', 'INFO') 105 | time.sleep(timeout) 106 | 107 | 108 | async def check_tanner(): 109 | vm = snare_helpers.VersionManager() 110 | async with aiohttp.ClientSession() as client: 111 | req_url = 'http://{}:8090/version'.format(args.tanner) 112 | try: 113 | resp 
= await client.get(req_url) 114 | result = await resp.json() 115 | version = result["version"] 116 | vm.check_compatibility(version) 117 | except aiohttp.ClientOSError: 118 | print_color("Can't connect to tanner host {}".format(req_url), 'ERROR') 119 | exit(1) 120 | else: 121 | await resp.release() 122 | 123 | if __name__ == '__main__': 124 | print(r""" 125 | _____ _ _____ ____ ______ 126 | / ___// | / / | / __ \/ ____/ 127 | \__ \/ |/ / /| | / /_/ / __/ 128 | ___/ / /| / ___ |/ _, _/ /___ 129 | /____/_/ |_/_/ |_/_/ |_/_____/ 130 | """) 131 | parser = argparse.ArgumentParser() 132 | page_group = parser.add_mutually_exclusive_group(required=True) 133 | page_group.add_argument("--page-dir", help="name of the folder to be served") 134 | page_group.add_argument("--list-pages", help="list available pages", action='store_true') 135 | parser.add_argument("--index-page", help="file name of the index page", default='index.html') 136 | parser.add_argument("--port", type=int, help="port to listen on", default='8080') 137 | parser.add_argument("--host-ip", help="host ip to bind to", default='127.0.0.1') 138 | parser.add_argument("--debug", help="run web server in debug mode", default=False) 139 | parser.add_argument("--tanner", help="ip of the tanner service", default='tanner.mushmush.org') 140 | parser.add_argument("--skip-check-version", help="skip check for update", action='store_true') 141 | parser.add_argument("--slurp-enabled", help="enable nsq logging", action='store_true') 142 | parser.add_argument("--slurp-host", help="nsq logging host", default='slurp.mushmush.org') 143 | parser.add_argument("--slurp-auth", help="nsq logging auth", default='slurp') 144 | parser.add_argument("--config", help="snare config file", default='snare.cfg') 145 | parser.add_argument("--auto-update", help="auto update SNARE if new version available ", default=True) 146 | parser.add_argument("--update-timeout", help="update snare every timeout ", default='24H') 147 | parser.add_argument("--server-header", help="set server-header", default=None) 148 | parser.add_argument("--no-dorks", help="disable the use of dorks", type=str_to_bool, default=True) 149 | parser.add_argument("--path", help="path to save the page to be cloned", required=False, default='/opt/') 150 | 151 | args = parser.parse_args() 152 | base_path = os.path.join(args.path, 'snare') 153 | if(not os.path.isabs(args.page_dir)): 154 | base_page_path = os.path.join(base_path, 'pages') 155 | full_page_path = os.path.join(base_page_path, args.page_dir) 156 | else: 157 | base_page_path = os.path.dirname(args.page_dir) 158 | full_page_path = args.page_dir 159 | snare_uuid = snare_setup(base_path) 160 | config = configparser.ConfigParser() 161 | config.read(os.path.join(base_path, args.config)) 162 | log_debug = os.path.join(base_path, 'snare.log') 163 | log_err = os.path.join(base_path, 'snare.err') 164 | Logger.create_logger(log_debug, log_err, __package__) 165 | if args.list_pages: 166 | print_color('Available pages:\n', 'INFO') 167 | for page in os.listdir(base_page_path): 168 | print_color('\t- {}'.format(page), 'INFO') 169 | print_color('\nuse with --page-dir {page_name}\n\n', 'INFO') 170 | exit() 171 | args_dict = vars(args) 172 | args_dict['full_page_path'] = os.path.realpath(full_page_path) 173 | if not os.path.exists(full_page_path): 174 | print_color("--page-dir: {0} does not exist".format(args.page_dir), 'ERROR') 175 | exit() 176 | args.index_page = os.path.join("/", args.index_page) 177 | 178 | if not os.path.exists(os.path.join(full_page_path, 
'meta.json')): 179 | conv = snare_helpers.Converter() 180 | conv.convert(full_page_path) 181 | print_color("pages were converted. Try to clone again for the better result.", 'WARNING') 182 | 183 | with open(os.path.join(full_page_path, 'meta.json')) as meta: 184 | meta_info = json.load(meta) 185 | 186 | if not check_meta_file(meta_info): 187 | print_color("Error found in meta.json. Please clone the pages again.", "ERROR") 188 | exit() 189 | 190 | if not os.path.exists(os.path.join(full_page_path, os.path.join(meta_info[args.index_page]['hash']))): 191 | print_color('can\'t create meta tag', 'WARNING') 192 | else: 193 | snare_helpers.add_meta_tag(args.page_dir, meta_info[args.index_page]['hash'], config, base_path) 194 | loop = asyncio.get_event_loop() 195 | loop.run_until_complete(check_tanner()) 196 | 197 | pool = ProcessPoolExecutor(max_workers=multiprocessing.cpu_count()) 198 | compare_version_fut = None 199 | if args.auto_update is True: 200 | timeout = snare_helpers.parse_timeout(args.update_timeout) 201 | compare_version_fut = loop.run_in_executor(pool, compare_version_info, timeout) 202 | 203 | app = HttpRequestHandler(meta_info, args, snare_uuid, debug=args.debug, keep_alive=75) 204 | 205 | print_color('serving with uuid {0}'.format(snare_uuid.decode('utf-8')), 'INFO') 206 | print_color("Debug logs will be stored in {}".format(log_debug), 'INFO') 207 | print_color("Error logs will be stored in {}".format(log_err), 'INFO') 208 | loop = asyncio.get_event_loop() 209 | try: 210 | loop.run_until_complete(app.start()) 211 | if os.getuid() == 0: 212 | drop_privileges() 213 | loop.run_forever() 214 | except (KeyboardInterrupt, TypeError) as e: 215 | loop.run_until_complete(app.stop()) 216 | print_color(e, 'ERROR') 217 | finally: 218 | if compare_version_fut: 219 | compare_version_fut.cancel() 220 | loop.close() 221 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2.3' 2 | 3 | networks: 4 | snare_local: 5 | 6 | services: 7 | 8 | # Snare service 9 | snare: 10 | build: . 11 | container_name: snare 12 | restart: always 13 | stop_signal: SIGKILL 14 | tty: true 15 | networks: 16 | - snare_local 17 | ports: 18 | - "80:80" 19 | image: "mushorg/snare:latest" 20 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = . 8 | BUILDDIR = _build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/cloner.md: -------------------------------------------------------------------------------- 1 | # Cloner 2 | 3 | Cloner clones the website that we require to be served by snare. 
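For illustration, a typical run of the cloner might look like the sketch below; the target URL, depth, and path are placeholders only, and the individual parameters are described in the next section.

```bash
# Clone example.com to a depth of 2 links, with CSS validation enabled,
# and store the cloned pages under the given path.
sudo clone --target http://example.com --max-depth 2 --css-validate true --path /opt/
```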
4 | 5 | ## Cloner command line parameters 6 | 7 | clone [`--target` *website\_url*] [`--max-depth` *clone\_depth*] [`--log-path` *LOG\_PATH*] [`--css-validate` *CSS\_VALIDATE*] [`--path` *PATH*] 8 | 9 | ## Parameter Description 10 | 11 | - `--target` URL of the website to be cloned 12 | - `--max-depth` Maximum depth of web pages to be cloned (optional), default: full depth of the site 13 | - `--log-path` Path of the log file (optional) 14 | - `--css-validate` Set whether CSS validation is required (optional) 15 | - `--path` Path to save the page to be cloned (optional) 16 | 17 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = "SNARE" 23 | copyright = "2018, mushmush" 24 | author = "mushmush" 25 | 26 | # The short X.Y version 27 | version = "" 28 | # The full version, including alpha/beta/rc tags 29 | release = "v0.3.0" 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = ['myst_parser'] 42 | 43 | # Add any paths that contain templates here, relative to this directory. 44 | templates_path = ["_templates"] 45 | 46 | # The suffix(es) of source filenames. 47 | # You can specify multiple suffix as a list of string: 48 | # 49 | # source_suffix = ['.rst', '.md'] 50 | source_suffix = [".rst", ".md"] 51 | 52 | # The master toctree document. 53 | master_doc = "index" 54 | 55 | # The language for content autogenerated by Sphinx. Refer to documentation 56 | # for a list of supported languages. 57 | # 58 | # This is also used if you do content translation via gettext catalogs. 59 | # Usually you set "language" from the command line for these cases. 60 | language = None 61 | 62 | # List of patterns, relative to source directory, that match files and 63 | # directories to ignore when looking for source files. 64 | # This pattern also affects html_static_path and html_extra_path. 65 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 66 | 67 | # The name of the Pygments (syntax highlighting) style to use. 68 | pygments_style = None 69 | 70 | 71 | # -- Options for HTML output ------------------------------------------------- 72 | 73 | # The theme to use for HTML and HTML Help pages. See the documentation for 74 | # a list of builtin themes.
75 | # 76 | html_theme = "alabaster" 77 | 78 | # Theme options are theme-specific and customize the look and feel of a theme 79 | # further. For a list of options available for each theme, see the 80 | # documentation. 81 | # 82 | # html_theme_options = {} 83 | 84 | # Add any paths that contain custom static files (such as style sheets) here, 85 | # relative to this directory. They are copied after the builtin static files, 86 | # so a file named "default.css" will overwrite the builtin "default.css". 87 | html_static_path = ["_static"] 88 | 89 | # Custom sidebar templates, must be a dictionary that maps document names 90 | # to template names. 91 | # 92 | # The default sidebars (for documents that don't match any pattern) are 93 | # defined by theme itself. Builtin themes are using these templates by 94 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 95 | # 'searchbox.html']``. 96 | # 97 | # html_sidebars = {} 98 | 99 | 100 | # -- Options for HTMLHelp output --------------------------------------------- 101 | 102 | # Output file base name for HTML help builder. 103 | htmlhelp_basename = "SNAREdoc" 104 | 105 | 106 | # -- Options for LaTeX output ------------------------------------------------ 107 | 108 | latex_elements = { 109 | # The paper size ('letterpaper' or 'a4paper'). 110 | # 111 | # 'papersize': 'letterpaper', 112 | # The font size ('10pt', '11pt' or '12pt'). 113 | # 114 | # 'pointsize': '10pt', 115 | # Additional stuff for the LaTeX preamble. 116 | # 117 | # 'preamble': '', 118 | # Latex figure (float) alignment 119 | # 120 | # 'figure_align': 'htbp', 121 | } 122 | 123 | # Grouping the document tree into LaTeX files. List of tuples 124 | # (source start file, target name, title, 125 | # author, documentclass [howto, manual, or own class]). 126 | latex_documents = [ 127 | (master_doc, "SNARE.tex", "SNARE Documentation", "mushmush", "manual"), 128 | ] 129 | 130 | 131 | # -- Options for manual page output ------------------------------------------ 132 | 133 | # One entry per manual page. List of tuples 134 | # (source start file, name, description, authors, manual section). 135 | man_pages = [(master_doc, "snare", "SNARE Documentation", [author], 1)] 136 | 137 | 138 | # -- Options for Texinfo output ---------------------------------------------- 139 | 140 | # Grouping the document tree into Texinfo files. List of tuples 141 | # (source start file, target name, title, author, 142 | # dir menu entry, description, category) 143 | texinfo_documents = [ 144 | ( 145 | master_doc, 146 | "SNARE", 147 | "SNARE Documentation", 148 | author, 149 | "SNARE", 150 | "One line description of project.", 151 | "Miscellaneous", 152 | ), 153 | ] 154 | 155 | 156 | # -- Options for Epub output ------------------------------------------------- 157 | 158 | # Bibliographic Dublin Core info. 159 | epub_title = project 160 | 161 | # The unique identifier of the text. This can be a ISBN number 162 | # or the project homepage. 163 | # 164 | # epub_identifier = '' 165 | 166 | # A unique identification for the text. 167 | # 168 | # epub_uid = '' 169 | 170 | # A list of files that should not be packed into the epub file. 
171 | epub_exclude_files = ["search.html"] 172 | -------------------------------------------------------------------------------- /docs/faq.md: -------------------------------------------------------------------------------- 1 | # Frequently Asked Questions 2 | 3 | ### White Pages 4 | 5 | Once cloning is done and you try to serve the pages via SNARE, you might see nothing but blank white pages. 6 | 7 | In this scenario, there are two things to check: 8 | 9 | 1. You are using the latest code from the SNARE repository. 10 | - To be sure, compare the versions in requirements.txt on the remote main branch (https://github.com/mushorg/snare/tree/main) with the code you have. 11 | 12 | 2. TANNER is running properly. 13 | - If you are using the public TANNER instance, i.e. tanner.mushmush.org:8090, make sure it is responding. 14 | - If a TANNER instance is running properly, visiting its URL in your browser should show `Tanner Server` on the page. 15 | - In most cases where TANNER isn't working properly, SNARE serves white pages instead of raising an error. 16 | 17 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. SNARE documentation master file, created by 2 |    sphinx-quickstart on Sat Dec 15 20:15:02 2018. 3 |    You can adapt this file completely to your liking, but it should at least 4 |    contain the root `toctree` directive. 5 | 6 | SNARE 7 | ===== 8 | 9 | .. toctree:: 10 |    :maxdepth: 2 11 |    :caption: Contents: 12 | 13 |    quick-start 14 |    parameters 15 |    cloner 16 |    faq 17 | 18 | 19 | Indices and tables 20 | ================== 21 | 22 | * :ref:`genindex` 23 | * :ref:`modindex` 24 | * :ref:`search` 25 | -------------------------------------------------------------------------------- /docs/parameters.md: -------------------------------------------------------------------------------- 1 | # Commandline 2 | 3 | snare [`--page-dir` *folder*] [`--list-pages`] [`--host-ip`] [`--index-page` *filename*] [`--port` *port*] [`--interface` *ip\_addr*] [`--debug`] [`--tanner` *tanner\_ip*] [`--skip-check-version`] [`--slurp-enabled`] [`--slurp-host` *host\_ip*] [`--slurp-auth`] [`--config` *filename*] [`--auto-update`] [`--update-timeout` *timeout*] 4 | 5 | ## Parameter Description 6 | 7 | - `--page-dir` name of the folder to be served 8 | - `--list-pages` list available pages 9 | - `--host-ip` host ip to bind to, default: localhost 10 | - `--index-page` file name of the index page, default: index.html 11 | - `--port` port to listen on, default: 8080 12 | - `--interface` interface to bind to 13 | - `--debug` run web server in debug mode, default: False 14 | - `--tanner` ip of the tanner service, default: tanner.mushmush.org 15 | - `--skip-check-version` skip check for update 16 | - `--slurp-enabled` enable nsq logging 17 | - `--slurp-host` nsq logging host, default: slurp.mushmush.org 18 | - `--slurp-auth` nsq logging auth, default: slurp 19 | - `--config` snare config file, default: snare.cfg 20 | - `--auto-update` auto update SNARE if a new version is available, default: True 21 | - `--update-timeout` update SNARE every timeout (possible labels are: **D** -- day, **H** -- hours, **M** -- minutes), default: 24H 22 | - `--server-header` set server header, default: nginx 23 | --------------------------------------------------------------------------------
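To tie the parameters above together, a fuller invocation might look like the sketch below; the host names and values are placeholders for illustration, not required settings.

```bash
# Serve the cloned example.com pages on all interfaces on port 8080,
# report events to a self-hosted TANNER instance, and keep the default
# daily auto-update check.
sudo snare --page-dir example.com --host-ip 0.0.0.0 --port 8080 \
    --tanner tanner.example.org --update-timeout 24H --server-header nginx
```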
/docs/quick-start.md: -------------------------------------------------------------------------------- 1 | # Quick Start 2 | 3 | SNARE is a web application honeypot and the successor of [Glastopf](https://github.com/mushorg/glastopf). It offers many of the same features as Glastopf, plus the ability to convert existing web pages into attack surfaces with [TANNER](https://github.com/mushorg/tanner). Every event sent from SNARE to TANNER is evaluated, and TANNER decides how SNARE should respond to the client. This allows the honeypot to produce dynamic responses, which improves its camouflage. When fingerprinted by attackers, SNARE presents itself as an Nginx web application server. 4 | 5 | ## Basic Concepts 6 | 7 | - Surface first. Focus on generating the attack surface. Clone with `Cloner`. 8 | - Sensors and masters. Lightweight collectors (SNARE) and a central decision maker (TANNER). 9 | 10 | ## Getting started 11 | 12 | > You need Python 3. We tested primarily with \>=3.6. 13 | > This was tested on a recent Ubuntu-based Linux. 14 | 15 | ### Steps to set up 16 | 17 | 1. Get SNARE: `git clone https://github.com/mushorg/snare.git` and `cd snare` 18 | 2. [Optional] Make a virtual environment: `python3 -m venv venv` 19 | 3. [Optional] Activate the virtual environment: `. venv/bin/activate` 20 | 21 | > Do not use sudo with the commands below if you're running SNARE in a virtual environment. 22 | 23 | 1. Install requirements: `sudo pip3 install -r requirements.txt` 24 | 2. Set up SNARE: `sudo python3 setup.py install` 25 | 3. Clone a page: `sudo clone --target http://example.com --path ` 26 | 4. Run SNARE: `sudo snare --port 8080 --page-dir example.com --path ` (see the parameters description for more info) 27 | 5. Test: visit the cloned site in your browser (with the command above it is served at `http://localhost:8080`) 28 | 6. (Optional) Run your own [tanner](https://github.com/mushorg/tanner) service. 29 | 30 | > Cloner clones the whole website; to restrict cloning to a desired depth, add the `--max-depth` parameter. 31 | 32 | You obviously want to bind to 0.0.0.0 and port 80 when running in *production*. 33 | 34 | ## Docker build instructions 35 | 36 | 1. Change the current directory to the `snare` project directory 37 | 2. `docker-compose build` 38 | 3. `docker-compose up` 39 | 40 | More information about running `docker-compose` can be found [here](https://docs.docker.com/compose/gettingstarted/).
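The repository's `docker-compose.yml` publishes the honeypot on host port 80 (`"80:80"`). If that port is already in use on your machine, the mapping can be adjusted before building; a minimal sketch of such an edited `docker-compose.yml`, changing only the published host port to 8080, might look like this:

```yaml
version: '2.3'

networks:
  snare_local:

services:
  snare:
    build: .
    container_name: snare
    restart: always
    stop_signal: SIGKILL
    tty: true
    networks:
      - snare_local
    ports:
      - "8080:80"   # publish container port 80 on host port 8080
    image: "mushorg/snare:latest"
```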
41 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | build: 2 | image: latest 3 | 4 | python: 5 | version: 3.6 6 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.7.4 2 | aiohttp_jinja2==1.5.0 3 | beautifulsoup4==4.6.3 4 | cssutils==1.0.2 5 | gitpython==3.1.30 6 | pycodestyle==2.4.0 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from setuptools import find_packages, setup 3 | 4 | 5 | setup( 6 | name="Snare", 7 | version="0.3.0", 8 | description="Super Next generation Advanced Reactive honEypot", 9 | author="MushMush Foundation", 10 | author_email="glastopf@public.honeynet.org", 11 | url="https://github.com/mushorg/snare", 12 | packages=find_packages(exclude=["*.pyc"]), 13 | scripts=["./bin/snare", "./bin/clone"], 14 | ) 15 | -------------------------------------------------------------------------------- /snare/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mushorg/snare/b17fdfe7c2ba3ac540548763d73fc475cfc185c4/snare/__init__.py -------------------------------------------------------------------------------- /snare/cloner.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import logging 4 | import asyncio 5 | import hashlib 6 | import json 7 | import re 8 | import aiohttp 9 | import cssutils 10 | import yarl 11 | from bs4 import BeautifulSoup 12 | from asyncio import Queue 13 | from collections import defaultdict 14 | 15 | animation = "|/-\\" 16 | 17 | 18 | class Cloner(object): 19 | def __init__(self, root, max_depth, css_validate, default_path="/opt/snare"): 20 | self.logger = logging.getLogger(__name__) 21 | self.logger.setLevel(logging.DEBUG) 22 | self.visited_urls = [] 23 | self.root, self.error_page = self.add_scheme(root) 24 | self.max_depth = max_depth 25 | self.moved_root = None 26 | self.default_path = default_path 27 | if (self.root.host is None) or (len(self.root.host) < 4): 28 | sys.exit("invalid target {}".format(self.root.host)) 29 | self.target_path = "{}/pages/{}".format(self.default_path, self.root.host) 30 | 31 | if not os.path.exists(self.target_path): 32 | os.makedirs(self.target_path) 33 | self.css_validate = css_validate 34 | self.new_urls = Queue() 35 | self.meta = defaultdict(dict) 36 | 37 | self.counter = 0 38 | self.itr = 0 39 | 40 | @staticmethod 41 | def add_scheme(url): 42 | new_url = yarl.URL(url) 43 | if not new_url.scheme: 44 | new_url = yarl.URL("http://" + url) 45 | err_url = new_url.with_path("/status_404").with_query(None).with_fragment(None) 46 | return new_url, err_url 47 | 48 | @staticmethod 49 | def get_headers(response): 50 | ignored_headers_lowercase = [ 51 | "age", 52 | "cache-control", 53 | "connection", 54 | "content-encoding", 55 | "content-length", 56 | "date", 57 | "etag", 58 | "expires", 59 | "x-cache", 60 | ] 61 | 62 | headers = [] 63 | for key, value in response.headers.items(): 64 | if key.lower() not in ignored_headers_lowercase: 65 | headers.append({key: value}) 66 | return headers 67 | 68 | async def process_link(self, url, level, check_host=False): 69 | 
try: 70 | url = yarl.URL(url) 71 | except UnicodeError: 72 | return None 73 | if url.scheme in ["data", "javascript", "file"]: 74 | return url.human_repr() 75 | if not url.is_absolute(): 76 | if self.moved_root is None: 77 | url = self.root.join(url) 78 | else: 79 | url = self.moved_root.join(url) 80 | 81 | host = url.host 82 | 83 | if check_host: 84 | if ( 85 | (host != self.root.host and self.moved_root is None) 86 | or url.fragment 87 | or (self.moved_root is not None and host != self.moved_root.host) 88 | ): 89 | return None 90 | if url.human_repr() not in self.visited_urls and (level + 1) <= self.max_depth: 91 | await self.new_urls.put((url, level + 1)) 92 | 93 | res = None 94 | try: 95 | res = url.relative().human_repr() 96 | except ValueError: 97 | self.logger.error("ValueError while processing the %s link", url) 98 | return res 99 | 100 | async def replace_links(self, data, level): 101 | soup = BeautifulSoup(data, "html.parser") 102 | 103 | # find all relative links 104 | for link in soup.findAll(href=True): 105 | res = await self.process_link(link["href"], level, check_host=True) 106 | if res is not None: 107 | link["href"] = res 108 | 109 | # find all images and scripts 110 | for elem in soup.findAll(src=True): 111 | res = await self.process_link(elem["src"], level) 112 | if res is not None: 113 | elem["src"] = res 114 | 115 | # find all action elements 116 | for act_link in soup.findAll(action=True): 117 | res = await self.process_link(act_link["action"], level) 118 | if res is not None: 119 | act_link["action"] = res 120 | 121 | # prevent redirects 122 | for redir in soup.findAll(True, attrs={"name": re.compile("redirect.*")}): 123 | if redir["value"] != "": 124 | redir["value"] = yarl.URL(redir["value"]).relative().human_repr() 125 | 126 | return soup 127 | 128 | def _make_filename(self, url): 129 | host = url.host 130 | if url.is_absolute(): 131 | file_name = url.relative().human_repr() 132 | else: 133 | file_name = url.human_repr() 134 | if not file_name.startswith("/"): 135 | file_name = "/" + file_name 136 | 137 | if file_name == "/" or file_name == "": 138 | if host == self.root.host or self.moved_root is not None and self.moved_root.host == host: 139 | file_name = "/index.html" 140 | else: 141 | file_name = host 142 | m = hashlib.md5() 143 | m.update(file_name.encode("utf-8")) 144 | hash_name = m.hexdigest() 145 | return file_name, hash_name 146 | 147 | async def get_body(self, session): 148 | while not self.new_urls.empty(): 149 | print(animation[self.itr % len(animation)], end="\r") 150 | self.itr = self.itr + 1 151 | current_url, level = await self.new_urls.get() 152 | if current_url.human_repr() in self.visited_urls: 153 | continue 154 | self.visited_urls.append(current_url.human_repr()) 155 | file_name, hash_name = self._make_filename(current_url) 156 | self.logger.debug("Cloned file: %s", file_name) 157 | data = None 158 | content_type = None 159 | try: 160 | response = await session.get(current_url, headers={"Accept": "text/html"}, timeout=10.0) 161 | headers = self.get_headers(response) 162 | content_type = response.content_type 163 | data = await response.read() 164 | except (aiohttp.ClientError, asyncio.TimeoutError) as client_error: 165 | self.logger.error(client_error) 166 | else: 167 | await response.release() 168 | 169 | if data is not None: 170 | self.meta[file_name]["hash"] = hash_name 171 | self.meta[file_name]["headers"] = headers 172 | self.counter = self.counter + 1 173 | 174 | if content_type == "text/html": 175 | soup = await 
self.replace_links(data, level) 176 | data = str(soup).encode() 177 | elif content_type == "text/css": 178 | css = cssutils.parseString(data, validate=self.css_validate) 179 | for carved_url in cssutils.getUrls(css): 180 | if carved_url.startswith("data"): 181 | continue 182 | carved_url = yarl.URL(carved_url) 183 | if not carved_url.is_absolute(): 184 | carved_url = self.root.join(carved_url) 185 | if carved_url.human_repr() not in self.visited_urls: 186 | await self.new_urls.put((carved_url, level + 1)) 187 | 188 | with open(os.path.join(self.target_path, hash_name), "wb") as index_fh: 189 | index_fh.write(data) 190 | 191 | async def get_root_host(self): 192 | try: 193 | async with aiohttp.ClientSession() as session: 194 | resp = await session.get(self.root) 195 | if resp.host != self.root.host: 196 | self.moved_root = resp.url 197 | resp.close() 198 | except aiohttp.ClientError as err: 199 | self.logger.error("Can't connect to target host: %s", err) 200 | exit(-1) 201 | 202 | async def run(self): 203 | session = aiohttp.ClientSession() 204 | try: 205 | await self.new_urls.put((self.root, 0)) 206 | await self.new_urls.put((self.error_page, 0)) 207 | await self.get_body(session) 208 | except KeyboardInterrupt: 209 | raise 210 | finally: 211 | with open(os.path.join(self.target_path, "meta.json"), "w") as mj: 212 | json.dump(self.meta, mj) 213 | await session.close() 214 | -------------------------------------------------------------------------------- /snare/html_handler.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import logging 4 | import cssutils 5 | import aiohttp 6 | from bs4 import BeautifulSoup 7 | 8 | 9 | class HtmlHandler: 10 | def __init__(self, no_dorks, tanner): 11 | self.no_dorks = no_dorks 12 | self.dorks = [] 13 | self.logger = logging.getLogger(__name__) 14 | self.tanner = tanner 15 | 16 | async def get_dorks(self): 17 | dorks = None 18 | try: 19 | async with aiohttp.ClientSession() as session: 20 | r = await session.get("http://{0}:8090/dorks".format(self.tanner), timeout=10.0) 21 | try: 22 | dorks = await r.json() 23 | except json.decoder.JSONDecodeError as e: 24 | self.logger.error("Error getting dorks: %s", e) 25 | finally: 26 | await r.release() 27 | except asyncio.TimeoutError as error: 28 | self.logger.error("Dorks timeout error: %s", error) 29 | return dorks["response"]["dorks"] if dorks else [] 30 | 31 | async def handle_content(self, content): 32 | soup = BeautifulSoup(content, "html.parser") 33 | if self.no_dorks is not True: 34 | for p_elem in soup.find_all("p"): 35 | if p_elem.findChildren(): 36 | continue 37 | css = None 38 | if "style" in p_elem.attrs: 39 | css = cssutils.parseStyle(p_elem.attrs["style"]) 40 | text_list = p_elem.text.split() 41 | p_new = soup.new_tag("p", style=css.cssText if css else None) 42 | for idx, word in enumerate(text_list): 43 | # Fetch dorks if required 44 | if len(self.dorks) <= 0: 45 | self.dorks = await self.get_dorks() 46 | word += " " 47 | if idx % 5 == 0: 48 | a_tag = soup.new_tag( 49 | "a", 50 | href=self.dorks.pop(), 51 | style="color:{color};text-decoration:none;cursor:text;".format( 52 | color=css.color if css and "color" in css.keys() else "#000000" 53 | ), 54 | ) 55 | a_tag.string = word 56 | p_new.append(a_tag) 57 | else: 58 | p_new.append(soup.new_string(word)) 59 | p_elem.replace_with(p_new) 60 | content = soup.encode("utf-8") 61 | return content 62 | -------------------------------------------------------------------------------- 
/snare/middlewares.py: -------------------------------------------------------------------------------- 1 | import aiohttp_jinja2 2 | import multidict 3 | from aiohttp import web 4 | 5 | 6 | class SnareMiddleware: 7 | def __init__(self, error_404, error_500=None, headers=[], server_header=""): 8 | self.error_404 = error_404 9 | self.error_500 = error_500 if error_500 else "500.html" 10 | 11 | self.headers = multidict.CIMultiDict() 12 | for header in headers: 13 | for key, value in header.items(): 14 | self.headers.add(key, value) 15 | 16 | if server_header: 17 | self.headers["Server"] = server_header 18 | 19 | async def handle_404(self, request): 20 | return aiohttp_jinja2.render_template(self.error_404, request, {}) 21 | 22 | async def handle_500(self, request): 23 | return aiohttp_jinja2.render_template(self.error_500, request, {}) 24 | 25 | def create_error_middleware(self, overrides): 26 | @web.middleware 27 | async def error_middleware(request, handler): 28 | try: 29 | response = await handler(request) 30 | status = response.status 31 | override = overrides.get(status) 32 | if override: 33 | response = await override(request) 34 | response.headers.update(self.headers) 35 | response.set_status(status) 36 | return response 37 | return response 38 | except web.HTTPException as ex: 39 | override = overrides.get(ex.status) 40 | if override: 41 | return await override(request) 42 | raise 43 | 44 | return error_middleware 45 | 46 | def setup_middlewares(self, app): 47 | error_middleware = self.create_error_middleware( 48 | { 49 | 404: self.handle_404, 50 | 500: self.handle_500, 51 | } 52 | ) 53 | app.middlewares.append(error_middleware) 54 | -------------------------------------------------------------------------------- /snare/server.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import aiohttp 3 | import aiohttp_jinja2 4 | import jinja2 5 | 6 | from aiohttp import web 7 | from aiohttp.web import StaticResource as StaticRoute 8 | 9 | from snare.middlewares import SnareMiddleware 10 | from snare.tanner_handler import TannerHandler 11 | 12 | 13 | class HttpRequestHandler: 14 | def __init__(self, meta, run_args, snare_uuid, debug=False, keep_alive=75, **kwargs): 15 | self.run_args = run_args 16 | self.dir = run_args.full_page_path 17 | self.meta = meta 18 | self.snare_uuid = snare_uuid 19 | self.logger = logging.getLogger(__name__) 20 | self.sroute = StaticRoute(name=None, prefix="/", directory=self.dir) 21 | self.tanner_handler = TannerHandler(run_args, meta, snare_uuid) 22 | 23 | async def submit_slurp(self, data): 24 | try: 25 | async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session: 26 | r = await session.post( 27 | "https://{0}:8080/api?auth={1}&chan=snare_test&msg={2}".format( 28 | self.run_args.slurp_host, self.run_args.slurp_auth, data 29 | ), 30 | json=data, 31 | timeout=10.0, 32 | ) 33 | assert r.status == 200 34 | r.close() 35 | except Exception as e: 36 | self.logger.error("Error submitting slurp: %s", e) 37 | 38 | async def handle_request(self, request): 39 | self.logger.info("Request path: {0}".format(request.path_qs)) 40 | data = self.tanner_handler.create_data(request, 200) 41 | if request.method == "POST": 42 | post_data = await request.post() 43 | self.logger.info("POST data:") 44 | for key, val in post_data.items(): 45 | self.logger.info("\t- {0}: {1}".format(key, val)) 46 | data["post_data"] = dict(post_data) 47 | 48 | # Submit the event to the TANNER service 49 | 
event_result = await self.tanner_handler.submit_data(data) 50 | 51 | # Log the event to slurp service if enabled 52 | if self.run_args.slurp_enabled: 53 | await self.submit_slurp(request.path_qs) 54 | 55 | content, headers, status_code = await self.tanner_handler.parse_tanner_response( 56 | request.path_qs, event_result["response"]["message"]["detection"] 57 | ) 58 | 59 | if self.run_args.server_header: 60 | headers["Server"] = self.run_args.server_header 61 | 62 | if "cookies" in data and "sess_uuid" in data["cookies"]: 63 | previous_sess_uuid = data["cookies"]["sess_uuid"] 64 | else: 65 | previous_sess_uuid = None 66 | 67 | if event_result is not None and "sess_uuid" in event_result["response"]["message"]: 68 | cur_sess_id = event_result["response"]["message"]["sess_uuid"] 69 | if previous_sess_uuid is None or not previous_sess_uuid.strip() or previous_sess_uuid != cur_sess_id: 70 | headers.add("Set-Cookie", "sess_uuid=" + cur_sess_id) 71 | 72 | return web.Response(body=content, status=status_code, headers=headers) 73 | 74 | async def start(self): 75 | app = web.Application() 76 | app.add_routes([web.route("*", "/{tail:.*}", self.handle_request)]) 77 | aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader(self.dir)) 78 | middleware = SnareMiddleware( 79 | error_404=self.meta["/status_404"].get("hash"), 80 | headers=self.meta["/status_404"].get("headers", []), 81 | server_header=self.run_args.server_header, 82 | ) 83 | middleware.setup_middlewares(app) 84 | 85 | self.runner = web.AppRunner(app) 86 | await self.runner.setup() 87 | site = web.TCPSite(self.runner, self.run_args.host_ip, self.run_args.port) 88 | 89 | await site.start() 90 | names = sorted(str(s.name) for s in self.runner.sites) 91 | print("======== Running on {} ========\n" "(Press CTRL+C to quit)".format(", ".join(names))) 92 | 93 | async def stop(self): 94 | await self.runner.cleanup() 95 | -------------------------------------------------------------------------------- /snare/tanner_handler.py: -------------------------------------------------------------------------------- 1 | import re 2 | import os 3 | import multidict 4 | import json 5 | import logging 6 | import aiohttp 7 | 8 | from urllib.parse import unquote 9 | from bs4 import BeautifulSoup 10 | from snare.html_handler import HtmlHandler 11 | 12 | 13 | class TannerHandler: 14 | def __init__(self, run_args, meta, snare_uuid): 15 | self.run_args = run_args 16 | self.meta = meta 17 | self.dir = run_args.full_page_path 18 | self.snare_uuid = snare_uuid 19 | self.html_handler = HtmlHandler(run_args.no_dorks, run_args.tanner) 20 | self.logger = logging.getLogger(__name__) 21 | 22 | def create_data(self, request, response_status): 23 | data = dict( 24 | method=None, 25 | path=None, 26 | headers=None, 27 | uuid=self.snare_uuid.decode("utf-8"), 28 | peer=None, 29 | status=response_status, 30 | ) 31 | if request.transport: 32 | peer = dict( 33 | ip=request.transport.get_extra_info("peername")[0], 34 | port=request.transport.get_extra_info("peername")[1], 35 | ) 36 | data["peer"] = peer 37 | if request.path: 38 | # FIXME request.headers is a CIMultiDict, so items with the same 39 | # key will be overwritten when converting to dictionary 40 | header = {key: value for (key, value) in request.headers.items()} 41 | data["method"] = request.method 42 | data["headers"] = header 43 | data["path"] = request.path_qs 44 | if "Cookie" in header: 45 | data["cookies"] = {cookie.split("=")[0]: cookie.split("=")[1] for cookie in header["Cookie"].split(";")} 46 | return data 47 | 48 | 
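    # Note: submit_data() below forwards each request event to the TANNER API
    # (a POST to http://<tanner host>:8090/event) and returns the decoded JSON verdict.
    # If the response cannot be parsed as JSON, the error is logged and a default
    # "index" detection is returned so SNARE can still serve the cloned page;
    # any other exception is logged and re-raised.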
async def submit_data(self, data): 49 | event_result = None 50 | try: 51 | async with aiohttp.ClientSession() as session: 52 | r = await session.post( 53 | "http://{0}:8090/event".format(self.run_args.tanner), 54 | json=data, 55 | timeout=10.0, 56 | ) 57 | try: 58 | event_result = await r.json() 59 | except ( 60 | json.decoder.JSONDecodeError, 61 | aiohttp.client_exceptions.ContentTypeError, 62 | ) as e: 63 | self.logger.error("Error submitting data: {} {}".format(e, data)) 64 | event_result = { 65 | "version": "0.6.0", 66 | "response": { 67 | "message": { 68 | "detection": { 69 | "name": "index", 70 | "order": 1, 71 | "type": 1, 72 | "version": "0.6.0", 73 | }, 74 | "sess_uuid": data["uuid"], 75 | } 76 | }, 77 | } 78 | finally: 79 | await r.release() 80 | except Exception as e: 81 | self.logger.exception("Exception: %s", e) 82 | raise e 83 | return event_result 84 | 85 | async def parse_tanner_response(self, requested_name, detection): 86 | content = None 87 | status_code = 200 88 | headers = multidict.CIMultiDict() 89 | # Creating a regex object for the pattern of multiple contiguous forward slashes 90 | p = re.compile("/+") 91 | # Substituting all occurrences of the pattern with single forward slash 92 | requested_name = p.sub("/", requested_name) 93 | 94 | if detection["type"] == 1: 95 | possible_requests = [requested_name] 96 | query_start = requested_name.find("?") 97 | if query_start != -1: 98 | possible_requests.append(requested_name[:query_start]) 99 | 100 | file_name = None 101 | for requested_name in possible_requests: 102 | if requested_name == "/": 103 | requested_name = self.run_args.index_page 104 | if requested_name[-1] == "/": 105 | requested_name = requested_name[:-1] 106 | requested_name = unquote(requested_name) 107 | try: 108 | file_name = self.meta[requested_name]["hash"] 109 | for header in self.meta[requested_name].get("headers", []): 110 | for key, value in header.items(): 111 | headers.add(key, value) 112 | # overwrite headers with legacy content-type if present and not none 113 | content_type = self.meta[requested_name].get("content_type") 114 | if content_type: 115 | headers["Content-Type"] = content_type 116 | except KeyError: 117 | pass 118 | else: 119 | break 120 | 121 | if not file_name: 122 | status_code = 404 123 | else: 124 | path = os.path.join(self.dir, file_name) 125 | if os.path.isfile(path): 126 | with open(path, "rb") as fh: 127 | content = fh.read() 128 | if headers.get("Content-Type", "").startswith("text/html"): 129 | content = await self.html_handler.handle_content(content) 130 | 131 | elif detection["type"] == 2: 132 | payload_content = detection["payload"] 133 | if payload_content["page"]: 134 | try: 135 | file_name = self.meta[payload_content["page"]]["hash"] 136 | for header in self.meta[payload_content["page"]].get("headers", []): 137 | for key, value in header.items(): 138 | headers.add(key, value) 139 | # overwrite headers with legacy content-type if present and not none 140 | content_type = self.meta[payload_content["page"]].get("content_type") 141 | if content_type: 142 | headers["Content-Type"] = content_type 143 | page_path = os.path.join(self.dir, file_name) 144 | with open(page_path, encoding="utf-8") as p: 145 | content = p.read() 146 | except KeyError: 147 | content = "" 148 | headers["Content-Type"] = "text/html" 149 | 150 | soup = BeautifulSoup(content, "html.parser") 151 | script_tag = soup.new_tag("div") 152 | script_tag.append(BeautifulSoup(payload_content["value"], "html.parser")) 153 | soup.body.append(script_tag) 154 | 
content = str(soup).encode() 155 | else: 156 | content_type = "text/plain" 157 | if content_type: 158 | headers["Content-Type"] = content_type 159 | content = payload_content["value"].encode("utf-8") 160 | 161 | if "headers" in payload_content: 162 | # overwrite local headers with the tanner-provided ones 163 | headers.update(payload_content["headers"]) 164 | 165 | else: # type 3 166 | payload_content = detection["payload"] 167 | status_code = payload_content["status_code"] 168 | 169 | return content, headers, status_code 170 | -------------------------------------------------------------------------------- /snare/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mushorg/snare/b17fdfe7c2ba3ac540548763d73fc475cfc185c4/snare/tests/__init__.py -------------------------------------------------------------------------------- /snare/tests/test_cloner_add_scheme.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | import os 4 | import yarl 5 | import shutil 6 | from snare.cloner import Cloner 7 | from snare.utils.page_path_generator import generate_unique_path 8 | 9 | 10 | class TestCloner(unittest.TestCase): 11 | def setUp(self): 12 | self.url = "http://example.com" 13 | self.main_page_path = generate_unique_path() 14 | os.makedirs(self.main_page_path) 15 | self.expected_new_url = yarl.URL("http://example.com") 16 | self.expected_err_url = yarl.URL("http://example.com/status_404") 17 | self.max_depth = sys.maxsize 18 | self.css_validate = "false" 19 | self.handler = Cloner(self.url, self.max_depth, self.css_validate) 20 | 21 | def test_trailing_slash(self): 22 | self.url = "http://example.com/" 23 | new_url, err_url = self.handler.add_scheme(self.url) 24 | self.assertEqual(new_url, self.expected_new_url) 25 | self.assertEqual(err_url, self.expected_err_url) 26 | 27 | def test_add_scheme(self): 28 | new_url, err_url = self.handler.add_scheme(self.url) 29 | 30 | self.assertEqual(new_url, self.expected_new_url) 31 | self.assertEqual(err_url, self.expected_err_url) 32 | 33 | def test_no_scheme(self): 34 | self.url = "example.com" 35 | new_url, err_url = self.handler.add_scheme(self.url) 36 | self.assertEqual(new_url, self.expected_new_url) 37 | self.assertEqual(err_url, self.expected_err_url) 38 | 39 | def tearDown(self): 40 | shutil.rmtree(self.main_page_path) 41 | 42 | def test_no_host(self): 43 | self.url = "http:/" 44 | with self.assertRaises(SystemExit): 45 | Cloner(self.url, self.max_depth, self.css_validate) 46 | 47 | def test_limited_length_host(self): 48 | self.url = "http://aaa" 49 | with self.assertRaises(SystemExit): 50 | Cloner(self.url, self.max_depth, self.css_validate) 51 | -------------------------------------------------------------------------------- /snare/tests/test_cloner_get_body.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import aiohttp 3 | import yarl 4 | import sys 5 | import os 6 | import shutil 7 | import asyncio 8 | from snare.cloner import Cloner 9 | from snare.utils.asyncmock import AsyncMock 10 | from snare.utils.page_path_generator import generate_unique_path 11 | 12 | 13 | class TestGetBody(unittest.TestCase): 14 | def setUp(self): 15 | self.main_page_path = generate_unique_path() 16 | os.makedirs(self.main_page_path) 17 | self.root = "http://example.com" 18 | self.level = 0 19 | self.max_depth = sys.maxsize 20 | self.loop = asyncio.new_event_loop() 
21 | self.css_validate = "false" 22 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 23 | self.target_path = "/opt/snare/pages/{}".format(yarl.URL(self.root).host) 24 | self.return_content = None 25 | self.expected_content = None 26 | self.filename = None 27 | self.hashname = None 28 | self.url = None 29 | self.content = None 30 | self.return_url = None 31 | self.return_level = None 32 | self.meta = None 33 | self.q_size = None 34 | 35 | self.session = aiohttp.ClientSession 36 | self.session.get = AsyncMock( 37 | return_value=aiohttp.ClientResponse( 38 | url=yarl.URL("http://www.example.com"), 39 | method="GET", 40 | writer=None, 41 | continue100=1, 42 | timer=None, 43 | request_info=None, 44 | traces=None, 45 | loop=self.loop, 46 | session=None, 47 | ) 48 | ) 49 | 50 | def test_get_body(self): 51 | self.content = b"""""" 52 | 53 | aiohttp.ClientResponse._headers = {"Content-Type": "text/html"} 54 | aiohttp.ClientResponse.read = AsyncMock(return_value=self.content) 55 | self.filename, self.hashname = self.handler._make_filename(yarl.URL(self.root)) 56 | self.expected_content = '' 57 | 58 | self.meta = { 59 | "/index.html": { 60 | "hash": "d1546d731a9f30cc80127d57142a482b", 61 | "headers": [{"Content-Type": "text/html"}], 62 | }, 63 | "/test": { 64 | "hash": "4539330648b80f94ef3bf911f6d77ac9", 65 | "headers": [{"Content-Type": "text/html"}], 66 | }, 67 | } 68 | 69 | async def test(): 70 | await self.handler.new_urls.put((yarl.URL(self.root), 0)) 71 | await self.handler.get_body(self.session) 72 | 73 | with self.assertLogs(level="DEBUG") as log: 74 | self.loop.run_until_complete(test()) 75 | self.assertIn("DEBUG:snare.cloner:Cloned file: /test", "".join(log.output)) 76 | 77 | with open(os.path.join(self.target_path, self.hashname)) as f: 78 | self.return_content = f.read() 79 | 80 | self.assertEqual(self.return_content, self.expected_content) 81 | self.assertEqual( 82 | self.handler.visited_urls[-2:], 83 | ["http://example.com/", "http://example.com/test"], 84 | ) 85 | self.assertEqual(self.handler.meta, self.meta) 86 | 87 | def test_get_body_css_validate(self): 88 | aiohttp.ClientResponse._headers = {"Content-Type": "text/css"} 89 | 90 | self.css_validate = "true" 91 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 92 | self.content = b""".banner { background: url("/example.png") }""" 93 | aiohttp.ClientResponse.read = AsyncMock(return_value=self.content) 94 | self.expected_content = "http://example.com/example.png" 95 | self.return_size = 0 96 | self.meta = { 97 | "/example.png": { 98 | "hash": "5a64beebcd2a6f1cbd00b8370debaa72", 99 | "headers": [{"Content-Type": "text/css"}], 100 | }, 101 | "/index.html": { 102 | "hash": "d1546d731a9f30cc80127d57142a482b", 103 | "headers": [{"Content-Type": "text/css"}], 104 | }, 105 | } 106 | 107 | async def test(): 108 | await self.handler.new_urls.put((yarl.URL(self.root), 0)) 109 | await self.handler.get_body(self.session) 110 | self.q_size = self.handler.new_urls.qsize() 111 | 112 | self.loop.run_until_complete(test()) 113 | self.assertEqual(self.handler.visited_urls[-1], self.expected_content) 114 | self.assertEqual(self.q_size, self.return_size) 115 | self.assertEqual(self.meta, self.handler.meta) 116 | 117 | def test_get_body_css_validate_scheme(self): 118 | aiohttp.ClientResponse._headers = {"Content-Type": "text/css"} 119 | 120 | self.css_validate = "true" 121 | self.return_size = 0 122 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 123 | self.content = [ 124 | b""".banner { 
background: url("data://domain/test.txt") }""", 125 | b""".banner { background: url("file://domain/test.txt") }""", 126 | ] 127 | self.meta = { 128 | "/index.html": { 129 | "hash": "d1546d731a9f30cc80127d57142a482b", 130 | "headers": [{"Content-Type": "text/css"}], 131 | }, 132 | } 133 | 134 | self.expected_content = "http://example.com/" 135 | 136 | async def test(): 137 | await self.handler.new_urls.put((yarl.URL(self.root), 0)) 138 | await self.handler.get_body(self.session) 139 | self.q_size = self.handler.new_urls.qsize() 140 | 141 | for content in self.content: 142 | aiohttp.ClientResponse.read = AsyncMock(return_value=content) 143 | self.loop.run_until_complete(test()) 144 | self.assertEqual(self.return_size, self.q_size) 145 | self.assertEqual(self.handler.meta, self.meta) 146 | self.assertEqual(self.handler.visited_urls[-1], self.expected_content) 147 | 148 | def test_client_error(self): 149 | self.session.get = AsyncMock(side_effect=aiohttp.ClientError) 150 | 151 | async def test(): 152 | await self.handler.new_urls.put((yarl.URL(self.root), 0)) 153 | await self.handler.get_body(self.session) 154 | 155 | with self.assertLogs(level="ERROR") as log: 156 | self.loop.run_until_complete(test()) 157 | self.assertIn("ERROR:snare.cloner:", "".join(log.output)) 158 | 159 | def tearDown(self): 160 | shutil.rmtree(self.main_page_path) 161 | -------------------------------------------------------------------------------- /snare/tests/test_cloner_get_root_host.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest import mock 3 | import sys 4 | from snare.cloner import Cloner 5 | import shutil 6 | from yarl import URL 7 | import asyncio 8 | import aiohttp 9 | from snare.utils.asyncmock import AsyncMock 10 | 11 | 12 | class TestClonerGetRootHost(unittest.TestCase): 13 | def setUp(self): 14 | self.loop = asyncio.new_event_loop() 15 | 16 | def test_moved_root(self): 17 | self.root = "http://example.com" 18 | self.max_depth = sys.maxsize 19 | self.css_validate = "false" 20 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 21 | self.expected_moved_root = URL("http://www.example.com") 22 | 23 | async def test(): 24 | await self.handler.get_root_host() 25 | 26 | self.loop.run_until_complete(test()) 27 | 28 | self.assertEqual(self.handler.moved_root, self.expected_moved_root) 29 | 30 | @mock.patch("aiohttp.ClientSession") 31 | def test_clienterror(self, session): 32 | self.root = "http://example.com" 33 | self.max_depth = sys.maxsize 34 | self.css_validate = "false" 35 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 36 | 37 | aiohttp.ClientSession = mock.Mock(side_effect=aiohttp.ClientError) 38 | 39 | async def test(): 40 | await self.handler.get_root_host() 41 | 42 | with self.assertRaises(SystemExit): 43 | self.loop.run_until_complete(test()) 44 | -------------------------------------------------------------------------------- /snare/tests/test_cloner_init.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | from snare.cloner import Cloner 4 | import shutil 5 | 6 | 7 | class TestClonerInitialization(unittest.TestCase): 8 | def setUp(self): 9 | self.root = "http://example.com" 10 | self.max_depth = sys.maxsize 11 | self.css_validate = "false" 12 | self.handler = Cloner(self.root, self.max_depth, self.css_validate, default_path="/tmp") 13 | 14 | def test_cloner_init(self): 15 | self.assertIsInstance(self.handler, Cloner) 
16 | 17 | def tearDown(self): 18 | shutil.rmtree(self.handler.target_path) 19 | -------------------------------------------------------------------------------- /snare/tests/test_cloner_make_filename.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | import os 4 | import shutil 5 | import yarl 6 | import asyncio 7 | from snare.cloner import Cloner 8 | from snare.utils.page_path_generator import generate_unique_path 9 | 10 | 11 | class TestMakeFilename(unittest.TestCase): 12 | def setUp(self): 13 | self.main_page_path = generate_unique_path() 14 | os.makedirs(self.main_page_path) 15 | self.url = yarl.URL("http://foo.com") 16 | self.root = "http://example.com" 17 | self.max_depth = sys.maxsize 18 | self.loop = asyncio.new_event_loop() 19 | self.css_validate = "false" 20 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 21 | self.filename = None 22 | self.hashname = None 23 | 24 | def test_make_filename(self): 25 | self.filename, self.hashname = self.handler._make_filename(self.url) 26 | self.assertEqual(self.filename, "foo.com") 27 | self.assertEqual(self.hashname, "167a0418dd8ce3bf0ef00dfb6195f038") 28 | 29 | def test_make_filename_same_host(self): 30 | self.filename, self.hashname = self.handler._make_filename(yarl.URL(self.root)) 31 | self.assertEqual(self.filename, "/index.html") 32 | self.assertEqual(self.hashname, "d1546d731a9f30cc80127d57142a482b") 33 | 34 | def test_make_filename_relative(self): 35 | self.url = yarl.URL("/images") 36 | self.filename, self.hashname = self.handler._make_filename(self.url) 37 | self.assertEqual(self.filename, "/images") 38 | self.assertEqual(self.hashname, "41389bcf7f7427468d8c8675db2d4f98") 39 | 40 | def tearDown(self): 41 | shutil.rmtree(self.main_page_path) 42 | -------------------------------------------------------------------------------- /snare/tests/test_cloner_process_links.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import sys 4 | import yarl 5 | from unittest import mock 6 | from snare.cloner import Cloner 7 | 8 | 9 | class TestProcessLinks(unittest.TestCase): 10 | def setUp(self): 11 | self.root = "http://example.com" 12 | self.level = 0 13 | self.max_depth = sys.maxsize 14 | self.loop = asyncio.new_event_loop() 15 | self.css_validate = "false" 16 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 17 | self.expected_content = None 18 | self.return_content = None 19 | self.return_url = None 20 | self.return_level = None 21 | self.qsize = None 22 | 23 | def test_process_link_scheme(self): 24 | test_urls = [ 25 | "file://images/test.png", 26 | "data://images/test.txt", 27 | "javascript://alert(1)/", 28 | ] 29 | 30 | async def test(url_param): 31 | self.return_content = await self.handler.process_link(url_param, self.level) 32 | self.qsize = self.handler.new_urls.qsize() 33 | 34 | for url in test_urls: 35 | 36 | self.loop.run_until_complete(test(url)) 37 | self.expected_content = url 38 | self.return_size = 0 39 | self.assertEqual(self.expected_content, self.return_content) 40 | self.assertEqual(self.qsize, self.return_size) 41 | 42 | def test_process_link_relative(self): 43 | self.url = "/foo/путь/" 44 | self.expected_content = "http://example.com/foo/путь/" 45 | 46 | async def test(): 47 | self.return_content = await self.handler.process_link(self.url, self.level) 48 | self.return_url, self.return_level = await self.handler.new_urls.get() 49 | 50 | 
self.loop.run_until_complete(test()) 51 | self.assertEqual(self.return_content, "/foo/путь/") 52 | self.assertEqual(yarl.URL(self.return_url).human_repr(), self.expected_content) 53 | self.assertEqual(self.return_level, self.level + 1) 54 | 55 | self.handler.moved_root = yarl.URL("http://example2.com") 56 | self.expected_content = "http://example2.com/foo/путь/" 57 | 58 | self.loop.run_until_complete(test()) 59 | self.assertEqual(self.return_content, "/foo/путь/") 60 | self.assertEqual(yarl.URL(self.return_url).human_repr(), self.expected_content) 61 | self.assertEqual(self.return_level, self.level + 1) 62 | 63 | def test_process_link_absolute(self): 64 | self.url = "http://domain.com" 65 | self.expected_content = "" 66 | 67 | async def test(): 68 | self.return_content = await self.handler.process_link(self.url, self.level) 69 | self.return_url, self.return_level = await self.handler.new_urls.get() 70 | 71 | self.loop.run_until_complete(test()) 72 | self.assertEqual(self.return_content, self.expected_content) 73 | self.assertEqual(yarl.URL(self.url), self.return_url) 74 | self.assertEqual(self.return_level, self.level + 1) 75 | 76 | def test_check_host(self): 77 | self.url = "http://foo.com" 78 | self.return_size = 0 79 | 80 | async def test(): 81 | self.return_content = await self.handler.process_link(self.url, self.level, check_host=True) 82 | self.qsize = self.handler.new_urls.qsize() 83 | 84 | self.loop.run_until_complete(test()) 85 | self.assertEqual(self.return_content, None) 86 | self.assertEqual(self.qsize, self.return_size) 87 | 88 | @mock.patch("yarl.URL") 89 | def test_process_link_unicode_error(self, url): 90 | 91 | yarl.URL = mock.Mock(side_effect=UnicodeError) 92 | 93 | async def test(): 94 | self.return_content = await self.handler.process_link(self.root, self.level) 95 | 96 | self.loop.run_until_complete(test()) 97 | self.assertEqual(self.return_content, self.expected_content) 98 | -------------------------------------------------------------------------------- /snare/tests/test_cloner_replace_links.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | import os 4 | import shutil 5 | import asyncio 6 | from snare.cloner import Cloner 7 | from snare.utils.page_path_generator import generate_unique_path 8 | from snare.utils.asyncmock import AsyncMock 9 | 10 | 11 | class TestReplaceLinks(unittest.TestCase): 12 | def setUp(self): 13 | self.main_page_path = generate_unique_path() 14 | os.makedirs(self.main_page_path) 15 | self.root = "http://example.com" 16 | self.level = 0 17 | self.max_depth = sys.maxsize 18 | self.loop = asyncio.new_event_loop() 19 | self.css_validate = "false" 20 | self.handler = Cloner(self.root, self.max_depth, self.css_validate) 21 | self.content = None 22 | self.expected_content = None 23 | self.return_content = None 24 | 25 | def test_replace_relative_links(self): 26 | self.handler.process_link = AsyncMock(return_value="/test") 27 | self.root = "http://example.com/test" 28 | self.content = '\n\n\n\n\n\n' 29 | 30 | self.expected_content = '\n\n\n\n\n\n' 31 | 32 | async def test(): 33 | self.return_content = await self.handler.replace_links(self.content, self.level) 34 | 35 | self.loop.run_until_complete(test()) 36 | self.assertEqual(str(self.return_content), self.expected_content) 37 | self.handler.process_link.assert_called_with(self.root, self.level, check_host=True) 38 | 39 | def test_replace_image_links(self): 40 | self.handler.process_link = AsyncMock(return_value="/smiley.png") 
41 | self.root = "http://example.com/smiley.png" 42 | self.content = '\n\n\n\n\n\n' 43 | 44 | self.expected_content = '\n\n\n\n\n\n' 45 | 46 | async def test(): 47 | self.return_content = await self.handler.replace_links(self.content, self.level) 48 | 49 | self.loop.run_until_complete(test()) 50 | self.assertEqual(str(self.return_content), self.expected_content) 51 | self.handler.process_link.assert_called_with(self.root, self.level) 52 | 53 | def test_replace_action_links(self): 54 | self.handler.process_link = AsyncMock(return_value="/submit.php") 55 | self.root = "http://example.com/submit.php" 56 | self.content = '\n\n\n
\n
\n\n\n' 57 | 58 | self.expected_content = '\n\n\n
\n
\n\n\n' 59 | 60 | async def test(): 61 | self.return_content = await self.handler.replace_links(self.content, self.level) 62 | 63 | self.loop.run_until_complete(test()) 64 | self.assertEqual(str(self.return_content), self.expected_content) 65 | self.handler.process_link.assert_called_with(self.root, self.level) 66 | 67 | def test_replace_redirects(self): 68 | self.root = "http://example.com" 69 | self.content = ( 70 | '\n\n\n

Redirecting...

\n' 71 | "\n\n" 72 | ) 73 | 74 | self.expected_content = ( 75 | '\n\n\n

Redirecting...

\n\n' "\n" 76 | ) 77 | 78 | async def test(): 79 | self.return_content = await self.handler.replace_links(self.content, self.level) 80 | 81 | self.loop.run_until_complete(test()) 82 | self.assertEqual(str(self.return_content), self.expected_content) 83 | 84 | def tearDown(self): 85 | shutil.rmtree(self.main_page_path) 86 | -------------------------------------------------------------------------------- /snare/tests/test_cloner_run.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import sys 3 | from snare.cloner import Cloner 4 | import shutil 5 | import asyncio 6 | 7 | 8 | class TestClonerRun(unittest.TestCase): 9 | def setUp(self): 10 | self.root = "http://example.com" 11 | self.max_depth = sys.maxsize 12 | self.css_validate = "false" 13 | self.handler = Cloner(self.root, self.max_depth, self.css_validate, default_path="/tmp") 14 | self.loop = asyncio.new_event_loop() 15 | 16 | def test_run(self): 17 | self.loop.run_until_complete(self.handler.run()) 18 | -------------------------------------------------------------------------------- /snare/tests/test_html_handler_get_dorks.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import shutil 4 | import os 5 | import yarl 6 | import aiohttp 7 | from json import JSONDecodeError 8 | from snare.utils.asyncmock import AsyncMock 9 | from snare.html_handler import HtmlHandler 10 | from snare.utils.page_path_generator import generate_unique_path 11 | 12 | 13 | class TestGetDorks(unittest.TestCase): 14 | def setUp(self): 15 | self.main_page_path = generate_unique_path() 16 | os.makedirs(self.main_page_path) 17 | self.dorks = dict(response={"dorks": "test_dorks"}) 18 | self.loop = asyncio.new_event_loop() 19 | aiohttp.ClientSession.get = AsyncMock( 20 | return_value=aiohttp.ClientResponse( 21 | url=yarl.URL("http://www.example.com"), 22 | method="GET", 23 | writer=None, 24 | continue100=1, 25 | timer=None, 26 | request_info=None, 27 | traces=None, 28 | loop=self.loop, 29 | session=None, 30 | ) 31 | ) 32 | no_dorks = True 33 | tanner = "tanner.mushmush.org" 34 | self.handler = HtmlHandler(no_dorks, tanner) 35 | self.data = None 36 | 37 | def test_get_dorks(self): 38 | aiohttp.ClientResponse.json = AsyncMock(return_value=dict(response={"dorks": "test_dorks"})) 39 | 40 | async def test(): 41 | self.data = await self.handler.get_dorks() 42 | 43 | self.loop.run_until_complete(test()) 44 | aiohttp.ClientSession.get.assert_called_with("http://tanner.mushmush.org:8090/dorks", timeout=10.0) 45 | 46 | def test_return_dorks(self): 47 | aiohttp.ClientResponse.json = AsyncMock(return_value=self.dorks) 48 | 49 | async def test(): 50 | self.data = await self.handler.get_dorks() 51 | 52 | self.loop.run_until_complete(test()) 53 | self.assertEqual(self.data, self.dorks["response"]["dorks"]) 54 | 55 | def test_logging_error(self): 56 | aiohttp.ClientResponse.json = AsyncMock(side_effect=JSONDecodeError("ERROR", "", 0)) 57 | 58 | async def test(): 59 | self.data = await self.handler.get_dorks() 60 | 61 | with self.assertLogs(level="ERROR") as log: 62 | self.loop.run_until_complete(test()) 63 | self.assertIn("Error getting dorks: ERROR: line 1 column 1 (char 0)", log.output[0]) 64 | 65 | def test_logging_timeout(self): 66 | aiohttp.ClientResponse.json = AsyncMock(side_effect=asyncio.TimeoutError()) 67 | 68 | async def test(): 69 | self.data = await self.handler.get_dorks() 70 | 71 | with self.assertLogs(level="INFO") as log: 72 | 
self.loop.run_until_complete(test()) 73 | self.assertIn("Dorks timeout", log.output[0]) 74 | 75 | def test_return_dorks_exception(self): 76 | aiohttp.ClientResponse.json = AsyncMock(side_effect=Exception()) 77 | 78 | async def test(): 79 | self.data = await self.handler.get_dorks() 80 | 81 | with self.assertRaises(Exception): 82 | self.loop.run_until_complete(test()) 83 | 84 | def tearDown(self): 85 | shutil.rmtree(self.main_page_path) 86 | -------------------------------------------------------------------------------- /snare/tests/test_html_handler_handle_html_content.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import shutil 4 | import os 5 | from bs4 import BeautifulSoup 6 | from snare.utils.asyncmock import AsyncMock 7 | from snare.html_handler import HtmlHandler 8 | from snare.utils.page_path_generator import generate_unique_path 9 | 10 | 11 | class TestHandleHtmlContent(unittest.TestCase): 12 | def setUp(self): 13 | self.main_page_path = generate_unique_path() 14 | os.makedirs(self.main_page_path) 15 | self.content = """ 16 | 17 | 18 |

A paragraph to be tested 19 | 20 | 21 | """ 22 | self.expected_content = '\n \n \n' 23 | self.expected_content += ' \n' 24 | self.expected_content += " A\n \n paragraph to be tested\n \n \n\n" 25 | self.no_dorks_content = '\n \n \n A paragraph to be tested\n' 26 | self.no_dorks_content += "
\n \n\n" 27 | self.loop = asyncio.new_event_loop() 28 | self.return_content = None 29 | no_dorks = True 30 | tanner = "tanner.mushmush.org" 31 | self.handler = HtmlHandler(no_dorks, tanner) 32 | 33 | def test_handle_content(self): 34 | self.handler.no_dorks = False 35 | self.handler.get_dorks = AsyncMock(return_value=["test_dork1"]) 36 | 37 | async def test(): 38 | self.return_content = await self.handler.handle_content(self.content) 39 | 40 | self.loop.run_until_complete(test()) 41 | soup = BeautifulSoup(self.return_content, "html.parser") 42 | return_content = soup.decode("utf-8") 43 | self.assertEqual(return_content, self.expected_content) 44 | 45 | def test_handle_content_no_dorks(self): 46 | self.handler.no_dorks = True 47 | 48 | async def test(): 49 | self.return_content = await self.handler.handle_content(self.content) 50 | 51 | self.loop.run_until_complete(test()) 52 | soup = BeautifulSoup(self.return_content, "html.parser") 53 | self.return_content = soup.decode("utf-8") 54 | self.assertEqual(self.return_content, self.no_dorks_content) 55 | 56 | def test_handle_content_exception(self): 57 | self.handler.no_dorks = False 58 | self.handler.get_dorks = AsyncMock(return_value=[]) 59 | 60 | async def test(): 61 | self.return_content = await self.handler.handle_content(self.content) 62 | 63 | with self.assertRaises(IndexError): 64 | self.loop.run_until_complete(test()) 65 | 66 | def tearDown(self): 67 | shutil.rmtree(self.main_page_path) 68 | -------------------------------------------------------------------------------- /snare/tests/test_logger.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from snare.utils.logger import Logger, LevelFilter 3 | import logging 4 | import os 5 | 6 | 7 | class TestLogger(unittest.TestCase): 8 | def setUp(self): 9 | self.cloner_log_file = "/tmp/cloner.log" 10 | self.snare_log_file = "/tmp/snare.log" 11 | self.snare_err_log_file = "/tmp/snare.err" 12 | self.record_dict = {"levelno": logging.INFO} 13 | self.logger = Logger.create_logger(self.snare_log_file, self.snare_err_log_file, __name__) 14 | 15 | def test_create_clone_logger(self): 16 | self.assertIsNone(Logger.create_clone_logger(self.cloner_log_file, __name__)) 17 | 18 | def test_create_logger(self): 19 | self.assertIsInstance(self.logger, logging.Logger) 20 | 21 | def test_filter(self): 22 | self.assertTrue(LevelFilter(logging.ERROR).filter(logging.makeLogRecord(self.record_dict))) 23 | 24 | def tearDown(self): 25 | try: 26 | os.remove(self.cloner_log_file) 27 | os.remove(self.snare_log_file) 28 | os.remove(self.snare_err_log_file) 29 | except FileNotFoundError: 30 | pass 31 | -------------------------------------------------------------------------------- /snare/tests/test_middleware.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from snare.middlewares import SnareMiddleware 3 | 4 | 5 | class TestMiddleware(unittest.TestCase): 6 | def setUp(self): 7 | self.middleware = SnareMiddleware( 8 | "error_404.html", 9 | headers=[{"Content-Type": "text/html; charset=UTF-8"}], 10 | server_header="nginx", 11 | ) 12 | 13 | def test_initialization(self): 14 | self.assertIsInstance(self.middleware, SnareMiddleware) 15 | -------------------------------------------------------------------------------- /snare/tests/test_server_handle_request.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest.mock import Mock 3 | import 
asyncio 4 | import argparse 5 | import shutil 6 | import multidict 7 | import os 8 | import aiohttp 9 | from aiohttp.http_parser import RawRequestMessage 10 | from aiohttp import HttpVersion 11 | from aiohttp import web 12 | from yarl import URL 13 | from snare.server import HttpRequestHandler 14 | from snare.utils.asyncmock import AsyncMock 15 | from snare.utils.page_path_generator import generate_unique_path 16 | 17 | 18 | class TestHandleRequest(unittest.TestCase): 19 | def setUp(self): 20 | meta = {} 21 | run_args = argparse.ArgumentParser() 22 | run_args.add_argument("--tanner") 23 | run_args.add_argument("--page-dir") 24 | self.main_page_path = generate_unique_path() 25 | os.makedirs(self.main_page_path) 26 | self.page_dir = self.main_page_path.rsplit("/")[-1] 27 | args = run_args.parse_args(["--page-dir", self.page_dir]) 28 | args_dict = vars(args) 29 | args_dict["full_page_path"] = self.main_page_path 30 | uuid = "9c10172f-7ce2-4fb4-b1c6-abc70141db56".encode("utf-8") 31 | args.tanner = "tanner.mushmush.org" 32 | args.no_dorks = True 33 | args.server_header = "test_server" 34 | args.slurp_enabled = True 35 | self.handler = HttpRequestHandler(meta, args, uuid) 36 | self.request_data = { 37 | "method": "GET", 38 | "path": "/", 39 | "headers": { 40 | "Host": "test_host", 41 | "Content-Type": "test_type", 42 | }, 43 | "status": 200, 44 | "cookies": { 45 | "sess_uuid": "prev_test_uuid", 46 | }, 47 | } 48 | self.loop = asyncio.new_event_loop() 49 | self.response_content = "" 50 | self.response_headers = multidict.CIMultiDict([("Content-Type", "text/html")]) 51 | self.response_status = 200 52 | event_result = dict(response=dict(message=dict(detection={"type": 1}, sess_uuid="test_uuid"))) 53 | RequestHandler = Mock() 54 | protocol = RequestHandler() 55 | message = RawRequestMessage( 56 | method="POST", 57 | path="/", 58 | version=HttpVersion(major=1, minor=1), 59 | headers=self.request_data["headers"], 60 | raw_headers=None, 61 | should_close=None, 62 | compression=None, 63 | upgrade=None, 64 | chunked=None, 65 | url=URL("http://test_url/"), 66 | ) 67 | self.request = web.Request( 68 | message=message, 69 | payload=None, 70 | protocol=protocol, 71 | payload_writer=None, 72 | task="POST", 73 | loop=self.loop, 74 | ) 75 | self.handler.tanner_handler.create_data = Mock(return_value=self.request_data) 76 | self.handler.tanner_handler.submit_data = AsyncMock(return_value=event_result) 77 | self.handler.submit_slurp = AsyncMock() 78 | web.Response.add_header = Mock() 79 | web.Response.write = Mock() 80 | web.Response.send_headers = Mock() 81 | web.Response.write_eof = AsyncMock() 82 | aiohttp.streams.EmptyStreamReader.read = AsyncMock(return_value=b"con1=test1&con2=test2") 83 | self.handler.tanner_handler.parse_tanner_response = AsyncMock( 84 | return_value=( 85 | self.response_content, 86 | self.response_headers, 87 | self.response_status, 88 | ) 89 | ) 90 | 91 | def test_create_request_data(self): 92 | async def test(): 93 | await self.handler.handle_request(self.request) 94 | 95 | self.loop.run_until_complete(test()) 96 | self.handler.tanner_handler.create_data.assert_called_with(self.request, 200) 97 | 98 | def test_submit_request_data(self): 99 | async def test(): 100 | await self.handler.handle_request(self.request) 101 | 102 | self.loop.run_until_complete(test()) 103 | self.handler.tanner_handler.submit_data.assert_called_with(self.request_data) 104 | 105 | def test_submit_request_slurp(self): 106 | async def test(): 107 | await self.handler.handle_request(self.request) 108 | 109 | 
self.loop.run_until_complete(test()) 110 | self.handler.submit_slurp.assert_called_with(self.request.path_qs) 111 | 112 | def test_parse_response(self): 113 | async def test(): 114 | await self.handler.handle_request(self.request) 115 | 116 | self.loop.run_until_complete(test()) 117 | self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path_qs, {"type": 1}) 118 | 119 | def test_no_prev_sess_uuid(self): 120 | self.request_data = { 121 | "method": "GET", 122 | "path": "/", 123 | "headers": { 124 | "Host": "test_host", 125 | "Content-Type": "test_type", 126 | }, 127 | "status": 200, 128 | } 129 | self.handler.tanner_handler.create_data = Mock(return_value=self.request_data) 130 | 131 | async def test(): 132 | await self.handler.handle_request(self.request) 133 | 134 | self.loop.run_until_complete(test()) 135 | self.handler.tanner_handler.parse_tanner_response.assert_called_with(self.request.path_qs, {"type": 1}) 136 | 137 | def tearDown(self): 138 | shutil.rmtree(self.main_page_path) 139 | -------------------------------------------------------------------------------- /snare/tests/test_server_stop.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest.mock import Mock 3 | import asyncio 4 | import argparse 5 | import shutil 6 | import os 7 | from snare.server import HttpRequestHandler 8 | from snare.utils.asyncmock import AsyncMock 9 | from snare.utils.page_path_generator import generate_unique_path 10 | 11 | 12 | class TestServerStop(unittest.TestCase): 13 | def setUp(self): 14 | meta = { 15 | "/status_404": { 16 | "hash": "bacfa45149ffbe8dbff34609bf56d748", 17 | "headers": [{"Content-Type": "text/html; charset=UTF-8"}], 18 | } 19 | } 20 | run_args = argparse.ArgumentParser() 21 | self.main_page_path = generate_unique_path() 22 | os.makedirs(self.main_page_path) 23 | args = run_args.parse_args([]) 24 | args_dict = vars(args) 25 | args_dict["full_page_path"] = self.main_page_path 26 | uuid = "9c10172f-7ce2-4fb4-b1c6-abc70141db56".encode("utf-8") 27 | args.tanner = "tanner.mushmush.org" 28 | args.no_dorks = True 29 | args.host_ip = "127.0.0.1" 30 | args.port = "80" 31 | self.handler = HttpRequestHandler(meta, args, uuid) 32 | self.loop = asyncio.new_event_loop() 33 | 34 | def test_handler_stop(self): 35 | self.handler.runner = AsyncMock() 36 | 37 | async def test(): 38 | await self.handler.stop() 39 | 40 | self.loop.run_until_complete(test()) 41 | 42 | def tearDown(self): 43 | shutil.rmtree(self.main_page_path) 44 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_add_meta_tag.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import shutil 4 | import configparser 5 | from bs4 import BeautifulSoup 6 | from snare.utils.snare_helpers import add_meta_tag 7 | from snare.utils.page_path_generator import generate_unique_path 8 | 9 | 10 | class TestAddMetaTag(unittest.TestCase): 11 | def setUp(self): 12 | self.main_page_path = generate_unique_path() 13 | os.makedirs(self.main_page_path) 14 | self.content = "titlesample" 15 | self.page_dir = self.main_page_path.rsplit("/")[-1] 16 | self.index_page = "index.html" 17 | with open(os.path.join(self.main_page_path, "index.html"), "w") as f: 18 | f.write(self.content) 19 | 20 | def test_add_meta_tag(self): 21 | config = configparser.ConfigParser() 22 | config["WEB-TOOLS"] = dict(google="test google content", bing="test bing 
content") 23 | add_meta_tag(self.page_dir, self.index_page, config, base_path="/opt/snare") 24 | with open(os.path.join(self.main_page_path, "index.html")) as main: 25 | main_page = main.read() 26 | soup = BeautifulSoup(main_page, "html.parser") 27 | assert soup.find("meta", attrs={"name": "google-site-verification"}) and soup.find( 28 | "meta", attrs={"name": "msvalidate.01"} 29 | ) 30 | 31 | def test_add_meta_tag_with_empty_tags(self): 32 | config = configparser.ConfigParser() 33 | config["WEB-TOOLS"] = dict(google="", bing="") 34 | assert add_meta_tag(self.page_dir, self.index_page, config, base_path="/opt/snare") is None 35 | 36 | def tearDown(self): 37 | shutil.rmtree(self.main_page_path) 38 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_check_meta_file.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from snare.utils.snare_helpers import check_meta_file 3 | 4 | 5 | class TestMetaFile(unittest.TestCase): 6 | def setUp(self): 7 | self.correct_meta = { 8 | "/index.html": { 9 | "hash": "d1546d731a9f30cc80127d57142a482b", 10 | "headers": [{"Accept-Ranges": "bytes"}], 11 | } 12 | } 13 | self.incorrect_meta = { 14 | "/index.html": { 15 | "not_hash": "d1546d731a9f30cc80127d57142a482b", 16 | "headers": [{"Accept-Ranges": "bytes"}], 17 | } 18 | } 19 | 20 | def test_check_meta_file(self): 21 | self.assertTrue(check_meta_file(self.correct_meta)) 22 | self.assertFalse(check_meta_file(self.incorrect_meta)) 23 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_check_privileges.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from os.path import expanduser, join 3 | 4 | from snare.utils.snare_helpers import check_privileges 5 | 6 | 7 | @unittest.skip("fails in Travis") 8 | class TestStrToBool(unittest.TestCase): 9 | def test_privileges_in_root(self): 10 | self.path = "/" 11 | try: 12 | check_privileges(self.path) 13 | except PermissionError as e: 14 | self.fail(f"failed permissions check: {e}") 15 | 16 | def test_privileges_in_home(self): 17 | self.path = expanduser("~") 18 | try: 19 | check_privileges(self.path) 20 | except PermissionError as e: 21 | self.fail(f"failed permissions check: {e}") 22 | 23 | def test_non_existent_root_path(self): 24 | self.path = "/snare" 25 | try: 26 | check_privileges(self.path) 27 | except PermissionError as e: 28 | self.fail(f"failed permissions check: {e}") 29 | 30 | def test_non_existent_home_path(self): 31 | self.path = join(expanduser("~"), "snare") 32 | try: 33 | check_privileges(self.path) 34 | except PermissionError as e: 35 | self.fail(f"failed permissions check: {e}") 36 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_converter.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import shutil 4 | import json 5 | from snare.utils.snare_helpers import Converter 6 | 7 | 8 | class TestConverter(unittest.TestCase): 9 | def setUp(self): 10 | self.content = "" 11 | self.page_path = "/tmp/test/" 12 | if not os.path.exists("/tmp/test/depth"): 13 | os.makedirs("/tmp/test/depth") 14 | self.hname1 = "" 15 | self.hname2 = "" 16 | with open(os.path.join(self.page_path, "index.html"), "w") as f: 17 | f.write(self.content) 18 | with open(os.path.join(self.page_path, "depth/page.html"), "w") 
as f: 19 | f.write(self.content) 20 | self.cnv = Converter() 21 | 22 | def test_converter(self): 23 | self.cnv.convert(self.page_path) 24 | with open(os.path.join(self.page_path, "meta.json")) as f: 25 | s = json.load(f) 26 | self.hname1 = s["index.html"]["hash"] 27 | self.hname2 = s["depth/page.html"]["hash"] 28 | assert os.path.exists(self.page_path + self.hname1) and os.path.exists(self.page_path + self.hname2) 29 | 30 | def tearDown(self): 31 | shutil.rmtree("/tmp/test") 32 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_parse_timeout.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from snare.utils.snare_helpers import parse_timeout 3 | 4 | 5 | class TestParseTimeout(unittest.TestCase): 6 | def test_parse_timeout(self): 7 | assert parse_timeout("20H") == 20 * 60 * 60 8 | assert parse_timeout("10M") == 10 * 60 9 | assert parse_timeout("1D") == 24 * 60 * 60 10 | 11 | # Default 24H format is used. 12 | assert parse_timeout("24Y") == 24 * 60 * 60 13 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_print_color.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from snare.utils.snare_helpers import print_color 3 | 4 | 5 | class TestPrintColor(unittest.TestCase): 6 | def test_print_color(self): 7 | self.assertIsNone(print_color("testing print_color()", "INFO")) 8 | self.assertIsNone(print_color("testing print_color()", "WRONG_MODE")) 9 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_str_to_bool.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from argparse import ArgumentTypeError 3 | from snare.utils.snare_helpers import str_to_bool 4 | 5 | 6 | class TestStrToBool(unittest.TestCase): 7 | def setUp(self): 8 | self.v = None 9 | 10 | def test_str_to_bool_true(self): 11 | self.v = "true" 12 | assert str_to_bool(self.v) is True 13 | 14 | def test_str_to_bool_false(self): 15 | self.v = "false" 16 | assert str_to_bool(self.v) is False 17 | 18 | def test_str_to_bool_error(self): 19 | self.v = "twz" 20 | with self.assertRaises(ArgumentTypeError): 21 | str_to_bool(self.v) 22 | -------------------------------------------------------------------------------- /snare/tests/test_snare_helpers_versions_manager.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from snare.utils.snare_helpers import VersionManager 3 | 4 | 5 | class TestVersion(unittest.TestCase): 6 | def setUp(self): 7 | self.vm = VersionManager() 8 | self.vm.version = "0.1.0" 9 | 10 | def test_check_compatibilty_fails(self): 11 | with self.assertRaises(RuntimeError): 12 | self.vm.check_compatibility("0.0.0") 13 | 14 | def test_check_compatibilty_ok(self): 15 | self.vm.check_compatibility("0.3.0") 16 | -------------------------------------------------------------------------------- /snare/tests/test_tanner_handler_create_data.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest.mock import Mock 3 | import shutil 4 | import os 5 | import asyncio 6 | import argparse 7 | from yarl import URL 8 | from aiohttp import HttpVersion 9 | from aiohttp import web 10 | from aiohttp.http_parser import RawRequestMessage 11 | from snare.tanner_handler import 
TannerHandler 12 | from snare.utils.page_path_generator import generate_unique_path 13 | 14 | 15 | class TestCreateData(unittest.TestCase): 16 | def setUp(self): 17 | meta = {} 18 | run_args = argparse.ArgumentParser() 19 | run_args.add_argument("--tanner") 20 | run_args.add_argument("--page-dir") 21 | self.main_page_path = generate_unique_path() 22 | os.makedirs(self.main_page_path) 23 | page_dir = self.main_page_path.rsplit("/")[-1] 24 | args = run_args.parse_args(["--page-dir", page_dir]) 25 | args_dict = vars(args) 26 | args_dict["full_page_path"] = self.main_page_path 27 | snare_uuid = "9c10172f-7ce2-4fb4-b1c6-abc70141db56".encode("utf-8") 28 | args.no_dorks = True 29 | self.handler = TannerHandler(args, meta, snare_uuid) 30 | headers = { 31 | "Host": "test_host", 32 | "status": 200, 33 | "Cookie": "sess_uuid=prev_test_uuid; test_cookie=test", 34 | } 35 | message = RawRequestMessage( 36 | method="POST", 37 | path="/", 38 | version=HttpVersion(major=1, minor=1), 39 | headers=headers, 40 | raw_headers=None, 41 | should_close=None, 42 | compression=None, 43 | upgrade=None, 44 | chunked=None, 45 | url=URL("http://test_url/"), 46 | ) 47 | loop = asyncio.get_event_loop() 48 | RequestHandler = Mock() 49 | protocol = RequestHandler() 50 | self.request = web.Request( 51 | message=message, 52 | payload=None, 53 | protocol=protocol, 54 | payload_writer=None, 55 | task="POST", 56 | loop=loop, 57 | ) 58 | self.request.transport.get_extra_info = Mock(return_value=(["test_ip", "test_port"])) 59 | self.response_status = "test_status" 60 | self.data = None 61 | self.expected_data = { 62 | "method": "POST", 63 | "path": "http://test_url/", 64 | "headers": { 65 | "Host": "test_host", 66 | "status": 200, 67 | "Cookie": "sess_uuid=prev_test_uuid; test_cookie=test", 68 | }, 69 | "uuid": "9c10172f-7ce2-4fb4-b1c6-abc70141db56", 70 | "peer": {"ip": "test_ip", "port": "test_port"}, 71 | "status": "test_status", 72 | "cookies": {"sess_uuid": "prev_test_uuid", " test_cookie": "test"}, 73 | } 74 | 75 | def test_create_data(self): 76 | self.data = self.handler.create_data(self.request, self.response_status) 77 | self.assertEqual(self.data, self.expected_data) 78 | 79 | def tearDown(self): 80 | shutil.rmtree(self.main_page_path) 81 | -------------------------------------------------------------------------------- /snare/tests/test_tanner_handler_parse_tanner_response.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import argparse 4 | import shutil 5 | import os 6 | import json 7 | import multidict 8 | from snare.utils.asyncmock import AsyncMock 9 | from snare.utils.page_path_generator import generate_unique_path 10 | from snare.tanner_handler import TannerHandler 11 | 12 | 13 | class TestParseTannerResponse(unittest.TestCase): 14 | def setUp(self): 15 | run_args = argparse.ArgumentParser() 16 | run_args.add_argument("--tanner") 17 | run_args.add_argument("--page-dir") 18 | self.main_page_path = generate_unique_path() 19 | os.makedirs(self.main_page_path) 20 | page_dir = self.main_page_path.rsplit("/")[-1] 21 | meta_content = { 22 | "/index.html": { 23 | "hash": "hash_name", 24 | "headers": [{"Content-Type": "text/html"}], 25 | } 26 | } 27 | self.page_content = "" 28 | self.headers = multidict.CIMultiDict([("Content-Type", "text/html")]) 29 | self.status_code = 200 30 | self.content_type = "text/html" 31 | with open(os.path.join(self.main_page_path, "hash_name"), "w") as f: 32 | f.write(self.page_content) 33 | with 
open(os.path.join(self.main_page_path, "meta.json"), "w") as f: 34 | json.dump(meta_content, f) 35 | self.args = run_args.parse_args(["--page-dir", page_dir]) 36 | args_dict = vars(self.args) 37 | args_dict["full_page_path"] = self.main_page_path 38 | self.args.index_page = "/index.html" 39 | self.args.no_dorks = True 40 | self.args.tanner = "tanner.mushmush.org" 41 | self.uuid = "test_uuid" 42 | self.handler = TannerHandler(self.args, meta_content, self.uuid) 43 | self.requested_name = "/" 44 | self.loop = asyncio.get_event_loop() 45 | self.handler.html_handler.handle_content = AsyncMock(return_value=self.page_content) 46 | self.res1 = None 47 | self.res2 = None 48 | self.res3 = None 49 | self.detection = None 50 | self.expected_content = None 51 | self.call_content = None 52 | 53 | def test_parse_type_one(self): 54 | self.detection = {"type": 1} 55 | 56 | async def test(): 57 | ( 58 | self.res1, 59 | self.res2, 60 | self.res3, 61 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 62 | 63 | self.loop.run_until_complete(test()) 64 | real_result = [self.res1, self.res2, self.res3] 65 | expected_result = [self.page_content, self.headers, self.status_code] 66 | self.assertCountEqual(real_result, expected_result) 67 | 68 | def test_parse_type_one_query(self): 69 | self.requested_name = "/?" 70 | self.detection = {"type": 1} 71 | 72 | async def test(): 73 | ( 74 | self.res1, 75 | self.res2, 76 | self.res3, 77 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 78 | 79 | self.loop.run_until_complete(test()) 80 | real_result = [self.res1, self.res2, self.res3] 81 | expected_result = [self.page_content, self.headers, self.status_code] 82 | self.assertCountEqual(real_result, expected_result) 83 | 84 | def test_parse_type_one_error(self): 85 | self.requested_name = "something/" 86 | self.detection = {"type": 1} 87 | self.expected_content = None 88 | self.headers = multidict.CIMultiDict() 89 | self.status_code = 404 90 | 91 | async def test(): 92 | ( 93 | self.res1, 94 | self.res2, 95 | self.res3, 96 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 97 | 98 | self.loop.run_until_complete(test()) 99 | real_result = [self.res1, self.res2, self.res3] 100 | expected_result = [self.expected_content, self.headers, self.status_code] 101 | self.assertCountEqual(real_result, expected_result) 102 | 103 | def test_parse_type_two(self): 104 | self.detection = { 105 | "type": 2, 106 | "payload": { 107 | "page": "/index.html", 108 | "value": "test", 109 | }, 110 | } 111 | self.expected_content = b"
test
" 112 | 113 | async def test(): 114 | ( 115 | self.res1, 116 | self.res2, 117 | self.res3, 118 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 119 | 120 | self.loop.run_until_complete(test()) 121 | real_result = [self.res1, self.res2, self.res3] 122 | expected_result = [self.expected_content, self.headers, self.status_code] 123 | self.assertCountEqual(real_result, expected_result) 124 | 125 | def test_parse_type_two_with_headers(self): 126 | self.detection = { 127 | "type": 2, 128 | "payload": { 129 | "page": "", 130 | "value": "test.png", 131 | "headers": { 132 | "content-type": "multipart/form-data", 133 | }, 134 | }, 135 | } 136 | self.expected_content = b"test.png" 137 | self.content_type = "image/png" 138 | self.headers = multidict.CIMultiDict([("Content-Type", "multipart/form-data")]) 139 | 140 | async def test(): 141 | ( 142 | self.res1, 143 | self.res2, 144 | self.res3, 145 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 146 | 147 | self.loop.run_until_complete(test()) 148 | real_result = [self.res1, self.res2, self.res3] 149 | expected_result = [self.expected_content, self.headers, self.status_code] 150 | 151 | self.assertCountEqual(real_result, expected_result) 152 | 153 | def test_parse_type_two_error(self): 154 | self.detection = { 155 | "type": 2, 156 | "payload": { 157 | "page": "/something", 158 | "value": "test", 159 | }, 160 | } 161 | self.expected_content = b"
test
" 162 | self.content_type = r"text/html" 163 | 164 | async def test(): 165 | ( 166 | self.res1, 167 | self.res2, 168 | self.res3, 169 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 170 | 171 | self.loop.run_until_complete(test()) 172 | real_result = [self.res1, self.res2, self.res3] 173 | expected_result = [self.expected_content, self.headers, self.status_code] 174 | self.assertCountEqual(real_result, expected_result) 175 | 176 | def test_parse_type_three(self): 177 | self.detection = { 178 | "type": 3, 179 | "payload": { 180 | "page": "/index.html", 181 | "value": "test", 182 | "status_code": 200, 183 | }, 184 | } 185 | self.expected_content = None 186 | self.headers = multidict.CIMultiDict() 187 | 188 | async def test(): 189 | ( 190 | self.res1, 191 | self.res2, 192 | self.res3, 193 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 194 | 195 | self.loop.run_until_complete(test()) 196 | real_result = [self.res1, self.res2, self.res3] 197 | expected_result = [self.expected_content, self.headers, self.status_code] 198 | self.assertCountEqual(real_result, expected_result) 199 | 200 | def test_call_handle_html(self): 201 | self.detection = {"type": 1} 202 | self.call_content = b"" 203 | self.expected_content = self.page_content 204 | 205 | async def test(): 206 | ( 207 | self.res1, 208 | self.res2, 209 | self.res3, 210 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 211 | 212 | self.loop.run_until_complete(test()) 213 | self.handler.html_handler.handle_content.assert_called_with(self.call_content) 214 | 215 | def test_parse_exception(self): 216 | self.detection = {} 217 | self.expected_content = self.page_content 218 | 219 | async def test(): 220 | ( 221 | self.res1, 222 | self.res2, 223 | self.res3, 224 | ) = await self.handler.parse_tanner_response(self.requested_name, self.detection) 225 | 226 | with self.assertRaises(KeyError): 227 | self.loop.run_until_complete(test()) 228 | 229 | def tearDown(self): 230 | shutil.rmtree(self.main_page_path) 231 | -------------------------------------------------------------------------------- /snare/tests/test_tanner_handler_submit_data.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import asyncio 3 | import argparse 4 | import shutil 5 | import os 6 | import json 7 | import yarl 8 | import aiohttp 9 | from json import JSONDecodeError 10 | from snare.utils.asyncmock import AsyncMock 11 | from snare.tanner_handler import TannerHandler 12 | from snare.utils.page_path_generator import generate_unique_path 13 | 14 | 15 | class TestSubmitData(unittest.TestCase): 16 | def setUp(self): 17 | meta = {} 18 | run_args = argparse.ArgumentParser() 19 | run_args.add_argument("--tanner") 20 | run_args.add_argument("--page-dir") 21 | self.main_page_path = generate_unique_path() 22 | os.makedirs(self.main_page_path) 23 | page_dir = self.main_page_path.rsplit("/")[-1] 24 | args = run_args.parse_args(["--page-dir", page_dir]) 25 | args_dict = vars(args) 26 | args_dict["full_page_path"] = self.main_page_path 27 | self.loop = asyncio.new_event_loop() 28 | self.data = { 29 | "method": "GET", 30 | "path": "/", 31 | "headers": { 32 | "Host": "test_host", 33 | "Connection": "keep-alive", 34 | "Upgrade-Insecure-Requests": "1", 35 | "User-Agent": "test_agent", 36 | "Accept": "text/html", 37 | "Accept-Encoding": "test_encoding", 38 | "Accept-Language": "test_lang", 39 | "Cookie": "test_cookie", 40 | }, 41 | "uuid": 
"test_uuid", 42 | "peer": {"ip": "::1", "port": 80}, 43 | "status": 200, 44 | "cookies": "test_cookies", 45 | "sess_uuid": "test_uuid", 46 | } 47 | aiohttp.ClientSession.post = AsyncMock( 48 | return_value=aiohttp.ClientResponse( 49 | url=yarl.URL("http://www.example.com"), 50 | method="GET", 51 | writer=None, 52 | continue100=1, 53 | timer=None, 54 | request_info=None, 55 | traces=None, 56 | loop=self.loop, 57 | session=None, 58 | ) 59 | ) 60 | uuid = "test_uuid" 61 | args.tanner = "tanner.mushmush.org" 62 | args.no_dorks = True 63 | self.handler = TannerHandler(args, meta, uuid) 64 | self.result = None 65 | 66 | def test_post_data(self): 67 | aiohttp.ClientResponse.json = AsyncMock(return_value=dict(detection={"type": 1}, sess_uuid="test_uuid")) 68 | 69 | async def test(): 70 | self.result = await self.handler.submit_data(self.data) 71 | 72 | self.loop.run_until_complete(test()) 73 | aiohttp.ClientSession.post.assert_called_with( 74 | "http://tanner.mushmush.org:8090/event", json=self.data, timeout=10.0 75 | ) 76 | 77 | def test_event_result(self): 78 | aiohttp.ClientResponse.json = AsyncMock(return_value=dict(detection={"type": 1}, sess_uuid="test_uuid")) 79 | 80 | async def test(): 81 | self.result = await self.handler.submit_data(self.data) 82 | 83 | self.loop.run_until_complete(test()) 84 | self.assertEqual(self.result, dict(detection={"type": 1}, sess_uuid="test_uuid")) 85 | 86 | def test_submit_data_error(self): 87 | aiohttp.ClientResponse.json = AsyncMock(side_effect=JSONDecodeError("ERROR", "", 0)) 88 | 89 | async def test(): 90 | self.result = await self.handler.submit_data(self.data) 91 | 92 | with self.assertLogs(level="ERROR") as log: 93 | self.loop.run_until_complete(test()) 94 | self.assertIn( 95 | "Error submitting data: ERROR: line 1 column 1 (char 0) {}".format(self.data), 96 | log.output[0], 97 | ) 98 | 99 | def test_event_result_exception(self): 100 | aiohttp.ClientResponse.json = AsyncMock(side_effect=Exception()) 101 | 102 | async def test(): 103 | self.result = await self.handler.submit_data(self.data) 104 | 105 | with self.assertRaises(Exception): 106 | self.loop.run_until_complete(test()) 107 | 108 | def tearDown(self): 109 | shutil.rmtree(self.main_page_path) 110 | -------------------------------------------------------------------------------- /snare/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mushorg/snare/b17fdfe7c2ba3ac540548763d73fc475cfc185c4/snare/utils/__init__.py -------------------------------------------------------------------------------- /snare/utils/asyncmock.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock 2 | 3 | 4 | class AsyncMock(Mock): # custom function defined to mock asyncio coroutines 5 | def __call__(self, *args, **kwargs): 6 | sup = super(AsyncMock, self) 7 | 8 | async def coro(): 9 | return sup.__call__(*args, **kwargs) 10 | 11 | return coro() 12 | 13 | def __await__(self): 14 | return self().__await__() 15 | -------------------------------------------------------------------------------- /snare/utils/logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import logging.handlers 3 | 4 | 5 | class LevelFilter(logging.Filter): 6 | """Filters (lets through) all messages with level < LEVEL""" 7 | 8 | def __init__(self, level): 9 | self.level = level 10 | 11 | def filter(self, record): 12 | return record.levelno < self.level 
13 | 14 | # "<" instead of "<=": since logger.setLevel is inclusive, this should be exclusive 15 | 16 | 17 | class Logger: 18 | @staticmethod 19 | def create_logger(debug_filename, err_filename, logger_name): 20 | logger = logging.getLogger(logger_name) 21 | logger.setLevel(logging.DEBUG) 22 | logger.propagate = False 23 | formatter = logging.Formatter( 24 | fmt="%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s", 25 | datefmt="%Y-%m-%d %H:%M:%S", 26 | ) 27 | 28 | # ERROR log to 'snare.err' 29 | error_log_handler = logging.handlers.RotatingFileHandler(err_filename, encoding="utf-8") 30 | error_log_handler.setLevel(logging.ERROR) 31 | error_log_handler.setFormatter(formatter) 32 | logger.addHandler(error_log_handler) 33 | 34 | # DEBUG log to 'snare.log' 35 | debug_log_handler = logging.handlers.RotatingFileHandler(debug_filename, encoding="utf-8") 36 | debug_log_handler.setLevel(logging.DEBUG) 37 | debug_log_handler.setFormatter(formatter) 38 | max_level_filter = LevelFilter(logging.ERROR) 39 | debug_log_handler.addFilter(max_level_filter) 40 | logger.addHandler(debug_log_handler) 41 | 42 | return logger 43 | 44 | @staticmethod 45 | def create_clone_logger(log_filename, logger_name): 46 | logger = logging.getLogger(logger_name) 47 | formatter = logging.Formatter( 48 | fmt="%(asctime)s %(levelname)s:%(name)s:%(funcName)s: %(message)s", 49 | datefmt="%Y-%m-%d %H:%M:%S", 50 | ) 51 | # logs to 'clone.err' 52 | debug_log_handler = logging.handlers.RotatingFileHandler(log_filename, encoding="utf-8") 53 | debug_log_handler.setLevel(logging.DEBUG) 54 | debug_log_handler.setFormatter(formatter) 55 | logger.addHandler(debug_log_handler) 56 | -------------------------------------------------------------------------------- /snare/utils/page_path_generator.py: -------------------------------------------------------------------------------- 1 | import string 2 | import random 3 | import os 4 | 5 | 6 | def directory_generator(size=9, chars=string.ascii_lowercase + string.digits): 7 | return "".join(random.choice(chars) for _ in range(size)) 8 | 9 | 10 | def generate_unique_path(): 11 | path = "/opt/snare/pages/" + directory_generator() 12 | while os.path.exists(path): 13 | path = "/opt/snare/pages/" + directory_generator() 14 | return path 15 | -------------------------------------------------------------------------------- /snare/utils/snare_helpers.py: -------------------------------------------------------------------------------- 1 | import os 2 | import hashlib 3 | import mimetypes 4 | import json 5 | import shutil 6 | import argparse 7 | import logging 8 | from os import walk 9 | from distutils.version import StrictVersion 10 | from bs4 import BeautifulSoup 11 | 12 | 13 | class VersionManager: 14 | def __init__(self): 15 | self.logger = logging.getLogger(__name__) 16 | self.version = "0.3.0" 17 | self.version_mapper = { 18 | "0.1.0": ["0.1.0", "0.4.0"], 19 | "0.2.0": ["0.5.0", "0.5.0"], 20 | "0.3.0": ["0.5.0", "0.6.0"], 21 | } 22 | 23 | def check_compatibility(self, tanner_version): 24 | min_version = self.version_mapper[self.version][0] 25 | max_version = self.version_mapper[self.version][1] 26 | if not (StrictVersion(min_version) <= StrictVersion(tanner_version) <= StrictVersion(max_version)): 27 | self.logger.exception("Wrong tanner version %s", tanner_version) 28 | raise RuntimeError( 29 | "Wrong tanner version: {}. 
Compatible versions are {} - {}".format( 30 | tanner_version, min_version, max_version 31 | ) 32 | ) 33 | 34 | 35 | class Converter: 36 | def __init__(self): 37 | self.logger = logging.getLogger(__name__) 38 | self.meta = {} 39 | 40 | def convert(self, path): 41 | files_to_convert = [] 42 | 43 | for (dirpath, dirnames, filenames) in walk(path): 44 | for fn in filenames: 45 | files_to_convert.append(os.path.join(dirpath, fn)) 46 | 47 | for fn in files_to_convert: 48 | path_len = len(path) 49 | file_name = fn[path_len:] 50 | m = hashlib.md5() 51 | m.update(fn.encode("utf-8")) 52 | hash_name = m.hexdigest() 53 | self.meta[file_name] = { 54 | "hash": hash_name, 55 | "headers": [ 56 | {"Content-Type": mimetypes.guess_type(file_name)[0]}, 57 | ], 58 | } 59 | self.logger.debug("Converting the file as %s ", os.path.join(path, hash_name)) 60 | shutil.copyfile(fn, os.path.join(path, hash_name)) 61 | os.remove(fn) 62 | 63 | with open(os.path.join(path, "meta.json"), "w") as mj: 64 | json.dump(self.meta, mj) 65 | 66 | 67 | def add_meta_tag(page_dir, index_page, config, base_path): 68 | google_content = config["WEB-TOOLS"]["google"] 69 | bing_content = config["WEB-TOOLS"]["bing"] 70 | 71 | if not google_content and not bing_content: 72 | return 73 | 74 | main_page_path = os.path.join(os.path.join(base_path, "pages"), page_dir, index_page) 75 | with open(main_page_path) as main: 76 | main_page = main.read() 77 | soup = BeautifulSoup(main_page, "html.parser") 78 | 79 | if google_content and soup.find("meta", attrs={"name": "google-site-verification"}) is None: 80 | google_meta = soup.new_tag("meta") 81 | google_meta.attrs["name"] = "google-site-verification" 82 | google_meta.attrs["content"] = google_content 83 | soup.head.append(google_meta) 84 | if bing_content and soup.find("meta", attrs={"name": "msvalidate.01"}) is None: 85 | bing_meta = soup.new_tag("meta") 86 | bing_meta.attrs["name"] = "msvalidate.01" 87 | bing_meta.attrs["content"] = bing_content 88 | soup.head.append(bing_meta) 89 | 90 | html = soup.prettify("utf-8") 91 | with open(main_page_path, "wb") as file: 92 | file.write(html) 93 | 94 | 95 | def check_meta_file(meta_info): 96 | for key, val in meta_info.items(): 97 | if "hash" in val and any(header in val for header in ["content_type", "headers"]): 98 | continue 99 | else: 100 | return False 101 | return True 102 | 103 | 104 | def parse_timeout(timeout): 105 | timeouts_coeff = {"M": 60, "H": 3600, "D": 86400} 106 | 107 | form = timeout[-1] 108 | if form not in timeouts_coeff.keys(): 109 | print_color("Bad timeout format, default will be used", "WARNING") 110 | result = parse_timeout("24H") 111 | else: 112 | result = int(timeout[:-1]) 113 | result *= timeouts_coeff[form] 114 | return result 115 | 116 | 117 | def str_to_bool(v): 118 | if v.lower() == "true": 119 | return True 120 | elif v.lower() == "false": 121 | return False 122 | else: 123 | raise argparse.ArgumentTypeError("Boolean value expected") 124 | 125 | 126 | def print_color(msg, mode="INFO", end="\n"): 127 | colors = { 128 | "INFO": "\033[97m", # white 129 | "ERROR": "\033[31m", # red 130 | "WARNING": "\033[33m", # yellow 131 | } 132 | try: 133 | color = colors[mode] 134 | except KeyError: 135 | color = colors["INFO"] 136 | print(color + str(msg) + "\033[0m", end=end) 137 | 138 | 139 | def check_privileges(path): 140 | """ 141 | Checks if the user has privileges to the path passed as argument. 
142 | """ 143 | if not os.path.exists(path): 144 | try: 145 | os.makedirs(path) 146 | except PermissionError: 147 | raise PermissionError(f"Failed to create path: {os.path.abspath(path)}") 148 | if not os.access(path, os.W_OK): 149 | raise PermissionError(f"Failed to access path: {os.path.abspath(path)}") 150 | --------------------------------------------------------------------------------
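The tests above stub coroutines with the AsyncMock helper from snare/utils/asyncmock.py. A minimal sketch of that pattern outside any real test case; the Fetcher class and its get_json coroutine are invented here purely for illustration and are not part of the repository:

import asyncio

from snare.utils.asyncmock import AsyncMock


class Fetcher:
    """Hypothetical stand-in for the aiohttp objects patched in the real tests."""

    async def get_json(self):
        raise NotImplementedError  # replaced by the stub below


fetcher = Fetcher()
# Awaiting the AsyncMock yields return_value and records the call like a normal Mock.
fetcher.get_json = AsyncMock(return_value={"dorks": ["test_dork1"]})

loop = asyncio.new_event_loop()
result = loop.run_until_complete(fetcher.get_json())
assert result == {"dorks": ["test_dork1"]}
fetcher.get_json.assert_called_with()
loop.close()

The aiohttp patches in the tests (for example aiohttp.ClientResponse.json = AsyncMock(...)) follow the same idea, only applied to library classes instead of a local object.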
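snare/utils/logger.py splits output into a debug log and an error log, with LevelFilter keeping ERROR records out of the debug file. A small sketch of that wiring; the /tmp paths and the "snare-demo" logger name are placeholders chosen only for this example:

from snare.utils.logger import Logger

log = Logger.create_logger("/tmp/snare-demo.log", "/tmp/snare-demo.err", "snare-demo")
log.debug("recorded in snare-demo.log only")  # below ERROR, so LevelFilter lets it through
log.error("recorded in snare-demo.err only")  # ERROR and above are excluded from the debug file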
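The helpers in snare/utils/snare_helpers.py are small enough to exercise directly. A sketch under the assumption that the package and its dependencies are installed and /tmp is writable; the demo directory name is arbitrary:

from snare.utils.snare_helpers import check_privileges, parse_timeout, print_color, str_to_bool

assert parse_timeout("10M") == 10 * 60      # minutes to seconds
assert parse_timeout("2H") == 2 * 60 * 60   # hours to seconds
assert parse_timeout("1D") == 24 * 60 * 60  # days to seconds
assert str_to_bool("True") is True          # comparison is case-insensitive
assert str_to_bool("false") is False

check_privileges("/tmp/snare-demo-pages")          # creates the directory and verifies write access
print_color("helpers behave as expected", "INFO")  # unknown modes fall back to the INFO colour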