├── tests ├── __init__.py ├── test_docs.py └── test_scan_testdata.py ├── requirements.txt ├── .gitignore ├── SECURITY.md ├── DEVELOPMENT.md ├── LICENSE ├── .github └── workflows │ └── runpyci.yml ├── pyproject.toml ├── runpyci.sh ├── CONTRIBUTIONS.md ├── README.md ├── TESTS.md └── snallygaster /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | dnspython 2 | lxml 3 | urllib3 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.py[cod] 3 | *.egg-info/ 4 | dist/ 5 | build/ 6 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | Security Policy 2 | =============== 3 | 4 | Supported Versions 5 | ------------------ 6 | 7 | Only the latest release is supported 8 | 9 | Reporting a Vulnerability 10 | ------------------------- 11 | 12 | Low impact vulnerabilities, e.g. DoS, can be reported directly to the issue tracker. 13 | 14 | Severe vulnerabilities can be reported via E-Mail: 15 | 16 | * https://hboeck.de/en/contact.html 17 | -------------------------------------------------------------------------------- /DEVELOPMENT.md: -------------------------------------------------------------------------------- 1 | This file contains development notes for myself. 2 | 3 | How to make a new release 4 | ========================= 5 | 6 | * raise version number in *pyproject.toml*. 
7 | * Tag it: 8 | ``` 9 | git tag -a v0.0.x 10 | [add changelog message] 11 | git push origin v0.0.x 12 | ``` 13 | * Build the wheel and source distribution: 14 | ``` 15 | python -m build 16 | ``` 17 | * Upload to PyPI: 18 | ``` 19 | twine upload dist/* 20 | ``` 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (C) Hanno Böck and contributors 2 | 3 | Permission to use, copy, modify, and/or distribute this software for any 4 | purpose with or without fee is hereby granted. 5 | 6 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 7 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 8 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 9 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 10 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 11 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 12 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
13 | -------------------------------------------------------------------------------- /tests/test_docs.py: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: 0BSD 2 | 3 | import re 4 | import unittest 5 | 6 | 7 | class TestDocs(unittest.TestCase): 8 | # checking if there is documentation for all snallygaster tests 9 | # in the TEST.md documentation 10 | def test_docs(self): 11 | f = open("snallygaster", encoding="utf-8") 12 | funcs = re.findall("def test_([a-z_]*)", f.read()) 13 | f.close() 14 | fd = open("TESTS.md", encoding="utf-8") 15 | docs = [] 16 | ol = "" 17 | for line in fd: 18 | if line.startswith("---"): 19 | docs.append(ol.rstrip()) 20 | ol = line 21 | fd.close() 22 | self.assertEqual(funcs, docs) 23 | 24 | 25 | if __name__ == "__main__": 26 | unittest.main() 27 | -------------------------------------------------------------------------------- /.github/workflows/runpyci.yml: -------------------------------------------------------------------------------- 1 | # last update: 2025-10-08 2 | # https://github.com/hannob/codingstyle 3 | --- 4 | name: runpyci 5 | "on": 6 | - pull_request 7 | - push 8 | 9 | jobs: 10 | build: 11 | strategy: 12 | matrix: 13 | python-version: [3.9, 3.x, 3.14] 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | - name: Set up Python ${{ matrix.python-version }} ${{ matrix.os }} 18 | uses: actions/setup-python@v5 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | - name: Install dependencies and linters 22 | run: | 23 | [ -e requirements.txt ] && pip install -r requirements.txt 24 | pip install pycodestyle pyupgrade pyflakes dlint pylint ruff 25 | - name: Run tests 26 | env: 27 | RUN_ONLINETESTS: 1 28 | run: | 29 | ./runpyci.sh 30 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = 
"snallygaster" 3 | version = "0.0.15" 4 | description = "Tool to scan for secret files on HTTP servers" 5 | readme = "README.md" 6 | authors = [{name = "Hanno Böck"}] 7 | classifiers=[ 8 | "Development Status :: 4 - Beta", 9 | "Intended Audience :: System Administrators", 10 | "Natural Language :: English", 11 | "Programming Language :: Python :: 3", 12 | "Programming Language :: Python :: 3 :: Only", 13 | "Programming Language :: Python :: 3.9", 14 | "Programming Language :: Python :: 3.10", 15 | "Programming Language :: Python :: 3.11", 16 | "Programming Language :: Python :: 3.12", 17 | "Programming Language :: Python :: 3.13", 18 | "Programming Language :: Python :: 3.14" 19 | ] 20 | license = "0BSD" 21 | keywords = ["security", "vulnerability", "http"] 22 | requires-python = ">=3.9" 23 | dependencies = [ 24 | "dnspython", 25 | "lxml", 26 | "urllib3" 27 | ] 28 | 29 | [tool.setuptools_scm] 30 | 31 | [tool.setuptools] 32 | script-files = ["snallygaster"] 33 | 34 | [project.urls] 35 | "Homepage" = "https://github.com/hannob/snallygaster" 36 | "Source" = "https://github.com/hannob/snallygaster" 37 | "Bug Tracker" = "https://github.com/hannob/snallygaster/issues" 38 | 39 | [build-system] 40 | requires = ["setuptools >= 61.0.0", "setuptools_scm"] 41 | build-backend = "setuptools.build_meta" 42 | -------------------------------------------------------------------------------- /runpyci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # last update: 2025-08-30 3 | # https://github.com/hannob/codingstyle 4 | set -euo pipefail 5 | 6 | 
PYLINTIG="consider-using-with,design,fixme,invalid-name,missing-docstring,modified-iterating-list,no-member,possibly-used-before-assignment,protected-access,too-many-lines,unused-argument,broad-exception-caught,c-extension-no-member,duplicate-code,global-statement,global-variable-not-assigned,import-error,import-outside-toplevel,inconsistent-return-statements,redefined-outer-name,unspecified-encoding" 7 | RUFFIG="ANN,C90,D,FIX001,FIX002,ICN001,PLR0911,PLR0912,PLR0913,PLR0915,PTH,S314,S501,S603,SLF001,T201,TD002,TD003,B008,BLE001,COM812,FBT002,I001,N802,N806,PERF203,PERF401,PLC0415,PLR2004,PLW0602,PLW0603,PT009,RET505,RUF100,S202,S310,S607,S608,SIM102,SIM105,SIM108,SIM113,SIM114,SIM115,TD001,TD004,TRY300" 8 | 9 | pyfind=$(find -name \*.py) 10 | pygrep=$(grep -rl --exclude-dir=.ruff_cache '^#!/usr/bin/python\|^#!/usr/bin/env python' . || true) 11 | pyfiles=$(echo "$pyfind" "$pygrep" | sort -u) 12 | 13 | pycodestyle --max-line-length=100 --ignore=W503,E203 $pyfiles 14 | pyupgrade --py313-plus $pyfiles 15 | pyflakes $pyfiles 16 | flake8 --select=DUO --ignore=DUO107,DUO123,DUO131 $pyfiles 17 | isort --line-length=100 --diff --check-only . 
# SPDX-License-Identifier: 0BSD

import http.server
import os
import ssl
import subprocess
import tempfile
import threading
import unittest

# Repository containing the files served during the scan test.
TESTDATA_REPO = "https://github.com/hannob/snallygaster-testdata"

# Maps snallygaster test names to the exact output line expected when
# scanning the local HTTPS test server on port 4443.
TESTDATA = {"backup_archive": "[backup_archive] https://localhost:4443/backup.zip",
            "git_dir": "[git_dir] https://localhost:4443/.git/config",
            "deadjoe": "[deadjoe] https://localhost:4443/DEADJOE",
            "coredump": "[coredump] https://localhost:4443/core",
            "backupfiles": "[backupfiles] https://localhost:4443/index.php~",
            "ds_store": "[ds_store] https://localhost:4443/.DS_Store",
            "privatekey": "[privatekey_pkcs8] https://localhost:4443/server.key",
            "desktopini": "[desktopini] https://localhost:4443/desktop.ini",
            }


class TestScanTestdata(unittest.TestCase):
    """Run snallygaster against a local HTTPS server serving known test data."""

    @unittest.skipUnless(os.environ.get("RUN_ONLINETESTS"),
                         "Not running online tests")
    def test_scan_testdata(self):
        """Serve the testdata repository over HTTPS and verify scan output.

        Clones (or symlinks, if TESTDATA_REPOSITORY is set) the testdata
        repository, serves it via a TLS-wrapped SimpleHTTPRequestHandler
        on localhost:4443, then runs snallygaster once per entry in
        TESTDATA and compares the output line by line.
        """
        tmp = tempfile.mkdtemp(prefix="testdata")
        testdata_dir = os.path.join(tmp, "testdata")
        # Use a local checkout if provided, otherwise do a shallow clone.
        if os.environ.get("TESTDATA_REPOSITORY"):
            os.symlink(os.environ.get("TESTDATA_REPOSITORY"), testdata_dir)
        else:
            subprocess.run(["git", "clone", "--depth=1",
                            TESTDATA_REPO, testdata_dir],
                           check=True)

        olddir = os.getcwd()
        # SimpleHTTPRequestHandler serves the current working directory.
        os.chdir(testdata_dir)
        httpd = http.server.HTTPServer(("localhost", 4443),
                                       http.server.SimpleHTTPRequestHandler)
        # The scanner deliberately ignores certificate errors, so a
        # self-signed certificate from the testdata repo is sufficient.
        context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        context.load_cert_chain(certfile=os.path.join(testdata_dir,
                                                      "testserver.pem"))
        httpd.socket = context.wrap_socket(httpd.socket, server_side=True)
        t = threading.Thread(target=httpd.serve_forever)
        t.daemon = True
        t.start()

        try:
            for test, expected in TESTDATA.items():
                testrun = subprocess.run([os.path.join(olddir, "snallygaster"),
                                          "-t", test, "localhost:4443",
                                          "--nowww", "--nohttp"],
                                         stdout=subprocess.PIPE, check=True)
                output = testrun.stdout.decode("utf-8").rstrip()
                self.assertEqual(output, expected)
        finally:
            # Always release the port and restore the working directory
            # so a failing assertion does not affect later tests.
            httpd.shutdown()
            httpd.server_close()
            os.chdir(olddir)


if __name__ == "__main__":
    unittest.main()
29 | 30 | 31 | Coding style 32 | ============ 33 | 34 | The code complies with [pycodestyle](https://pypi.org/project/pycodestyle/), except for 35 | the W503-rule to comply with the latest PEP 8 [recommendation for the placement of 36 | binary operators]( 37 | https://www.python.org/dev/peps/pep-0008/#should-a-line-break-before-or-after-a-binary-operator). 38 | 39 | The code should produce no warnings with this command: 40 | 41 | ``` 42 | python tests\test_codingstyle.py 43 | ``` 44 | 45 | 46 | New Tests 47 | ========= 48 | 49 | If you consider adding new tests please consider that there should be a reasonable 50 | balance between impact, prevalence of the issue and cost (time) of the test. 51 | 52 | This is best illustrated with a few examples: 53 | 54 | * The ds_store test often has a very low impact, but it is extremely common (prevalence) 55 | and only a single HTTP request. 56 | * The bitcoin_wallet test rarely finds anything, but the impact is very high and the 57 | test is cheap. 58 | * The sql_dump test is relatively slow, as it tests many filename variations, but the 59 | impact can be very high (leak of large amounts of private data) and it happens quite 60 | often. 61 | 62 | A test with a low impact that rarely finds anything and is very slow is unlikely to get 63 | accepted. Consider doing scans of the Alexa Top 1 Million to get a rough idea of how 64 | prevalent an issue is. 65 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | snallygaster 2 | ============ 3 | 4 | Finds file leaks and other security problems on HTTP servers. 5 | 6 | what? 7 | ----- 8 | 9 | snallygaster is a tool that looks for files accessible on web servers that shouldn't be 10 | public and can pose a security risk. 11 | 12 | Typical examples include publicly accessible git repositories, backup files potentially 13 | containing passwords or database dumps. 
In addition, it contains a few checks for other 14 | security vulnerabilities. 15 | 16 | As an introduction to these kinds of issues you may want to watch this talk: 17 | * [Attacking with HTTP Requests](https://www.youtube.com/watch?v=Bppr9rbmwz4) 18 | 19 | See the [TESTS.md](TESTS.md) file for an overview of all tests and links to further 20 | information about the issues. 21 | 22 | install 23 | ------- 24 | 25 | snallygaster is available [via pypi](https://pypi.org/project/snallygaster/): 26 | 27 | ``` 28 | pip3 install snallygaster 29 | ``` 30 | 31 | It's a simple python 3 script, so you can just download the file "snallygaster" and 32 | execute it. Dependencies are urllib3, lxml and dnspython. In Debian- or Ubuntu-based 33 | distributions you can install them via: 34 | 35 | ``` 36 | apt install python3-dnspython python3-lxml python3-urllib3 37 | ``` 38 | 39 | distribution packages 40 | --------------------- 41 | 42 | Some Linux and BSD systems have snallygaster packaged: 43 | 44 | * [Gentoo](https://packages.gentoo.org/packages/net-analyzer/snallygaster) 45 | * [NetBSD](https://pkgsrc.se/security/snallygaster) 46 | * [Arch Linux (git version)](https://aur.archlinux.org/packages/snallygaster-git/) 47 | * [openSUSE](https://software.opensuse.org/package/snallygaster) 48 | 49 | More at [Repology](https://repology.org/project/snallygaster/versions). 50 | 51 | faq 52 | --- 53 | 54 | Q: I want to contribute / send a patch / a pull request! 55 | 56 | A: That's great, but please read the [CONTRIBUTIONS.md](CONTRIBUTIONS.md) file. 57 | 58 | Q: What's that name? 59 | 60 | A: [Snallygaster](https://en.wikipedia.org/wiki/Snallygaster) is the name of a dragon 61 | that according to some legends was seen in Maryland and other parts of the US. There's 62 | no particular backstory why this tool got named this way, other than that I was looking 63 | for a fun and interesting name. 
64 | 65 | I thought a name of some mythical creature would be nice, but most of those had the 66 | problem that I would have had name collisions with other software. Checking the list of 67 | dragons on Wikipedia I learned about the Snallygaster. The name sounded funny, the idea 68 | that there are dragon legends in the US interesting and I found no other piece of 69 | software with that name. 70 | 71 | credit and thanks 72 | ----------------- 73 | 74 | * Thanks to Tim Philipp Schäfers and Sebastian Neef from the [Internetwache]( 75 | https://www.internetwache.org/) for plenty of ideas about things to look for. 76 | * Thanks to [Craig Young](https://secur3.us/) for many discussions during the 77 | development of this script. 78 | * Thanks to [Sebastian Pipping](https://blog.hartwork.org/) for some help with Python 79 | programming during the development. 80 | * Thanks to [Benjamin Balder Bach](https://overtag.dk/) for teaching me lots of things 81 | about Python packaging. 82 | * Thanks to the organizers of Bornhack, Driving IT, SEC-T and the Rights and Freedom 83 | track at 34C3 for letting me present this work. 84 | 85 | author 86 | ------ 87 | 88 | snallygaster is developed and maintained by [Hanno Böck](https://hboeck.de/). 89 | -------------------------------------------------------------------------------- /TESTS.md: -------------------------------------------------------------------------------- 1 | TESTS 2 | ===== 3 | 4 | An overview of tests provided by snallygaster: 5 | 6 | Default tests 7 | ============= 8 | 9 | These tests are enabled by default and usually output information that directly leads to 10 | potential vulnerabilities. 11 | 12 | 13 | lfm_php 14 | ------- 15 | 16 | This checks for Lazy File Manager, a one-file php script that allows arbitrary file 17 | operations. It is often placed on compromised webpages. 18 | 19 | 20 | idea 21 | ---- 22 | 23 | Configuration file for JetBrains, can contain passwords. 
24 | 25 | 26 | symfony_databases_yml 27 | --------------------- 28 | 29 | Database configuration file (databases.yml) used by older versions of Symfony. These 30 | aren't supposed to be stored within the web root, but sometimes they are. 31 | 32 | 33 | rails_database_yml 34 | ------------------ 35 | 36 | Database configuration file (database.yml) used by Ruby on Rails. Misconfigurations can 37 | cause these to be readable. 38 | 39 | 40 | git_dir 41 | ------- 42 | 43 | When deploying web pages with a Git repository the .git directory may end up being 44 | publicly readable. This allows downloading the full repository. 45 | 46 | * [Internetwache: Don't publicly expose .git or how we downloaded your website's 47 | sourcecode]( 48 | https://en.internetwache.org/dont-publicly-expose-git-or-how-we-downloaded-your-websites-sourcecode-an-analysis-of-alexas-1m-28-07-2015/) 49 | * [Golem.de: Riskante Git-Verzeichnisse]( 50 | https://www.golem.de/news/websicherheit-riskante-git-verzeichnisse-1507-115476.html) 51 | * [GitTools - scripts to download .git directories]( 52 | https://github.com/internetwache/GitTools) 53 | * [git-dumper - script to download .git dir, faster than GitTools]( 54 | https://github.com/arthaud/git-dumper) 55 | 56 | 57 | svn_dir 58 | ------- 59 | 60 | Identical to git_dir issue, just with Subversion instead of Git. 61 | 62 | * [svnscaper - script to download .svn directories]( 63 | https://github.com/hannob/svnscraper) 64 | 65 | 66 | apache_server_status 67 | -------------------- 68 | 69 | Apache server-status pages. These can contain visitor URLs and IP addresses of visitors. 70 | 71 | * [Sucuri: Popular sites with Apache server-status enabled]( 72 | https://blog.sucuri.net/2012/10/popular-sites-with-apache-server-status-enabled.html) 73 | 74 | 75 | apache_server_info 76 | ------------------ 77 | 78 | Apache server-info pages. These can contain visitor URLs and IP addresses of visitors. 
79 | 80 | * [apache.org: Apache Module mod_info]( 81 | https://httpd.apache.org/docs/2.4/mod/mod_info.html) 82 | * [Heise.de: Webserver-Sicherheitslücke: Heikle Konfigurations- und Statusdaten 83 | publiziert]( 84 | https://www.heise.de/hintergrund/Webserver-Sicherheitsluecke-Heikle-Konfigurations-und-Statusdaten-publiziert-4971830.html?seite=3) 85 | 86 | 87 | coredump 88 | -------- 89 | 90 | Crashing processes on Linux and other unix systems can leave a memory dump file named 91 | "core" that may leak information like passwords. 92 | 93 | * [Hanno's Blog: Don't leave Coredumps on Web Servers]( 94 | https://blog.hboeck.de/archives/887-Dont-leave-Coredumps-on-Web-Servers.html) 95 | 96 | 97 | sftp_config 98 | ----------- 99 | 100 | Configuration file from the FTP client sublime FTP (sftp-config.json). It turns out 101 | sometimes people accidentally upload the configuration file of their FTP client, 102 | including credentials for their web space. 103 | 104 | * [Sucuri: SFTP/FTP Password Exposure via sftp-config.json]( 105 | https://blog.sucuri.net/2012/11/psa-sftpftp-password-exposure-via-sftp-config-json.html) 106 | 107 | 108 | wsftp_ini 109 | --------- 110 | 111 | Similar to sftp_config, but for WS_FTP. 112 | 113 | 114 | filezilla_xml 115 | ------------- 116 | 117 | Similar to sftp_config, but for FileZilla. 118 | 119 | 120 | winscp_ini 121 | ---------- 122 | 123 | Similar to sftp_config, but for WinSCP. 124 | 125 | 126 | ds_store 127 | -------- 128 | 129 | The Apple OS X file manager Finder creates these files. They may leak directory and file 130 | names. 131 | 132 | * [Internetwache: Scanning the Alexa Top 1M for .DS_Store files]( 133 | https://en.internetwache.org/scanning-the-alexa-top-1m-for-ds-store-files-12-03-2018/) 134 | * [ds_stope_exp (recursively download .DS_Store files)]( 135 | https://github.com/lijiejie/ds_store_exp) 136 | 137 | php_cs_fixer 138 | ------------ 139 | 140 | Cache file from PHP-CS-Fixer, a codingstyle checker for PHP. 
This effectively leaks a 141 | directory listing of PHP files. Supports both v2 and v3 formats. 142 | 143 | 144 | backupfiles 145 | ----------- 146 | 147 | Backup files and other leftovers from editors. Many editors create files with a ~ or 148 | .bak extension when overwriting a previous version. VIM creates swap files of the scheme 149 | `.[filename].swp`. On crashes EMACS creates `#[filename]#`. All of these are 150 | particularly problematic in combination with PHP, as a file that may contain secrets 151 | will end up on the webspace without a .php extension and thus won't be parsed. 152 | 153 | * [FEROSS: 1% of CMS-Powered Sites Expose Their Database Passwords]( 154 | https://feross.org/cmsploit/) 155 | 156 | 157 | backup_archive 158 | -------------- 159 | 160 | Complete or partial backups of servers are sometimes left online. This test checks for 161 | common names like backup.tar.gz. 162 | 163 | * [Golem.de: Datenlecks durch backup.zip]( 164 | https://www.golem.de/news/websicherheit-datenlecks-durch-backup-zip-1904-140564.html) 165 | 166 | 167 | deadjoe 168 | ------- 169 | 170 | The editor JOE creates a file DEADJOE on crashes, which contains content of the 171 | currently edited files. Similar to backupfiles. 172 | 173 | 174 | sql_dump 175 | -------- 176 | 177 | This checks for common names of SQL database dumps. These can lead to massive database 178 | leaks. 179 | 180 | * [Zeit Online: How 2,000 Unsecured Databases Landed on the Internet]( 181 | https://www.zeit.de/digital/datenschutz/2017-07/customer-data-how-2000-unsecured-databases-landed-online) 182 | 183 | 184 | bitcoin_wallet 185 | -------------- 186 | 187 | This scans for bitcoin wallets (wallet.dat) left on servers. While this is rare, 188 | obviously leaking those can come at a high cost. 189 | 190 | 191 | drupal_backup_migrate 192 | --------------------- 193 | 194 | The Drupal backup_migrate plugin stores backups of the CMS database in the web folder. 
195 | Access is prevented with an Apache .htaccess file, but that does not work on other web 196 | servers. 197 | 198 | 199 | magento_config 200 | -------------- 201 | 202 | Magento is a PHP web store that saves its config (including database credentials) in an 203 | XML file called "local.xml". Access is prevented with an Apache .htaccess file, but that 204 | does not work on other web servers. 205 | 206 | * [oss-security: Magento leaking of config file local.xml]( 207 | https://seclists.org/oss-sec/2017/q4/141) 208 | 209 | 210 | xaa 211 | --- 212 | 213 | xaa files are the output of the "split" command line tool on Unix systems. It's used to 214 | split large files. As large files often contain lots of data these may lead to large 215 | leaks (similar to sql_dump). 216 | 217 | 218 | optionsbleed 219 | ------------ 220 | 221 | A test for the Optionsbleed vulnerability, in which Apache corrupts the "Allow" header 222 | in a reply to an HTTP OPTIONS request. 223 | 224 | * [Fuzzing Project: Optionsbleed - HTTP OPTIONS method can leak Apache's server memory]( 225 | https://blog.fuzzing-project.org/60-Optionsbleed-HTTP-OPTIONS-method-can-leak-Apaches-server-memory.html) 226 | 227 | 228 | privatekey 229 | ---------- 230 | 231 | Checks for private keys, usually belonging to TLS/X.509 certificates. 232 | 233 | * [Golem.de: Private Keys on Web Servers]( 234 | https://www.golem.de/news/https-private-keys-on-web-servers-1707-128862.html) 235 | 236 | 237 | sshkey 238 | ------ 239 | 240 | Similar to the privatekey check this looks for SSH private keys on web servers. 241 | 242 | 243 | dotenv 244 | ------ 245 | 246 | This looks for Laravel ".env" files that may contain database credentials. 247 | 248 | 249 | invalidsrc 250 | ---------- 251 | 252 | This checks all src-references on a webpage's HTML and looks for inaccessible 253 | references. These may indicate domain takeover vulnerabilities. 
This test produces 254 | warnings quite often, though many of them are harmless: References to deleted files or 255 | simply syntax errors in URLs. 256 | 257 | * [Hanno's Blog: Abandoned Domain Takeover as a Web Security Risk]( 258 | https://blog.hboeck.de/archives/889-Abandoned-Domain-Takeover-as-a-Web-Security-Risk.html) 259 | 260 | 261 | ilias_defaultpw 262 | --------------- 263 | 264 | This checks installations of the Ilias e-learning software for the presence of a default 265 | username/password (root/homer). Ilias was involved in the 2018 hack of the German 266 | government, though it's unclear what vulnerability was used. 267 | 268 | * [Golem.de: Hack on German Government via E-Learning Software Ilias]( 269 | https://www.golem.de/news/government-hack-hack-on-german-government-via-e-learning-software-ilias-1803-133231.html) 270 | 271 | 272 | cgiecho 273 | ------- 274 | 275 | The cgiecho tool is part of the unmaintained software cgiemail. It contains a 276 | vulnerability where it allows leaking arbitrary files from the web root if they contain 277 | any guessable string in square brackets (e.g. `['password']`). 278 | 279 | * [Cgiemail - Source Code Disclosure/Local File Inclusion Exploit]( 280 | https://github.com/finbar-crago/cgiemail-exploit) 281 | 282 | 283 | phpunit_eval 284 | ------------ 285 | 286 | Tests for a remote code execution vulnerability in a script shipped with older versions 287 | of phpunit that will simply pass the POST data to PHP's eval. 288 | 289 | * [CVE-2017-9841 RCE vulnerability in phpunit]( 290 | https://web.archive.org/web/20181213234925/http://phpunit.vulnbusters.com/) 291 | 292 | 293 | acmereflect 294 | ----------- 295 | 296 | Tests if there's an ACME API endpoint that reflects content and can be abused for XSS. 297 | Outputs acmereflect_html if the API also reflects HTML code, acmereflect_html_sniff if 298 | it outputs HTML code and does MIME sniffing. 
299 | 300 | * [XSS using quirky implementations of ACME http-01]( 301 | https://labs.detectify.com/2018/09/04/xss-using-quirky-implementations-of-acme-http-01/) 302 | 303 | 304 | drupaldb 305 | -------- 306 | 307 | Misconfigured Drupal installations may expose their SQLite database. 308 | 309 | 310 | phpwarnings 311 | ----------- 312 | 313 | Tries to trigger a PHP warning with an invalid PHPSESSID. 314 | 315 | 316 | adminer 317 | ------- 318 | 319 | adminer is a one file php database frontend. (I may consider changing this to an info 320 | test, but for now I believe most of these are not intentionally publicly available, 321 | though they often have login forms.) 322 | 323 | * [Adminer leaks passwords; Magecart hackers rejoice]( 324 | https://gwillem.gitlab.io/2019/01/17/adminer-4.6.2-file-disclosure-vulnerability/) 325 | 326 | 327 | elmah 328 | ----- 329 | 330 | Public error console for the ELMAH library. This can contain cookies and other sensitive 331 | pieces of information, it shouldn't be accessible from outside. 332 | 333 | * [ASP.NET session hijacking with Google and ELMAH]( 334 | https://www.troyhunt.com/aspnet-session-hijacking-with-google/) 335 | 336 | 337 | citrix_rce 338 | ---------- 339 | 340 | Check for the Citrix CVE-2019-19781 RCE / directory traversal. 341 | 342 | * [Vulnerability in Citrix Application Delivery Controller and Citrix Gateway]( 343 | https://support.citrix.com/article/CTX267027) 344 | * [Citrix NetScaler CVE-2019-19781: What You Need to Know (Tripwire VERT)]( 345 | https://www.tripwire.com/state-of-security/vert/citrix-netscaler-cve-2019-19781-what-you-need-to-know/) 346 | 347 | 348 | installer 349 | --------- 350 | 351 | Search for unused installers of common PHP web applications. In most cases a stale 352 | installer can be used for code execution by installing the application and uploading a 353 | plugin. 354 | 355 | 356 | wpsubdir 357 | -------- 358 | 359 | Search for unused Wordpress installers in /wordpress/ subdir. 
360 | 361 | 362 | telescope 363 | --------- 364 | 365 | Search for publicly accessible instances of the Laravel Telescope software. This allows 366 | seeing detailed HTTP requests, including Cookies, and thus should never be accessible in 367 | production. 368 | 369 | 370 | vb_test 371 | ------- 372 | 373 | vb_test.php is a script provided by the proprietary Vbulletin software that allows 374 | webmasters to check server requirements. It should not be left on live hosts. 375 | 376 | 377 | headerinject 378 | ------------ 379 | 380 | This tries to inject an url-encoded newline and HTTP header into the URL. This often 381 | works on redirections if the host decodes the path and passes it to the location header. 382 | 383 | 384 | wpdebug 385 | ------- 386 | 387 | Publicly accessible debug.log file from Wordpress, contains usually PHP error messages, 388 | sometimes with sensitive data like passwords in stack traces. 389 | 390 | 391 | thumbsdb 392 | -------- 393 | 394 | Image metadata from old Windows versions, may leak file names and thumbnails. 395 | 396 | * [Vinetto thumbnail parser](https://github.com/AtesComp/Vinetto) 397 | 398 | 399 | duplicator 400 | ---------- 401 | 402 | Find leftover installer.php / installer-backup.php files from the Wordpress Duplicator 403 | plugin. Note: While this is claimed as "fixed" by the plugin authors in later versions 404 | as the plugin tries to delete these files after the next login, a vulnerable state can 405 | still happen if the files are not successfully deleted after a migration. 406 | 407 | * [Synacktiv: Remote code execution vulnerability in WordPress Duplicator]( 408 | https://www.synacktiv.com/ressources/advisories/WordPress_Duplicator-1.2.40-RCE.pdf) 409 | * [Anonleaks: KennotFM / Details zu Hack und Defacement]( 410 | https://anonleaks.net/2021/optinfoil/kennotfm-details-zu-hack-und-defacement/) 411 | 412 | 413 | desktopini 414 | ---------- 415 | 416 | Finds Windows Explorer desktop.ini metadata files. 
These sometimes leak information 417 | about possibly installed software on a developer machine or file names. Impact is 418 | usually low, but it is very common. 419 | 420 | 421 | postdebug 422 | --------- 423 | 424 | Some web frameworks show a debugging console when they receive an unexpected HTTP 425 | method. This can often be triggered by sending a POST request to the main page. This 426 | test looks for such debugging consoles from Laravel, Symfony, and Rails. 427 | 428 | 429 | djangodebug 430 | ----------- 431 | 432 | Information on 404 error pages can identify Django installations with debugging mode 433 | enabled. 434 | 435 | 436 | symfonydebug 437 | ------------ 438 | 439 | Similar to djangodebug, information on 404 error pages can identify Symfony 440 | installations with debugging mode enabled. 441 | 442 | 443 | axfr 444 | ---- 445 | 446 | Checks if name servers answer to AXFR zone transfer requests. These are usually never 447 | intended to be publicly accessible. 448 | 449 | * [Internetwache: Scanning Alexa's Top 1M for AXFR]( 450 | https://en.internetwache.org/scanning-alexas-top-1m-for-axfr-29-03-2015/) 451 | * [US-CERT: DNS Zone Transfer AXFR Requests May Leak Domain Information]( 452 | https://www.us-cert.gov/ncas/alerts/TA15-103A) 453 | * [D. J. Bernstein: How the AXFR protocol works]( 454 | https://cr.yp.to/djbdns/axfr-notes.html) 455 | 456 | 457 | openmonit 458 | --------- 459 | 460 | Check for Monit web interface with default username and password. 461 | 462 | * [Monit: Configuration file with default username/password combination (admin/monit)]( 463 | https://bitbucket.org/tildeslash/monit/issues/881/configuration-file-with-default-username) 464 | 465 | 466 | openelasticsearch 467 | ----------------- 468 | 469 | Check for open Elasticsearch instances with admin:admin (default password from Open 470 | Distro for Elasticsearch). 471 | 472 | 473 | Info tests 474 | ========== 475 | 476 | These tests are enabled with the "-i" parameter. 
They output information about a site 477 | that may be valuable for analysis, but does not directly indicate a security problem. 478 | 479 | 480 | drupal 481 | ------ 482 | 483 | Checks for the presence of the Drupal CMS and outputs the version. 484 | 485 | 486 | wordpress 487 | --------- 488 | 489 | Check for the presence of Wordpress and output version. 490 | 491 | 492 | mailman 493 | ------- 494 | 495 | Check for mailman and output version. 496 | 497 | 498 | django_staticfiles_json 499 | ----------------------- 500 | 501 | Checks for the presence of a Django assets manifest file [`staticfiles.json` 502 | ](https://docs.djangoproject.com/en/4.2/ref/contrib/staticfiles/). 503 | 504 | 505 | composer 506 | -------- 507 | 508 | Check for composer.json/composer.lock files. Can be checked with the [Symfony security 509 | check](https://symfony.com/doc/current/setup.html#security-checker) afterwards. 510 | 511 | 512 | phpinfo 513 | ------- 514 | 515 | Check for the presence of a phpinfo() script. 
516 | -------------------------------------------------------------------------------- /snallygaster: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-License-Identifier: 0BSD 3 | 4 | # Dependencies: 5 | # * Python 3 6 | # * urllib3 (for HTTP requests with Keep Alive support) 7 | # * lxml (for HTML parsing) 8 | # * dnspython (for DNS queries) 9 | 10 | 11 | import argparse 12 | import functools 13 | import json 14 | import random 15 | import re 16 | import socket 17 | import ssl 18 | import string 19 | import sys 20 | import urllib.parse 21 | 22 | import dns.query 23 | import dns.resolver 24 | import dns.zone 25 | import lxml.etree # noqa: DUO107 26 | import lxml.html # noqa: DUO107 27 | import urllib3 28 | 29 | STANDARD_PHP_FILES = ["index.php", "wp-config.php", "configuration.php", 30 | "config.php", "config.inc.php", "settings.php"] 31 | 32 | 33 | # initializing global state variables 34 | duplicate_preventer = [] 35 | mainpage_cache = {} 36 | dns_cache = {} 37 | 38 | # This disables warnings about the lack of certificate verification. 39 | # Usually this is a bad idea, but for this tool we want to find HTTPS leaks 40 | # even if they are shipped with invalid certificates. 
41 | urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) # noqa: DUO131 42 | 43 | 44 | def DEFAULT(f): 45 | f._is_default_test = True 46 | return f 47 | 48 | 49 | def INFO(f): 50 | f._is_info_test = True 51 | return f 52 | 53 | 54 | def HOSTNAME(f): 55 | f._is_hostname_test = True 56 | return f 57 | 58 | 59 | def pdebug(msg): 60 | if args.debug: 61 | print(f"[[debug]] {msg}") 62 | 63 | 64 | def pout(cause, url, misc="", noisymsg=False): 65 | if noisymsg and not args.noisy: 66 | return 67 | # we're storing URL without protocol/www-prefix and cause to avoid 68 | # duplicates on same host 69 | dup_check = cause + "__" + re.sub(r"http[s]?://(www\.)?", "", url) + misc 70 | if dup_check not in duplicate_preventer: 71 | duplicate_preventer.append(dup_check) 72 | if args.json: 73 | json_out.append({"cause": cause, "url": url, "misc": misc}) 74 | elif misc: 75 | print(f"[{cause}] {url} {misc}") 76 | else: 77 | print(f"[{cause}] {url}") 78 | 79 | 80 | def randstring(): 81 | return "".join(random.SystemRandom().choice(string.ascii_lowercase) for i in range(8)) 82 | 83 | 84 | # a random string that stays the same during one execution of snallygaster, 85 | # makes duplicate detection easier. 
86 | @functools.cache 87 | def staticrandstring(): 88 | return "".join(random.SystemRandom().choice(string.ascii_lowercase) for i in range(8)) 89 | 90 | 91 | def escape(msg): 92 | return repr(msg)[1:-1] 93 | 94 | 95 | def fetcher(fullurl, binary=False, getredir=False, geterrpage=False): 96 | data = "" 97 | redir = "" 98 | try: 99 | r = pool.request("GET", fullurl, retries=False, redirect=False) 100 | if getredir: 101 | headers = {k.lower(): v for k, v in r.headers.items()} 102 | redir = headers.get("location", "") 103 | elif (r.status != 200 and not geterrpage): 104 | data = "" 105 | elif binary: 106 | data = r.data 107 | else: 108 | data = r.data.decode("ascii", errors="ignore") 109 | except (urllib3.exceptions.HTTPError, UnicodeError, 110 | ConnectionRefusedError): 111 | data = "" 112 | if getredir: 113 | return data, redir 114 | return data 115 | 116 | 117 | def fetchpartial(fullurl, size, returnsize=False, binary=False): 118 | try: 119 | r = pool.request("GET", fullurl, retries=False, redirect=False, 120 | preload_content=False) 121 | if r.status == 200: 122 | ret = r.read(size) 123 | if binary: 124 | rv = ret 125 | else: 126 | rv = ret.decode("ascii", errors="ignore") 127 | if returnsize: 128 | size = r.headers.getlist("content-length") 129 | if size == []: 130 | size = 0 131 | else: 132 | size = int(size[0]) 133 | r.release_conn() 134 | return (rv, size) 135 | r.release_conn() 136 | return rv 137 | r.release_conn() 138 | except (urllib3.exceptions.HTTPError, UnicodeError, 139 | ConnectionRefusedError): 140 | pass 141 | if returnsize: 142 | return ("", 0) 143 | return "" 144 | 145 | 146 | @functools.cache 147 | def check404(url): 148 | rndurl = url + "/" + staticrandstring() + ".htm" 149 | 150 | pdebug(f"Checking 404 page state of {rndurl}") 151 | try: 152 | r = pool.request("GET", rndurl, retries=False, redirect=False) 153 | except (urllib3.exceptions.HTTPError, UnicodeError, 154 | ConnectionRefusedError): 155 | return False 156 | what404 = {} 157 | 
what404["rndurl"] = rndurl 158 | what404["state"] = r.status != 200 159 | what404["content"] = r.data.decode("ascii", errors="ignore") 160 | if any(m in what404["content"] for m in ["" in r: 280 | pout("apache_server_info", url + "/server-info") 281 | 282 | 283 | @DEFAULT 284 | def test_coredump(url): 285 | r = fetchpartial(url + "/core", 20, binary=True) 286 | if r and r[0:4] == b"\x7fELF": 287 | pout("coredump", url + "/core") 288 | 289 | 290 | @DEFAULT 291 | def test_sftp_config(url): 292 | r = fetcher(url + "/sftp-config.json") 293 | if '"type":' in r and "ftp" in r and '"save_before_upload"' in r: 294 | pout("sftp_config", url + "/sftp-config.json") 295 | 296 | 297 | @DEFAULT 298 | def test_wsftp_ini(url): 299 | for fn in ["WS_FTP.ini", "ws_ftp.ini", "WS_FTP.INI"]: 300 | r = fetcher(url + "/" + fn) 301 | if "[_config_]" in r: 302 | pout("wsftp_ini", url + "/" + fn) 303 | 304 | 305 | @DEFAULT 306 | def test_filezilla_xml(url): 307 | for fn in ["filezilla.xml", "sitemanager.xml", "FileZilla.xml"]: 308 | r = fetcher(url + "/" + fn) 309 | if " 10000000: 439 | pout("xaa", url + "/xaa") 440 | return 441 | # Check for signs of common compression formats (gz, bzip2, xz, zstd, zip). 
442 | if (r[0:3] == b"\x1f\x8b\x08" 443 | or r[0:3] in [b"BZh", b"BZ0"] 444 | or r[0:6] == b"\xFD7zXZ\x00" 445 | or r[0:4] in [b"\x28\xB5\x2F\xFD", b"PK\x03\x04"]): 446 | pout("xaa", url + "/xaa") 447 | 448 | 449 | @DEFAULT 450 | def test_optionsbleed(url): 451 | try: 452 | r = pool.request("OPTIONS", url, retries=False, redirect=False) 453 | except (ConnectionRefusedError, urllib3.exceptions.HTTPError, 454 | UnicodeError): 455 | return 456 | try: 457 | allow = str(r.headers["Allow"]) 458 | except KeyError: 459 | return 460 | if allow == "": 461 | pout("options_empty", url, noisymsg=True) 462 | return 463 | 464 | # catch some obvious cases first 465 | if (",," in allow) or (allow[0] == ",") or (allow[-1] == ","): 466 | pout("optionsbleed", url, escape(allow)) 467 | return 468 | 469 | z = [x.strip() for x in allow.split(",")] 470 | if re.match("^[a-zA-Z-]*$", "".join(z)): 471 | if len(z) > len(set(z)): 472 | pout("options_duplicates", url, escape(allow), noisymsg=True) 473 | return 474 | if re.match("^[a-zA-Z- ]*$", allow): 475 | pout("options_spaces", url, escape(allow)) 476 | return 477 | pout("optionsbleed", url, escape(allow)) 478 | 479 | 480 | @DEFAULT 481 | def test_privatekey(url): 482 | hostkey = re.sub("^www.", "", re.sub("(.*//|/.*)", "", url)) + ".key" 483 | wwwkey = "www." 
+ hostkey 484 | for fn in ["server.key", "privatekey.key", "myserver.key", "key.pem", 485 | hostkey, wwwkey]: 486 | r = fetcher(url + "/" + fn) 487 | if "BEGIN PRIVATE KEY" in r: 488 | pout("privatekey_pkcs8", f"{url}/{fn}") 489 | if "BEGIN RSA PRIVATE KEY" in r: 490 | pout("privatekey_rsa", f"{url}/{fn}") 491 | if "BEGIN DSA PRIVATE KEY" in r: 492 | pout("privatekey_dsa", f"{url}/{fn}") 493 | if "BEGIN EC PRIVATE KEY" in r: 494 | pout("privatekey_ec", f"{url}/{fn}") 495 | 496 | 497 | @DEFAULT 498 | def test_sshkey(url): 499 | for fn in ["id_rsa", "id_dsa", ".ssh/id_rsa", ".ssh/id_dsa"]: 500 | r = fetcher(url + "/" + fn) 501 | if "BEGIN" in r and "PRIVATE KEY" in r: 502 | pout("sshkey", f"{url}/{fn}") 503 | 504 | 505 | @DEFAULT 506 | def test_dotenv(url): 507 | r = fetcher(url + "/.env") 508 | if "APP_ENV=" in r or "DB_PASSWORD=" in r: 509 | pout("dotenv", url + "/.env") 510 | 511 | 512 | @DEFAULT 513 | def test_invalidsrc(url): 514 | r = getmainpage(url) 515 | try: 516 | p = lxml.html.document_fromstring(r.encode()) 517 | except lxml.etree.ParserError: 518 | return 519 | 520 | g = p.xpath("//*[@src]/@src") 521 | srcs = sorted(set(g)) 522 | 523 | checkeddomains = [] 524 | for src in srcs: 525 | try: 526 | realurl = urllib.parse.urljoin(url, src) 527 | domain = urllib.parse.urlparse(realurl).hostname 528 | protocol = urllib.parse.urlparse(realurl).scheme 529 | except ValueError: 530 | pout("invalidsrc_brokenurl", escape(url), src) 531 | continue 532 | if domain is None: 533 | continue 534 | if protocol not in ["https", "http"]: 535 | continue 536 | 537 | # We avoid double-checking multiple requests to the same host. 538 | # This is a compromise between speed and in-depth scanning. 
539 | if domain in checkeddomains: 540 | continue 541 | checkeddomains.append(domain) 542 | pdebug(f"Checking url {realurl}") 543 | 544 | if dnscache(domain) is None: 545 | pout("invalidsrc_dns", url, escape(src)) 546 | continue 547 | 548 | try: 549 | r = pool.request("GET", realurl, retries=False, redirect=False) 550 | if r.status >= 400: 551 | pout("invalidsrc_http", url, escape(src)) 552 | except UnicodeEncodeError: 553 | pass 554 | except (ConnectionRefusedError, ConnectionResetError, 555 | urllib3.exceptions.HTTPError): 556 | pout("invalidsrc_http_connfail", url, escape(src)) 557 | 558 | 559 | @DEFAULT 560 | def test_ilias_defaultpw(url): 561 | getmainpage(url) 562 | if (url + "/ilias.php" in mainpage_cache[url]["location"] 563 | or (url + "/login.php" in mainpage_cache[url]["location"] 564 | and "powered by ILIAS" in fetcher(mainpage_cache[url]["location"]))): 565 | # we're confident we found an ILIAS installation 566 | pdebug("Ilias found") 567 | try: 568 | login = pool.request("POST", url + "/ilias.php?cmd=post&baseClass=ilStartUpGUI", 569 | fields={"username": "root", 570 | "password": "homer", 571 | "cmd[doStandardAuthentication]": "Login"}, 572 | headers={"Cookie": "iltest=;PHPSESSID=" + randstring()}) 573 | data = login.data.decode("ascii", errors="ignore") 574 | if (('class="ilFailureMessage"' not in data) 575 | and ('name="il_message_focus"' not in data) 576 | and (('class="ilBlockContent"' in data) 577 | or ('class="ilAdminRow"' in data))): 578 | pout("ilias_defaultpw", url, "root/homer") 579 | except (ConnectionRefusedError, ConnectionResetError, 580 | urllib3.exceptions.HTTPError): 581 | pass 582 | 583 | 584 | @DEFAULT 585 | def test_cgiecho(url): 586 | for pre in [url + "/cgi-bin/cgiecho", url + "/cgi-sys/cgiecho"]: 587 | try: 588 | r = pool.request("GET", pre + "/" + randstring()) 589 | if r.status == 500 and "

cgiemail" in r.data.decode("ascii", errors="ignore"): 590 | pout("cgiecho", pre) 591 | except (ConnectionRefusedError, ConnectionResetError, 592 | urllib3.exceptions.HTTPError): 593 | pass 594 | 595 | 596 | @DEFAULT 597 | def test_phpunit_eval(url): 598 | try: 599 | r = pool.request("POST", url + "/vendor/phpunit/phpunit/src/Util/PHP/eval-stdin.php", 600 | body='" + reflect, 614 | retries=False, redirect=False) 615 | if not r.data.decode("ascii", errors="ignore").startswith("" + reflect): 616 | return 617 | headers = {k.lower(): v for k, v in r.headers.items()} 618 | if ("content-type" in headers) and headers["content-type"].startswith("text/plain"): 619 | return 620 | pout("acmereflect", url + "/.well-known/acme-challenge/reflect") 621 | 622 | except (urllib3.exceptions.HTTPError, UnicodeError, 623 | ConnectionRefusedError): 624 | return 625 | 626 | 627 | @DEFAULT 628 | def test_drupaldb(url): 629 | r = fetchpartial(url + "/sites/default/files/.ht.sqlite", 20, binary=True) 630 | if r and r[0:13] == b"SQLite format": 631 | pout("drupaldb", url + "/sites/default/files/.ht.sqlite") 632 | 633 | 634 | @DEFAULT 635 | def test_phpwarnings(url): 636 | try: 637 | r = pool.request("GET", url, headers={"Cookie": "PHPSESSID=in_vålíd"}) 638 | if ("The session id is too long or contains illegal characters" 639 | in r.data.decode("ascii", errors="ignore")): 640 | pout("phpwarnings", url) 641 | except (urllib3.exceptions.HTTPError, UnicodeError, 642 | ConnectionRefusedError): 643 | pass 644 | 645 | 646 | @DEFAULT 647 | def test_adminer(url): 648 | r = fetcher(url + "/adminer.php") 649 | if "adminer.org" in r: 650 | pout("adminer", url + "/adminer.php") 651 | 652 | 653 | @DEFAULT 654 | def test_elmah(url): 655 | r = fetcher(url + "/elmah.axd") 656 | if "Error Log for" in r: 657 | pout("elmah", url + "/elmah.axd") 658 | r = fetcher(url + "/scripts/elmah.axd") 659 | if "Error Log for" in r: 660 | pout("elmah", url + "/scripts/elmah.axd") 661 | 662 | 663 | @DEFAULT 664 | def 
test_citrix_rce(url): 665 | try: 666 | r = pool.request("GET", url + "/vpn/../vpns/portal/tips.html", 667 | retries=False, redirect=False, 668 | headers={"NSC_USER": "x", "NSC_NONCE": "x"}) 669 | if '

' in r.data.decode("ascii", errors="ignore"): 670 | pout("citrix_rce", url + "/vpn/../vpns/portal/tips.html") 671 | except (urllib3.exceptions.HTTPError, UnicodeError, 672 | ConnectionRefusedError): 673 | pass 674 | 675 | 676 | @DEFAULT 677 | def test_installer(url): 678 | r = getmainpage(url) 679 | if (mainpage_cache[url]["location"].endswith("wp-admin/setup-config.php") 680 | or 'href="wp-admin/css/install.css"' in r): 681 | pout("installer_wordpress", url) 682 | elif mainpage_cache[url]["location"].endswith("installation/index.php"): 683 | pout("installer_joomla", url) 684 | elif mainpage_cache[url]["location"].endswith("typo3/install.php"): 685 | pout("installer_typo3", url) 686 | elif mainpage_cache[url]["location"].endswith("install.php"): 687 | pout("installer_drupal", url) 688 | elif mainpage_cache[url]["location"].endswith("serendipity_admin.php"): 689 | pout("installer_s9y", url) 690 | elif "LocalSettings.php not found" in r: 691 | pout("installer_mediawiki", url) 692 | elif "8 easy steps and will take around 5 minutes" in r: 693 | pout("installer_matomo", url) 694 | elif "Create an admin account" in r: 695 | pout("installer_nextcloud", url) 696 | 697 | 698 | @DEFAULT 699 | def test_wpsubdir(url): 700 | r, redir = fetcher(url + "/wordpress/", getredir=True) 701 | if (redir.endswith("wp-admin/setup-config.php") 702 | or 'href="wp-admin/css/install.css"' in r): 703 | pout("wpsubdir", url + "/wordpress/") 704 | 705 | 706 | @DEFAULT 707 | def test_telescope(url): 708 | r = fetcher(url + "/telescope", geterrpage=True) 709 | if "Laravel Telescope" in r: 710 | pout("telescope", url + "/telescope") 711 | elif "The Telescope assets are not published" in r: 712 | pout("telescope_inactive", url + "/telescope") 713 | 714 | 715 | @DEFAULT 716 | def test_vb_test(url): 717 | r = fetcher(url + "/vb_test.php") 718 | if "vBulletin Test Script" in r: 719 | pout("vb_test", url + "/vb_test.php") 720 | 721 | 722 | @DEFAULT 723 | def test_headerinject(url): 724 | rnd = 
randstring() 725 | try: 726 | r = pool.request("GET", f"{url}/%0D%0A{rnd}:1", retries=False, redirect=False) 727 | if rnd in r.headers: 728 | pout("headerinject", f"{url}/%0D%0A{rnd}:1") 729 | except (urllib3.exceptions.HTTPError, UnicodeError, 730 | ConnectionRefusedError): 731 | pass 732 | 733 | 734 | @DEFAULT 735 | def test_wpdebug(url): 736 | r = fetcher(url + "/wp-content/debug.log") 737 | if re.match(r"^\[\d\d-\w\w\w-\d\d\d\d ", r): 738 | pout("wpdebug", url + "/wp-content/debug.log") 739 | 740 | 741 | @DEFAULT 742 | def test_thumbsdb(url): 743 | r = fetcher(url + "/Thumbs.db", binary=True) 744 | if r and r[0:8] == b"\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1": 745 | pout("thumbsdb", url + "/Thumbs.db") 746 | 747 | 748 | @DEFAULT 749 | def test_duplicator(url): 750 | for fn in ["installer.php", "installer-backup.php"]: 751 | r = fetcher(f"{url}/{fn}") 752 | if "/dup-installer/main.installer.php" in r: 753 | pout("duplicator", f"{url}/{fn}") 754 | for fn in ["backups-dup-pro", "backups-dup-lite"]: 755 | r = fetcher(f"{url}/wp-content/{fn}/") 756 | if ">Index of /" in r: 757 | pout("duplicator_dirlisting", f"{url}/wp-content/{fn}/") 758 | 759 | 760 | @DEFAULT 761 | def test_desktopini(url): 762 | r = fetcher(url + "/desktop.ini") 763 | if "[\x00.\x00S\x00h\x00e\x00l\x00l\x00C\x00l\x00a\x00s\x00s" in r: 764 | pout("desktopini", url + "/desktop.ini") 765 | r = fetcher(url + "/Desktop.ini") 766 | if "[\x00.\x00S\x00h\x00e\x00l\x00l\x00C\x00l\x00a\x00s\x00s" in r: 767 | pout("desktopini", url + "/Desktop.ini") 768 | 769 | 770 | @DEFAULT 771 | def test_postdebug(url): 772 | try: 773 | r = pool.request("POST", url, retries=False, redirect=False) 774 | data = r.data.decode("ascii", errors="ignore") 775 | if ( 776 | "The POST method is not supported for" in data 777 | and "Symfony\\Component\\HttpKernel\\Exception" in data 778 | ): 779 | pout("postdebug_laravel", url + " POST") 780 | elif "Symfony Exception" in data and '<div class="exception-' in data: 781 | 
pout("postdebug_symfony", url + " POST") 782 | elif "<title>Action Controller: Exception caught" in data: 783 | pout("postdebug_rails", url + " POST") 784 | except (urllib3.exceptions.HTTPError, UnicodeError, ConnectionRefusedError): 785 | pass 786 | 787 | 788 | @DEFAULT 789 | def test_djangodebug(url): 790 | what404 = check404(url) 791 | if what404 and "you have <code>DEBUG = True</code>" in what404["content"]: 792 | pout("djangodebug", what404["rndurl"]) 793 | 794 | 795 | @DEFAULT 796 | def test_symfonydebug(url): 797 | what404 = check404(url) 798 | if what404 and "<title>No route found" in what404["content"] and \ 799 | "Symfony Exception" in what404["content"]: 800 | pout("symfonydebug", what404["rndurl"]) 801 | 802 | 803 | @DEFAULT 804 | @HOSTNAME 805 | def test_axfr(qhost): 806 | try: 807 | if "resolve" in dir(dns.resolver): 808 | ns = dns.resolver.resolve(qhost, "NS") 809 | else: # dnspython before 2.0 810 | ns = dns.resolver.query(qhost, "NS") 811 | except (dns.exception.DNSException, dns.exception.Timeout, 812 | ConnectionResetError, ConnectionRefusedError, 813 | EOFError, socket.gaierror, TimeoutError, OSError): 814 | return 815 | for rr in ns.rrset: 816 | r = str(rr) 817 | ipv4 = [] 818 | ipv6 = [] 819 | try: 820 | if "resolve" in dir(dns.resolver): 821 | ipv4 = dns.resolver.resolve(r, "a").rrset 822 | ipv6 = dns.resolver.resolve(r, "aaaa").rrset 823 | else: # dnspython before 2.0 824 | ipv4 = dns.resolver.query(r, "a").rrset 825 | ipv6 = dns.resolver.query(r, "aaaa").rrset 826 | except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN, 827 | dns.resolver.LifetimeTimeout): 828 | pass 829 | ips = [] 830 | for ip in ipv4: 831 | ips.append(str(ip)) 832 | for ip in ipv6: 833 | ips.append(str(ip)) 834 | for ip in ips: 835 | try: 836 | axfr = dns.zone.from_xfr(dns.query.xfr(ip, qhost)) 837 | if axfr: 838 | pout("axfr", qhost, r) 839 | except (dns.exception.DNSException, dns.exception.Timeout, 840 | ConnectionResetError, ConnectionRefusedError, 841 | EOFError, 
socket.gaierror, TimeoutError, OSError): 842 | pass 843 | 844 | 845 | @DEFAULT 846 | @HOSTNAME 847 | def test_openmonit(qhost): 848 | url = f"http://{qhost}:2812/" 849 | headers = urllib3.util.make_headers(basic_auth="admin:monit") 850 | try: 851 | r = pool.request("GET", url, headers=headers) 852 | if "<title>Monit:" in r.data.decode("ascii", errors="ignore"): 853 | pout("openmonit", url) 854 | except (urllib3.exceptions.HTTPError, UnicodeError, 855 | ConnectionRefusedError): 856 | pass 857 | 858 | 859 | @DEFAULT 860 | @HOSTNAME 861 | def test_openelasticsearch(qhost): 862 | headers = urllib3.util.make_headers(basic_auth="admin:admin") 863 | try: 864 | r = pool.request("GET", f"http://{qhost}:9200", headers=headers) 865 | if '"cluster_name" :' in r.data.decode("ascii", errors="ignore"): 866 | pout("openelasticsearch", f"http://{qhost}:9200") 867 | except (urllib3.exceptions.HTTPError, UnicodeError, 868 | ConnectionRefusedError): 869 | pass 870 | try: 871 | r = pool.request("GET", f"https://{qhost}:9200", headers=headers) 872 | if '"cluster_name" :' in r.data.decode("ascii", errors="ignore"): 873 | pout("openelasticsearch", f"https://{qhost}:9200") 874 | except (urllib3.exceptions.HTTPError, UnicodeError, 875 | ConnectionRefusedError): 876 | pass 877 | 878 | 879 | @INFO 880 | def test_drupal(url): 881 | r = fetcher(url + "/CHANGELOG.txt") 882 | try: 883 | if r != "": 884 | result = re.findall("Drupal [0-9.]*", r) 885 | version = result[0][7:] 886 | pout("drupal", url, version) 887 | r = fetcher(url + "/core/CHANGELOG.txt") 888 | if r != "": 889 | result = re.findall("Drupal [0-9.]*", r) 890 | version = result[0][7:] 891 | pout("drupal", url, version) 892 | except IndexError: 893 | pass 894 | 895 | 896 | @INFO 897 | def test_wordpress(url): 898 | r = getmainpage(url) 899 | try: 900 | p = lxml.html.document_fromstring(r.encode()) 901 | except lxml.etree.ParserError: 902 | return 903 | g = p.xpath("//meta[@name='generator']/@content") 904 | if g and 
g[0].startswith("WordPress "): 905 | version = g[0].split(" ", 1)[1] 906 | if set(version).issubset("0123456789."): 907 | pout("wordpress", url, version) 908 | 909 | 910 | @INFO 911 | def test_mailman(url): 912 | murl = f"{url}/mailman/listinfo" 913 | r = fetcher(murl) 914 | if "Delivered by Mailman" in r: 915 | ver = re.findall("version ([0-9.]+)", r) 916 | if len(ver) > 0: 917 | ver = ver[0] 918 | else: 919 | ver = "unknown" 920 | if "There currently are no publicly-advertised" in r: 921 | pout("mailman_unused", f"{murl} {ver}") 922 | else: 923 | pout("mailman", f"{murl} {ver}") 924 | 925 | 926 | @INFO 927 | def test_django_staticfiles_json(url): 928 | furl = url + "/static/staticfiles.json" 929 | data = fetcher(furl) 930 | try: 931 | parsed = json.loads(data) 932 | except json.JSONDecodeError: 933 | pass 934 | else: 935 | if isinstance(parsed, dict) and "paths" in parsed: 936 | pout("django_staticfiles_json", furl) 937 | 938 | 939 | @INFO 940 | def test_composer(url): 941 | for c in ["composer.json", "composer.lock"]: 942 | furl = url + "/" + c 943 | r = fetcher(furl) 944 | if '"require":' in r or '"packages":' in r: 945 | pout("composer", furl) 946 | 947 | 948 | @INFO 949 | def test_phpinfo(url): 950 | for fn in ["phpinfo.php", "info.php", "i.php", "test.php"]: 951 | r = fetcher(url + "/" + fn) 952 | if "phpinfo()" in r: 953 | pout("phpinfo", url + "/" + fn) 954 | 955 | 956 | def new_excepthook(etype, value, traceback): 957 | if etype is KeyboardInterrupt: 958 | pdebug("Interrupted by user...") 959 | sys.exit(1) 960 | 961 | print("Oh oh... an unhandled exception has happened. 
This shouldn't be.") 962 | print("Please report a bug and include all output.") 963 | print() 964 | print("called with") 965 | print(" ".join(sys.argv)) 966 | print() 967 | sys.__excepthook__(etype, value, traceback) 968 | 969 | 970 | sys.excepthook = new_excepthook 971 | 972 | 973 | parser = argparse.ArgumentParser() 974 | parser.add_argument("hosts", nargs="+", help="hostname to scan") 975 | parser.add_argument("-t", "--tests", help="Comma-separated tests to run.") 976 | parser.add_argument("--useragent", help="User agent to send") 977 | parser.add_argument("--nowww", action="store_true", 978 | help="Skip scanning www.[host]") 979 | parser.add_argument("--nohttp", action="store_true", 980 | help="Don't scan http") 981 | parser.add_argument("--nohttps", action="store_true", 982 | help="Don't scan https") 983 | parser.add_argument("-i", "--info", action="store_true", 984 | help="Enable all info tests (no bugs/security vulnerabilities)") 985 | parser.add_argument("-n", "--noisy", action="store_true", 986 | help="Show noisy messages that indicate boring bugs, but no security issue") 987 | parser.add_argument("-p", "--path", default="", action="store", type=str, 988 | help="Base path on server (scans root dir by default)") 989 | parser.add_argument("-j", "--json", action="store_true", 990 | help="Produce JSON output") 991 | parser.add_argument("-d", "--debug", action="store_true", 992 | help="Show detailed debugging info") 993 | args = parser.parse_args() 994 | 995 | # Initializing global pool manager 996 | user_agent = {"user-agent": "Mozilla/5.0 (X11; Linux x86_64; rv:53.0) Gecko/20100101 Firefox/53.0"} 997 | if args.useragent: 998 | user_agent = {"user-agent": args.useragent} 999 | urllib3_major = int(urllib3.__version__.split(".", maxsplit=1)[0]) 1000 | if urllib3_major >= 2: 1001 | pool = urllib3.PoolManager(10, headers=user_agent, cert_reqs="CERT_NONE", # noqa: DUO132 1002 | retries=False, timeout=2, ssl_minimum_version=ssl.TLSVersion.SSLv3) 1003 | else: 1004 | 
pool = urllib3.PoolManager(10, headers=user_agent, cert_reqs="CERT_NONE", # noqa: DUO132 1005 | retries=False, timeout=2) 1006 | 1007 | # This is necessary for directory traversal attacks like citrix_cve 1008 | urllib3.util.url.NORMALIZABLE_SCHEMES = () 1009 | 1010 | if args.tests is None: 1011 | tests = [g for f, g in locals().items() if hasattr(g, "_is_default_test")] 1012 | else: 1013 | tests = [] 1014 | try: 1015 | for x in args.tests.split(","): 1016 | tests.append(locals()["test_" + x]) 1017 | except KeyError: 1018 | print(f"Test {x} does not exist") 1019 | sys.exit(1) 1020 | 1021 | if args.info: 1022 | tests += [g for f, g in locals().items() if hasattr(g, "_is_info_test")] 1023 | 1024 | path = args.path.rstrip("/") 1025 | if path != "" and path[0] != "/": 1026 | path = "/" + path 1027 | if path != "": 1028 | pdebug(f"Path: {path}") 1029 | 1030 | hosts = list(args.hosts) 1031 | if not args.nowww: 1032 | for h in args.hosts: 1033 | hosts.append("www." + h) 1034 | 1035 | for i, h in enumerate(hosts): 1036 | if h.startswith(("http://", "https://")): 1037 | print("ERROR: Please run snallygaster with a hostname, not a URL.") 1038 | sys.exit(1) 1039 | try: 1040 | hosts[i] = h.encode("idna").decode("ascii") 1041 | except UnicodeError: 1042 | print("ERROR: Invalid hostname") 1043 | sys.exit(1) 1044 | if h != hosts[i]: 1045 | pdebug(f"Converted {h} to {hosts[i]}") 1046 | 1047 | pdebug(f"All hosts: {','.join(hosts)}") 1048 | 1049 | 1050 | json_out = [] 1051 | for host in hosts: 1052 | pdebug(f"Scanning {host}") 1053 | for test in tests: 1054 | pdebug(f"Running {test.__name__} test") 1055 | if hasattr(test, "_is_hostname_test"): 1056 | test(host) 1057 | else: 1058 | if not args.nohttp: 1059 | test("http://" + host + path) 1060 | if not args.nohttps: 1061 | test("https://" + host + path) 1062 | 1063 | # clear all sockets 1064 | pool.clear() 1065 | 1066 | if args.json: 1067 | print(json.dumps(json_out)) 1068 | 
--------------------------------------------------------------------------------