├── .gitignore ├── LICENSE ├── README.md ├── clean.sh ├── keywords.txt ├── ldaphelper.py ├── persistence.py ├── reg_gen.py ├── regulars.txt ├── requirements.txt ├── smbat.py ├── smbsr.py └── worker.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.log 2 | *.db 3 | *.csv 4 | __pycache__/ 5 | uncpaths.txt 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SMBAT - Finally the AIO SMB Tool 2 | SMBAT merges the features implemented for SMBSR (find secrets in shares) and RSMBI (assess the RW permission that a user has among all the target shares). 3 | Using the **-mode** parameter it is possible to run SMBAT as SMBSR,RSMBI or as full power SMBSR/RSMBI. 
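For instance (the target, credentials and domain below are placeholders), the same scan can be launched with either brain, or with both at once:

```bash
# secrets hunting only (SMBSR brain) -- placeholder target and credentials
sudo python3 smbat.py -mode smbsr -target 192.168.1.10 -username user -password pass -domain corp.local -smbcreds /tmp/smbcreds -wordlist keywords.txt

# writable-share mapping only (RSMBI brain)
sudo python3 smbat.py -mode rsmbi -target 192.168.1.10 -username user -password pass -domain corp.local -smbcreds /tmp/smbcreds

# both brains in a single run (the default)
sudo python3 smbat.py -mode both -target 192.168.1.10 -username user -password pass -domain corp.local -smbcreds /tmp/smbcreds -wordlist keywords.txt
```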
4 | As its "parents", this tool works in two phases:
5 | 
6 | * **Enumeration**: based on the target (CIDR, Computer Objects from AD, IP list, ...), SMBAT uses the provided credentials to enumerate the available shares and build a dictionary of targets (server: list(shares)). The pysmb library is used for this step (see the sketch right after this list).
7 | * **Action**: based on the **-mode** parameter, SMBAT carries out its main duties; during this phase the SMB shares are mounted in a temp folder and accessed "locally", then unmounted and deleted.
8 | 
9 | Results are saved in a SQLite database and can also be exported to CSV.
10 | 
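A minimal sketch of the enumeration step, assuming pysmb is installed (the helper name and the share filtering are illustrative; the real logic lives in smbat.py and worker.py):

```python
# Illustrative sketch only: list the shares of every target with pysmb and
# collect them into the {server: [share names]} dictionary used later on.
from smb.SMBConnection import SMBConnection

def enumerate_shares(targets, username, password, domain, fake_hostname="localhost"):
    target_dict = {}
    for server in targets:
        conn = SMBConnection(username, password, fake_hostname, 'netbios-server-name',
                             domain, use_ntlm_v2=True, is_direct_tcp=True)
        try:
            conn.connect(server, 445)
            # skip administrative shares such as IPC$ or ADMIN$
            target_dict[server] = [s.name for s in conn.listShares() if not s.isSpecial]
        except Exception as e:
            print(f"Could not enumerate {server}: {e}")
        finally:
            conn.close()
    return target_dict
```

The Action phase then mounts every `\\server\share` from this dictionary and works on the files locally.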
11 | ## SMBSR Brain
12 | 
13 | SMBSR considers something interesting based on its:
14 | 
15 | * Content
16 | * Extension
17 | * Name
18 | 
19 | The interesting keywords the tool should look for are defined via the command line, together with:
20 | 
21 | * File extension blacklist
22 | * Shares blacklist
23 | * Folder blacklist (watch out, subfolders are skipped as well)
24 | * Number of threads
25 | * Should I masscan or not?
26 | * Interesting file extensions (I guess something like ppk, kdbx, ...)
27 | * Maximum file size (bytes) allowed to be checked (believe me, too big might take some time)
28 | * Should I export the results in two nice CSV files?
29 | * How deep should I look into subfolders?
30 | * Wordlist of regular expressions to match
31 | * Domain Controller IP for the LDAP bind
32 | * Other common and required ones
33 | 
34 | The database contains one table for all the matches, called smbsr, made of the following columns:
35 | 
36 | * file
37 | * share
38 | * ip
39 | * position
40 | * matchedWith
41 | * Creation Date
42 | * Last Modified Date
43 | * Last Accessed Date
44 | * First Time Found Date
45 | * Last Time Found Date
46 | * runTag of the session
47 | * Extract of the text matched (25 chars before and after the interesting match)
48 | * Clickable finding to manually check the result
49 | 
50 | And another table for the interesting file list, containing the following columns:
51 | 
52 | * file
53 | * share
54 | * ip
55 | * Creation Date
56 | * Last Modified Date
57 | * Last Accessed Date
58 | * First Time Found Date
59 | * Last Time Found Date
60 | * runTag of the session
61 | * Clickable finding to manually check the result
62 | 
63 | ### Supported Files
64 | 
65 | SMBSR learned how to read:
66 | 
67 | * .csv via python builtins
68 | * .doc via antiword
69 | * .docx via python-docx2txt
70 | * .eml via python builtins
71 | * .epub via ebooklib
72 | * .gif via tesseract-ocr
73 | * .jpg and .jpeg via tesseract-ocr
74 | * .json via python builtins
75 | * .html and .htm via beautifulsoup4
76 | * .mp3 via sox, SpeechRecognition, and pocketsphinx
77 | * .msg via msg-extractor
78 | * .odt via python builtins
79 | * .ogg via sox, SpeechRecognition, and pocketsphinx
80 | * .pdf via pdftotext (default) or pdfminer.six
81 | * .png via tesseract-ocr
82 | * .pptx via python-pptx
83 | * .ps via ps2text
84 | * .rtf via unrtf
85 | * .tiff and .tif via tesseract-ocr
86 | * .txt via python builtins
87 | * .wav via SpeechRecognition and pocketsphinx
88 | * .xlsx via xlrd
89 | * .xls via xlrd
90 | 
91 | ### reg_gen.py
92 | 
93 | With the latest update SMBSR gained the power of looking for secrets that match a given regular expression (see the regulars.txt file, which contains some good examples
94 | to match). Given this new superpower, I have also implemented a new script which, given a wordlist, generates a list of regular expressions matching the password patterns
95 | it found in the wordlist. Before printing everything out, the list of regular expressions is deduplicated (sort -u style). The script could be optimized when a pattern contains, for example,
96 | two or more ascii_lower characters in a row, but it does not do that yet.
97 | 
98 | ## RSMBI Brain
99 | 
100 | RSMBI is a Python tool that answers the question: what are the writable shares in this big domain?
101 | RSMBI connects to each target and mounts the available shares under /tmp (this can be changed). Once the shares are successfully mounted, the threads (or the
102 | solo one) start (os.)walking all the folders recursively, trying to get a file handle with write rights. If the handle is obtained successfully, the UNC path of that file
103 | is saved within the database (this time also in a clickable format). Once a share is fully analyzed, the folder is unmounted (gracefully or lazily).
104 | Results are saved in a SQLite database and also exported to a nice CSV.
105 | 
106 | Accepted input targets are:
107 | 
108 | * UNC paths
109 | * CIDR
110 | * IP address(es)
111 | * Computer Objects from LDAP; RSMBI retrieves them for you
112 | 
113 | The -username and -password passed via the command line are used by RSMBI to enumerate shares using pysmb and to retrieve the list of computer objects from Active Directory
114 | via the LDAP protocol.
115 | The content of the smbcreds file (needed for the mount) must be as follows:
116 | 
117 | ```
118 | username=ob
119 | password=ciaogrande
120 | domain=ciao.grande
121 | ```
122 | 
123 | ## Usage
124 | 
125 | For instance, from the project folder:
126 | 
127 | ```bash
128 | sudo python3 smbat.py -username $username -password $password -domain ciaogrande.local -smbcreds /tmp/smbcreds -csv -debug -mode both -wordlist keywords.txt -uncpaths uncpaths.txt -file-interesting ppk,kdbx,pfx -multithread -T 30
129 | 
130 | ```
131 | 
132 | The help message also contains some guidelines:
133 | 
134 | ```text
135 | usage: smbat.py [-h] [-username USERNAME] [-password PASSWORD] [-domain DOMAIN] [-fake-hostname FAKE_HOSTNAME] [-multithread] [-logfile LOGFILE] [-dbfile DBFILE]
136 |                 [-share-black SHARE_BLACK] [-local-path LOCAL_PATH] [-debug] [-target TARGET] [-target-list TARGET_LIST] [-tag TAG] [-ldap] [-dc-ip DC_IP] [-T T]
137 |                 [-masscan] [-smbcreds SMBCREDS] [-uncpaths UNCPATHS] [-csv] [-mode MODE] [-regulars REGULARS] [-wordlist WORDLIST] [-hits HITS]
138 |                 [-file-interesting FILE_INTERESTING] [-max-size MAX_SIZE] [-file-extensions-black FILE_EXTENSIONS_BLACK] [-regular-exp REGULAR_EXP]
139 | 
140 | SMB @udit Tool
141 | 
142 | optional arguments:
143 |   -h, --help            show this help message and exit
144 |   -username USERNAME    Username for authenticated scan
145 |   -password PASSWORD    Password for authenticated scan
146 |   -domain DOMAIN        Domain for authenticated scan, please use FQDN
147 |   -fake-hostname FAKE_HOSTNAME
148 |                         Computer hostname SMB connection will be from
149 |   -multithread          Assign a thread to any share to check
150 |   -logfile LOGFILE      Log file path
151 |   -dbfile DBFILE        DB file path
152 |   -share-black SHARE_BLACK
153 |                         Blacklist of shares
154 |   -local-path LOCAL_PATH
155 |                         Path to folder where to mount the shares, default set to /tmp
156 |   -debug                Verbose logging debug mode on
157 |   -target TARGET        IP address, CIDR or hostname
158 |   -target-list TARGET_LIST
159 |                         Path to file containing a list of targets
160 |   -tag TAG              Label the run
161 |   -ldap                 Query LDAP to retrieve the list of computer objects in a given domain
162 |   -dc-ip DC_IP          DC IP of the domain you want to retrieve computer objects from
163 |   -T T
Define the number of thread to use, default set to 10 164 | -masscan Scan for 445 before trying to analyze the target 165 | -smbcreds SMBCREDS Path to the file containing the SMB credential 166 | -uncpaths UNCPATHS Path to the file containing the list of UNCPATHS you want to scan 167 | -csv Export results to CSV files in the project folder 168 | -mode MODE Choose between SMBSR,RSMBI and Both 169 | -regulars REGULARS File containing regex expression to match [SMBSR] 170 | -wordlist WORDLIST File containing the string to look for [SMBSR] 171 | -hits HITS Max findings per file [SMBSR] 172 | -file-interesting FILE_INTERESTING 173 | Comma separated file extensions you want to be notified about [SMBSR] 174 | -max-size MAX_SIZE Maximum size of the file to be considered for scanning (bytes) [SMBSR] 175 | -file-extensions-black FILE_EXTENSIONS_BLACK 176 | Comma separated file extensions to skip while secrets harvesting [SMBSR] 177 | -regular-exp REGULAR_EXP 178 | File containing regex expression to match [SMBSR] 179 | 180 | ``` 181 | 182 | # Credits 183 | 184 | * Everyone who is going to help out finding issues and improving the tool 185 | * [Retrospected](https://github.com/Retrospected): For helping out every Friday with debugging the code and brainstorming on new features -------------------------------------------------------------------------------- /clean.sh: -------------------------------------------------------------------------------- 1 | rm *.log 2 | rm *.csv 3 | rm *.db 4 | -------------------------------------------------------------------------------- /keywords.txt: -------------------------------------------------------------------------------- 1 | password 2 | ciao 3 | grande 4 | bomber 5 | -------------------------------------------------------------------------------- /ldaphelper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from ldap3 import Connection, Server, ANONYMOUS, SIMPLE, SYNC, ASYNC, KERBEROS 4 | from ldap3 import Server, Connection, SAFE_SYNC, SASL, GSSAPI, DSA, SUBTREE, NTLM 5 | from subprocess import Popen, PIPE 6 | import logging 7 | import re 8 | 9 | logger = logging.getLogger('rSMBi') 10 | 11 | 12 | class LDAPHelper(): 13 | 14 | def __init__(self, options): 15 | self.options = options 16 | 17 | def kerberosAuth(self): 18 | userid = self.options.username 19 | password = self.options.password 20 | realm = (self.options.domain).upper() 21 | kinit = '/usr/bin/kinit' 22 | kinit_args = [kinit, '%s@%s' % (userid, realm)] 23 | kinit = Popen(kinit_args, stdin=PIPE, stdout=PIPE, stderr=PIPE) 24 | kinit.communicate(input="{}\n".format(password).encode("utf-8")) 25 | kinit.wait() 26 | 27 | def retrieveComputerObjects(self): 28 | 29 | server = Server(self.options.dc_ip, get_info=DSA) 30 | authstring = self.options.username + \ 31 | '@' + (self.options.domain).upper() 32 | 33 | try: 34 | conn = Connection(server, authstring, client_strategy=SAFE_SYNC, 35 | auto_bind=True, authentication=SASL, sasl_mechanism=GSSAPI) 36 | 37 | except Exception as e: 38 | logger.error("exception in LDAP Connection") 39 | logger.error(e) 40 | 41 | dn = server.info.other["defaultNamingContext"][0] 42 | 43 | # status, result, response, _ = conn.search(dn, '(objectClass=Computer)', attributes=['dNSHostName'], paged_size=2000) 44 | status, result, response, _ = conn.search( 45 | dn, '(&(objectCategory=Computer)(name=*))', search_scope=SUBTREE, attributes=['dNSHostName'], paged_size=500) 46 | 47 | computerObjectsList = [] 48 | 
total_entries = len(response) 49 | for co in response: 50 | try: 51 | computerObjectsList.append( 52 | (co['attributes']['dNSHostName'])[0]) 53 | # print ((co['attributes']['dNSHostName'])[0]) 54 | except Exception as e: 55 | logger.warning("Error retrieving dNSHostName for ") 56 | logger.warning(co) 57 | 58 | cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie'] 59 | while cookie: 60 | status, result, response, _ = conn.search( 61 | dn, '(&(objectCategory=Computer)(name=*))', search_scope=SUBTREE, attributes=['dNSHostName'], paged_size=500, paged_cookie=cookie) 62 | total_entries += len(response) 63 | for co in response: 64 | try: 65 | computerObjectsList.append( 66 | (co['attributes']['dNSHostName'])[0]) 67 | except Exception as e: 68 | logger.warning("Error retrieving dNSHostName for ") 69 | logger.warning(co) 70 | cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie'] 71 | logger.info("Retrieved computer objects: ") 72 | logger.info(total_entries) 73 | return computerObjectsList 74 | 75 | def retrieveComputerObjectsNTLM(self): 76 | 77 | server = Server(self.options.dc_ip, get_info=DSA) 78 | authstring = self.options.domain+"\\"+self.options.username 79 | 80 | try: 81 | conn = Connection(server, authstring, password=self.options.password, 82 | client_strategy=SAFE_SYNC, auto_bind=True, authentication=NTLM) 83 | 84 | except Exception as e: 85 | logger.error("exception in LDAP Connection [NTLM]") 86 | logger.error(e) 87 | 88 | dn = server.info.other["defaultNamingContext"][0] 89 | 90 | status, result, response, _ = conn.search( 91 | dn, '(&(objectCategory=Computer)(name=*))', search_scope=SUBTREE, attributes=['dNSHostName'], paged_size=500) 92 | 93 | computerObjectsList = [] 94 | total_entries = len(response) 95 | domain = "" 96 | for co in response: 97 | try: 98 | computerObjectsList.append( 99 | (co['attributes']['dNSHostName'])[0]) 100 | # print ((co['attributes']['dNSHostName'])[0]) 101 | except Exception as e: 102 | try: 103 | x = re.findall('DC=(?<==)(.*?)(?=(,|$))', co['dn']) 104 | for domainsub in x: 105 | domain += (domainsub[0]) + '.' 106 | 107 | computerObjectsList.append(((co['dn']).split(',')[0]).split( 108 | '=')[1] + '.' + domain[:len(domain)-1]) 109 | # print(((co['dn']).split(',')[0]).split( 110 | # '=')[1] + '.' + domain[:len(domain)-1]) 111 | except Exception as e: 112 | logger.warning("Error retrieving dNSHostName for ") 113 | logger.warning(co) 114 | domain = "" 115 | 116 | cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie'] 117 | while cookie: 118 | status, result, response, _ = conn.search( 119 | dn, '(&(objectCategory=Computer)(name=*))', search_scope=SUBTREE, attributes=['dNSHostName'], paged_size=500, paged_cookie=cookie) 120 | total_entries += len(response) 121 | for co in response: 122 | try: 123 | computerObjectsList.append( 124 | (co['attributes']['dNSHostName'])[0]) 125 | except Exception as e: 126 | try: 127 | x = re.findall('DC=(?<==)(.*?)(?=(,|$))', co['dn']) 128 | for domainsub in x: 129 | domain += (domainsub[0]) + '.' 130 | 131 | computerObjectsList.append(((co['dn']).split(',')[0]).split( 132 | '=')[1] + '.' + domain[:len(domain)-1]) 133 | # print(((co['dn']).split(',')[0]).split( 134 | # '=')[1] + '.' 
+ domain[:len(domain)-1]) 135 | except Exception as e: 136 | logger.warning("Error retrieving dNSHostName for ") 137 | logger.warning(co) 138 | domain = "" 139 | cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie'] 140 | logger.info("Retrieved computer objects: ") 141 | logger.info(total_entries) 142 | return computerObjectsList 143 | -------------------------------------------------------------------------------- /persistence.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | import csv 3 | import threading 4 | import logging 5 | import sys 6 | import datetime 7 | from datetime import datetime 8 | 9 | logger = logging.getLogger('rSMBi') 10 | 11 | 12 | class Database: 13 | def __init__(self, db_file): 14 | self.db_file = db_file 15 | 16 | def connect_database(self): 17 | self.conn = sqlite3.connect(self.db_file, check_same_thread=False) 18 | self.cursor = self.conn.cursor() 19 | self.lock = threading.Lock() 20 | 21 | def create_database(self): 22 | self.connect_database() 23 | try: 24 | rsmbi_match_table = """ CREATE TABLE IF NOT EXISTS rsmbi ( 25 | id integer PRIMARY KEY AUTOINCREMENT, 26 | file text NOT NULL, 27 | share text NOT NULL, 28 | ip text NOT NULL, 29 | tsFirstFound text NOT NULL, 30 | tsLastFound text NOT NULL, 31 | runTag text NOT NULL, 32 | winClickable text NOT NULL 33 | ); """ 34 | 35 | if self.cursor is not None: 36 | self.create_table(rsmbi_match_table) 37 | 38 | except Exception as e: 39 | logger.error( 40 | "Encountered error while creating the database: " + str(e)) 41 | sys.exit(1) 42 | 43 | def exportToCSV(self, tag): 44 | cursor = self.cursor 45 | exportQuery = "SELECT * from rsmbi WHERE runTag = '{tag}\'".format( 46 | tag=tag) 47 | 48 | sr = cursor.execute(exportQuery) 49 | with open('rsmbi_results.csv', 'w') as f: 50 | writer = csv.writer(f) 51 | writer.writerows(sr) 52 | 53 | def commit(self): 54 | self.conn.commit() 55 | 56 | def create_table(self, create_table_sql): 57 | 58 | try: 59 | self.cursor.execute(create_table_sql) 60 | except Exception as e: 61 | logger.error(e) 62 | 63 | def insertFinding(self, filename, share, ip, tag): 64 | now = datetime.now() 65 | date = now.strftime("%d-%m-%Y") 66 | # remove the local path tmp path 67 | 68 | filename = '/'.join(filename.split('/')[3:]) 69 | clickable = ("\\\\" + ip + "\\" + share + 70 | "\\" + filename).replace('/', '\\') 71 | 72 | try: 73 | self.lock.acquire(True) 74 | cursor = self.cursor 75 | 76 | cursor.execute('SELECT id,file FROM rsmbi WHERE ip = ? AND share = ? AND file = ?', ( 77 | ip, share, filename)) 78 | 79 | results = cursor.fetchall() 80 | 81 | if len(results) == 0: 82 | 83 | insertFindingQuery = "INSERT INTO rsmbi (file, share, ip, tsFirstFound, tsLastFound, runTag, winClickable) VALUES (?,?,?,?,?,?,?)" 84 | cursor.execute(insertFindingQuery, 85 | (filename, share, ip, date, date, tag, clickable.replace("/", "\\"))) 86 | self.commit() 87 | else: 88 | 89 | updateQuery = 'UPDATE rsmbi SET tsLastFound = ? WHERE ip = ? AND share = ? AND file= ?' 90 | cursor.execute(updateQuery, (date, ip, share, 91 | filename)) 92 | self.commit() 93 | 94 | updateQuery = 'UPDATE rsmbi SET runTag = ? WHERE ip = ? AND share = ? AND file= ?' 
95 | cursor.execute(updateQuery, (tag, ip, share, 96 | filename)) 97 | 98 | except Exception as e: 99 | logger.error("Error while updating database: " + str(e)) 100 | self.lock.release() 101 | finally: 102 | self.lock.release() 103 | 104 | 105 | class DatabaseSMBSR: 106 | def __init__(self, db_file): 107 | self.db_file = db_file 108 | 109 | def connect_database(self): 110 | self.conn = sqlite3.connect(self.db_file, check_same_thread=False) 111 | self.cursor = self.conn.cursor() 112 | self.lock = threading.Lock() 113 | 114 | def create_database(self): 115 | self.connect_database() 116 | try: 117 | smb_match_table = """ CREATE TABLE IF NOT EXISTS smbsr ( 118 | id integer PRIMARY KEY AUTOINCREMENT, 119 | file text NOT NULL, 120 | share text NOT NULL, 121 | ip text NOT NULL, 122 | position text NOT NULL, 123 | matchedWith text NOT NULL, 124 | tsCreated text NOT NULL, 125 | tsModified text NOT NULL, 126 | tsAccessed text NOT NULL, 127 | tsFirstFound text NOT NULL, 128 | tsLastFound text NOT NULL, 129 | runTag text NOT NULL, 130 | extract text NOT NULL, 131 | winClickable text NOT NULL 132 | ); """ 133 | smb_files_table = """ CREATE TABLE IF NOT EXISTS smbfile ( 134 | id integer PRIMARY KEY AUTOINCREMENT, 135 | file text NOT NULL, 136 | share text NOT NULL, 137 | ip text NOT NULL, 138 | tsCreated text NOT NULL, 139 | tsModified text NOT NULL, 140 | tsAccessed text NOT NULL, 141 | tsFirstFound text NOT NULL, 142 | tsLastFound text NOT NULL, 143 | runTag text NOT NULL, 144 | winClickable text NOT NULL 145 | ); """ 146 | 147 | if self.cursor is not None: 148 | self.create_table(smb_match_table) 149 | self.create_table(smb_files_table) 150 | 151 | except Exception as e: 152 | logger.error( 153 | "Encountered error while creating the database: " + str(e)) 154 | sys.exit(1) 155 | 156 | def exportToCSV(self, tag): 157 | cursor = self.cursor 158 | exportQuery = "SELECT * from smbsr WHERE runTag = '{tag}\'".format( 159 | tag=tag) 160 | exportQueryFile = "SELECT * from smbfile WHERE runTag = '{tag}\'".format( 161 | tag=tag) 162 | 163 | sr = cursor.execute(exportQuery) 164 | with open('smbsr_results.csv', 'w') as f: 165 | writer = csv.writer(f) 166 | writer.writerows(sr) 167 | sf = cursor.execute(exportQueryFile) 168 | with open('smbsrfile_results.csv', 'w') as g: 169 | writer = csv.writer(g) 170 | writer.writerows(sf) 171 | 172 | def commit(self): 173 | self.conn.commit() 174 | 175 | def create_table(self, create_table_sql): 176 | 177 | try: 178 | self.cursor.execute(create_table_sql) 179 | except Exception as e: 180 | logger.error(e) 181 | 182 | def insertFinding(self, filename, share, ip, line, matchedwith, times, tag, text): 183 | 184 | filename = '/'.join(filename.split('/')[3:]) 185 | clickable = ("\\\\" + ip + "\\" + share + 186 | "\\" + filename).replace('/', '\\') 187 | now = datetime.now() 188 | date = now.strftime("%d-%m-%Y") 189 | try: 190 | self.lock.acquire(True) 191 | cursor = self.cursor 192 | results = cursor.execute('SELECT id, extract FROM smbsr WHERE ip = ? AND share = ? AND file = ? AND matchedWith = ? 
AND position = ?', (
193 |                 ip, share, filename, matchedwith, line)).fetchall()
194 | 
195 |             if len(results) == 0:
196 |                 insertFindingQuery = "INSERT INTO smbsr (file, share, ip, position, matchedWith, tsCreated, tsModified, tsAccessed, tsFirstFound, tsLastFound, runTag, extract, winClickable) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)"
197 |                 cursor.execute(insertFindingQuery, (filename, share, ip, line,
198 |                                matchedwith, times[0], times[1], times[2], date, date, tag, text, clickable))
199 |                 self.commit()
200 |             else:
201 |                 textOld = ((results[0])[1])
202 |                 updateQuery = 'UPDATE smbsr SET tsLastFound = ? WHERE ip = ? AND share = ? AND file= ? AND matchedWith = ? AND position = ? '
203 |                 cursor.execute(updateQuery, (date, ip, share,
204 |                                filename, matchedwith, line))
205 |                 self.commit()
206 |                 if textOld != text:
207 |                     updateQuery = 'UPDATE smbsr SET extract = ? WHERE ip = ? AND share = ? AND file = ? AND matchedWith = ? AND position = ?'
208 |                     cursor.execute(updateQuery, (text, ip, share,
209 |                                    filename, matchedwith, line))
210 |                     self.commit()
211 |                 updateQuery = 'UPDATE smbsr SET runTag = ? WHERE ip = ? AND share = ? AND file = ? AND matchedWith = ? AND position = ? AND extract = ?'
212 |                 cursor.execute(updateQuery, (tag, ip, share,
213 |                                filename, matchedwith, line, text))
214 |                 self.commit()
215 |         except Exception as e:
216 |             logger.error(
217 |                 "Error while updating database for secret match: " + str(e))
218 |             # the lock is released once, in the finally block below
219 |         finally:
220 |             self.lock.release()
221 | 
222 |     def insertFileFinding(self, filename, share, ip, times, tag):
223 |         now = datetime.now()
224 |         filename = '/'.join(filename.split('/')[3:])
225 |         date = now.strftime("%d-%m-%Y")
226 |         clickable = ("\\\\" + ip + "\\" + share +
227 |                      "\\" + filename).replace('/', '\\')
228 |         try:
229 |             self.lock.acquire(True)
230 |             cursor = self.cursor
231 |             checkQuery = 'SELECT id FROM smbfile WHERE ip = ? AND share = ? AND file = ?'
232 |             results = cursor.execute(
233 |                 checkQuery, (ip, share, filename)).fetchall()
234 | 
235 |             if len(results) == 0:
236 |                 insertFindingQuery = "INSERT INTO smbfile (file, share, ip, tsCreated, tsModified, tsAccessed, tsFirstFound, tsLastFound, runTag, winClickable) VALUES (?,?,?,?,?,?,?,?,?,?)"
237 |                 cursor.execute(insertFindingQuery, (filename, share,
238 |                                ip, times[0], times[1], times[2], date, date, tag, clickable))
239 |                 self.commit()
240 |             else:
241 | 
242 |                 updateQuery = 'UPDATE smbfile SET tsLastFound = ? WHERE ip= ? AND share = ? AND file = ?'
243 |                 cursor.execute(updateQuery, (date, ip, share, filename))
244 |                 self.commit()
245 |                 updateQuery = 'UPDATE smbfile SET runTag = ? WHERE ip= ? AND share = ? AND file = ?'
246 | cursor.execute(updateQuery, (tag, ip, share, filename)) 247 | self.commit() 248 | except Exception as e: 249 | logger.error( 250 | "Error while updating database for file finding: " + str(e)) 251 | self.lock.release() 252 | finally: 253 | self.lock.release() 254 | -------------------------------------------------------------------------------- /reg_gen.py: -------------------------------------------------------------------------------- 1 | 2 | import string 3 | import sys 4 | 5 | def checkChar(x,counter): 6 | lower = set(string.ascii_lowercase) 7 | upper = set(string.ascii_uppercase) 8 | 9 | if x.isspace(): 10 | return "[\s]" 11 | elif x.isalnum() is False: 12 | return """[!@#&_()–\[\{\}\]:;'%,?\/\*~\$\^\+="<>]""" 13 | elif x in lower: 14 | return "[a-z]" 15 | elif x in upper: 16 | return "[A-Z]" 17 | else: 18 | return "[\d]" 19 | 20 | 21 | filepath = input("Wordlist path, please: ") 22 | final = [] 23 | lines = [] 24 | result = "" 25 | try: 26 | with open(filepath, 'rb') as f: 27 | for line in f: 28 | try: 29 | lines.append(line.strip(b'\n')) 30 | except Exception as e: 31 | print ('Error while reading line in the wordlist' + str(e)) 32 | continue 33 | f.close() 34 | except Exception as e: 35 | print (e) 36 | sys.exit(1) 37 | for line in lines: 38 | result = "" 39 | try: 40 | line = line.decode("utf-8") 41 | except Exception as e: 42 | continue 43 | for element in range(0, len(line)): 44 | result += checkChar(line[element],element) 45 | final.append(result) 46 | 47 | final = list( dict.fromkeys(final) ) 48 | 49 | with open("regulars.txt", "a") as f: 50 | for x in final: 51 | f.write("(" +x + ")\n") 52 | f.close() 53 | 54 | -------------------------------------------------------------------------------- /regulars.txt: -------------------------------------------------------------------------------- 1 | (\.accesscontrol\.windows\.net)|(\.graph\.windows\.net)|(\.onmicrosoft\.com)|(\.azure-api\.net)|(\.biztalk\.windows\.net)|(\.blob\.core\.windows\.net)|(\.cloudapp\.net)|(\.cloudapp\.azure\.com)|(\.azurecr\.io)|(\.azurecontainer\.io)|(\.vo\.msecnd\.net)|(\.file\.core\.windows\.net)|(\.azurefd\.net)|(\.management\.core\.windows\.net)|(\.origin\.mediaservices\.windows\.net)|(\.azure-mobile\.net)|(\.queue\.core\.windows\.net)|(\.servicebus\.windows\.net)|(\.database\.windows\.net)|(\.azureedge\.net)|(\.table\.core\.windows\.net)|(\.trafficmanager\.net)|(\.azurewebsites\.net)|(\.visualstudio\.com) 2 | (?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:=|\|\|:|<=|=>|:)(?:'|\"|\s|=|\x60){0,5}([0-9a-z\-_.=]{10,150})(?:['|\"|\n|\r|\s|\x60|;]|$) 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ldap3 2 | pysmb 3 | python_masscan 4 | textract 5 | -------------------------------------------------------------------------------- /smbat.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import os 4 | import sys 5 | from subprocess import * 6 | from smb import * 7 | from smb.SMBConnection import SMBConnection 8 | import argparse 9 | import datetime 10 | from datetime import datetime 11 | import logging 12 | from persistence import Database 13 | from persistence import DatabaseSMBSR 14 | from ldaphelper import LDAPHelper 15 | import random 16 | import string 17 | import re 18 | import ipaddress 19 | import masscan 20 | from threading 
import Lock 21 | from worker import rsmbiworker 22 | 23 | 24 | def listShares(serverName, options): 25 | connection = SMBConnection(options.username, options.password, options.fake_hostname, 26 | 'netbios-server-name', options.domain, use_ntlm_v2=True, is_direct_tcp=True) 27 | try: 28 | connection.connect(serverName, 445) 29 | except Exception as e: 30 | logger.error("Error connecting to: " + serverName + 31 | ", with exception: " + str(e)) 32 | try: 33 | shares = connection.listShares() 34 | except Exception as e: 35 | logger.error("Error while listing shares from: " + 36 | serverName + ", with exception: " + str(e)) 37 | connection.close() 38 | return shares 39 | 40 | 41 | def setUpLogging(options): 42 | # cleaning the handlers 43 | logging.getLogger().handlers = [] 44 | logger.handlers = [] 45 | 46 | logger.setLevel(logging.INFO) 47 | # creating log file handler 48 | handler = logging.FileHandler(options.logfile) 49 | handler.setLevel(logging.INFO) 50 | # creating stdout handler 51 | 52 | stdoutHandler = logging.StreamHandler(sys.stdout) 53 | stdoutHandler.setLevel(logging.INFO) 54 | 55 | # creating a common formatter 56 | formatter = logging.Formatter( 57 | '%(asctime)s - %(name)s - %(levelname)s - %(message)s') 58 | # setting the formatter for each handler 59 | handler.setFormatter(formatter) 60 | stdoutHandler.setFormatter(formatter) 61 | # add handlers to logger 62 | logger.addHandler(handler) 63 | logger.addHandler(stdoutHandler) 64 | 65 | 66 | def setUpLoggingDebug(options): 67 | 68 | # cleaning the handlers 69 | logging.getLogger().handlers = [] 70 | logger.handlers = [] 71 | 72 | logger.setLevel(logging.DEBUG) 73 | # creating log file handler 74 | handler = logging.FileHandler(options.logfile) 75 | handler.setLevel(logging.DEBUG) 76 | # creating stdout handler 77 | 78 | stdoutHandler = logging.StreamHandler(sys.stdout) 79 | stdoutHandler.setLevel(logging.DEBUG) 80 | 81 | # creating a common formatter 82 | formatter = logging.Formatter( 83 | '%(asctime)s - %(name)s - %(levelname)s - %(message)s') 84 | # setting the formatter for each handler 85 | handler.setFormatter(formatter) 86 | stdoutHandler.setFormatter(formatter) 87 | # add handlers to logger 88 | logger.addHandler(handler) 89 | logger.addHandler(stdoutHandler) 90 | 91 | 92 | def setupPersistence(options): 93 | dbs = {} 94 | if (options.mode.upper() == "SMBSR"): 95 | if (options.dbfile == './rsmbi.db'): 96 | options.dbfile = './smbsr.db' 97 | dbs["SMBSR"] = DatabaseSMBSR(options.dbfile) 98 | if not os.path.exists(options.dbfile): 99 | 100 | logger.info("Database not found, creating [SMBSR]") 101 | dbs["SMBSR"].create_database() 102 | logger.info("Database created successfully [SMBSR]") 103 | dbs["SMBSR"].connect_database() 104 | else: 105 | logger.info("Database already existing [SMBSR]") 106 | dbs["SMBSR"].connect_database() 107 | elif (options.mode.upper() == "RSMBI"): 108 | dbs["RSMBI"] = Database(options.dbfile) 109 | if not os.path.exists(options.dbfile): 110 | 111 | logger.info("Database not found, creating") 112 | dbs["RSMBI"].create_database() 113 | logger.info("Database created successfully") 114 | dbs["RSMBI"].connect_database() 115 | else: 116 | logger.info("Database already existing") 117 | dbs["RSMBI"].connect_database() 118 | else: 119 | dbs["SMBSR"] = DatabaseSMBSR("./smbsr.db") 120 | if not os.path.exists("./smbsr.db"): 121 | 122 | logger.info("Database not found, creating [SMBSR]") 123 | dbs["SMBSR"].create_database() 124 | logger.info("Database created successfully [SMBSR]") 125 | 
dbs["SMBSR"].connect_database() 126 | else: 127 | logger.info("Database already existing [SMBSR]") 128 | dbs["SMBSR"].connect_database() 129 | 130 | dbs["RSMBI"] = Database("./rsmbi.db") 131 | if not os.path.exists("./rsmbi.db"): 132 | 133 | logger.info("Database not found, creating") 134 | dbs["RSMBI"].create_database() 135 | logger.info("Database created successfully") 136 | dbs["RSMBI"].connect_database() 137 | else: 138 | logger.info("Database already existing") 139 | dbs["RSMBI"].connect_database() 140 | 141 | return dbs 142 | 143 | 144 | def retrieveComputerObjects(options): 145 | ldaphelperQ = LDAPHelper(options) 146 | ldap_targets = ldaphelperQ.retrieveComputerObjectsNTLM() 147 | return ldap_targets 148 | 149 | 150 | def massScan(toscan): 151 | mass = masscan.PortScanner() 152 | final = [] 153 | try: 154 | mass.scan(','.join(toscan), ports=445, arguments='--rate 1000') 155 | except Exception as e: 156 | logger.error("masscan failed with error: " + str(e)) 157 | sys.exit(1) 158 | for key in mass.scan_result['scan']: 159 | if mass.scan_result['scan'][key]['tcp'][445]['state'] == 'open': 160 | final.append(key) 161 | 162 | return final 163 | 164 | 165 | def setupTagRun(tag): 166 | if tag == "NOLABEL": 167 | now = datetime.now() 168 | date = now.strftime("%d-%m-%Y") 169 | return "RUN-" + date + "-" + \ 170 | ''.join((random.choice(string.ascii_lowercase) for x in range(8))) 171 | return tag 172 | 173 | 174 | def extractCIDR(tempTarget): 175 | cidr = [] 176 | for target in tempTarget: 177 | 178 | ipcheck = re.match( 179 | "^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]{1,2}))$", target) 180 | if ipcheck: 181 | cidr.append(target) 182 | return cidr 183 | 184 | 185 | def fromCIDRtoIPs(toexpand): 186 | 187 | iplist = [str(ip) for ip in ipaddress.IPv4Network(toexpand)] 188 | return iplist 189 | 190 | 191 | def addUncPaths(targetDict, options): 192 | 193 | try: 194 | with open(options.uncpaths) as f: 195 | uncpaths = [line.rstrip() for line in f] 196 | f.close() 197 | except Exception as e: 198 | logger.error("Error while reading the uncpaths file: " + str(e)) 199 | 200 | for uncpath in uncpaths: 201 | server = uncpath.split('/')[2] 202 | path = '/'.join(uncpath.split('/')[3:]) 203 | if server not in targetDict.keys(): 204 | targetDict[server] = [] 205 | targetDict[server].append(path) 206 | else: 207 | targetDict[server].append(path) 208 | 209 | return targetDict 210 | 211 | 212 | def parseTargets(options): 213 | final = [] 214 | # reading target from file if set 215 | if options.target_list != "unset": 216 | try: 217 | with open(options.target_list) as f: 218 | targetsraw = [line.rstrip() for line in f] 219 | f.close() 220 | except Exception as e: 221 | logger.error("Error while reading the target list file: " + str(e)) 222 | # extracting CIDRs from the list, if any 223 | cidrs = extractCIDR(targetsraw) 224 | 225 | # cleaning the targetsraw list from the CIDRs found 226 | for cidr in cidrs: 227 | final = final + fromCIDRtoIPs(cidr) 228 | if cidr in targetsraw: 229 | targetsraw.remove(cidr) 230 | 231 | final = final + targetsraw 232 | 233 | # retrieving targets from ldap if required 234 | if (options.ldap): 235 | computerObjects = retrieveComputerObjects(options) 236 | final = final + computerObjects 237 | # take target from single IP 238 | if (options.target): 239 | if len(extractCIDR(options.target)) > 0: 240 | # here the function to extract ip list from CIDR 241 | final = final + fromCIDRtoIPs([options.target]) 242 | else: 243 | final.append(options.target) 244 | # filtering the targets basing on 
masscan output 245 | if (options.masscan): 246 | final = massScan(final) 247 | 248 | if len(final) == 0 and options.uncpaths == "UNSET": 249 | logger.info("List of targets is empty, exiting ...") 250 | sys.exit(1) 251 | 252 | return final 253 | 254 | 255 | def unleashThreads(options, scope, db, targetDict, lock, targetIPs, wordListDict): 256 | 257 | threads = [] 258 | # workername, options, targetdict, db, lock, scope, targets 259 | 260 | logger.info("Starting with threads") 261 | for thread in range(options.T): 262 | try: 263 | worker = rsmbiworker("Worker-" + str(thread+1), 264 | options, targetDict, db, lock, scope, targetIPs, wordListDict) 265 | worker.start() 266 | threads.append(worker) 267 | except Exception as e: 268 | logger.error("Error while multithreading: " + str(e)) 269 | sys.exit(1) 270 | for thread in threads: 271 | thread.join() 272 | 273 | 274 | def readMatches(options): 275 | 276 | filepath = options.wordlist 277 | file_regular = options.regular_exp 278 | lines = [] 279 | if filepath != 'unset': 280 | try: 281 | with open(filepath) as f: 282 | lines = [line.rstrip() for line in f] 283 | f.close() 284 | # return lines 285 | except Exception as e: 286 | logger.error("Exception while reading the file " + str(e)) 287 | sys.exit(1) 288 | 289 | rlines = [] 290 | if file_regular != 'unset': 291 | 292 | try: 293 | with open(file_regular) as r: 294 | rlines = [line.rstrip() for line in r] 295 | r.close() 296 | except Exception as e: 297 | logger.error( 298 | "Exception while reading the regular expression file " + str(e)) 299 | 300 | to_match_dict = { 301 | 302 | "words": lines, 303 | "regex": rlines 304 | } 305 | return to_match_dict 306 | 307 | 308 | # getting global logger for rSMBI 309 | logger = logging.getLogger('rSMBi') 310 | 311 | if __name__ == '__main__': 312 | 313 | if not os.geteuid() == 0: 314 | sys.exit("\nOnly root can run this script\n") 315 | parser = argparse.ArgumentParser( 316 | add_help=True, description="SMB @udit Tool") 317 | parser.add_argument('-username', action='store', default='anonymous', 318 | type=str, help='Username for authenticated scan') 319 | parser.add_argument('-password', action='store', default='s3cret', 320 | type=str, help='Password for authenticated scan') 321 | parser.add_argument('-domain', action='store', 322 | default='SECRET.LOCAL', help='Domain for authenticated scan, please use FQDN') 323 | parser.add_argument('-fake-hostname', action='store', default='localhost', 324 | help='Computer hostname SMB connection will be from') 325 | parser.add_argument('-multithread', action='store_true', 326 | default=False, help="Assign a thread to any share to check") 327 | parser.add_argument('-logfile', action='store', 328 | default='rsmbi.log', type=str, help='Log file path') 329 | parser.add_argument('-dbfile', action='store', 330 | default='./rsmbi.db', type=str, help='DB file path') 331 | parser.add_argument('-share-black', action='store', type=str, default='none', 332 | help='Blacklist of shares') 333 | parser.add_argument('-local-path', action='store', type=str, default='/tmp', 334 | help='Path to folder where to mount the shares, default set to /tmp') 335 | parser.add_argument('-debug', action='store_true', default=False, 336 | help='Verbose logging debug mode on') 337 | # might be needed to change this 338 | parser.add_argument('-target', action="store", 339 | help='IP address, CIDR or hostname') 340 | parser.add_argument('-target-list', action="store", default='unset', 341 | help='Path to file containing a list of targets') 342 | 
parser.add_argument('-tag', action='store', 343 | default="NOLABEL", type=str, help='Label the run') 344 | parser.add_argument('-ldap', action='store_true', default=False, 345 | help='Query LDAP to retrieve the list of computer objects in a given domain') 346 | parser.add_argument('-dc-ip', action='store', 347 | help='DC IP of the domain you want to retrieve computer objects from') 348 | parser.add_argument('-T', action='store', default=10, 349 | type=int, help="Define the number of thread to use, default set to 10") 350 | parser.add_argument('-masscan', action='store_true', default=False, 351 | help="Scan for 445 before trying to analyze the target") 352 | parser.add_argument('-smbcreds', action='store', 353 | type=str, help='Path to the file containing the SMB credential') 354 | parser.add_argument('-uncpaths', action='store', default="UNSET", 355 | type=str, help='Path to the file containing the list of UNCPATHS you want to scan') 356 | parser.add_argument('-csv', action='store_true', default=False, 357 | help='Export results to CSV files in the project folder') 358 | parser.add_argument('-mode', action='store', 359 | default='both', help="Choose between SMBSR,RSMBI and Both") 360 | parser.add_argument('-regulars', action="store", default='unset', 361 | type=str, help="File containing regex expression to match [SMBSR]") 362 | parser.add_argument('-wordlist', action="store", default='unset', 363 | type=str, help="File containing the string to look for [SMBSR]") 364 | parser.add_argument('-hits', action='store', default=5000, 365 | type=int, help='Max findings per file [SMBSR]') 366 | parser.add_argument('-file-interesting', action='store', default='none', type=str, 367 | help='Comma separated file extensions you want to be notified about [SMBSR]') 368 | parser.add_argument('-max-size', action="store", default=50000, type=int, 369 | help="Maximum size of the file to be considered for scanning (bytes) [SMBSR]") 370 | parser.add_argument('-file-extensions-black', action='store', type=str, default='none', 371 | help='Comma separated file extensions to skip while secrets harvesting [SMBSR]') 372 | parser.add_argument('-regular-exp', action="store", default='unset', 373 | type=str, help="File containing regex expression to match [SMBSR]") 374 | 375 | options = parser.parse_args() 376 | 377 | if options.debug: 378 | setUpLoggingDebug(options) 379 | else: 380 | setUpLogging(options) 381 | 382 | dbs = setupPersistence(options) 383 | options.tag = setupTagRun(options.tag) 384 | targetIPs = parseTargets(options) 385 | targetDict = {} 386 | lock = Lock() 387 | 388 | if options.mode.upper() == "SMBSR" or options.mode.upper() == "BOTH": 389 | wordlistDict = readMatches(options) 390 | else: 391 | wordlistDict = [] 392 | 393 | if options.multithread is True: 394 | 395 | logger.info("I'm Speed") 396 | if len(targetIPs) > 0: 397 | unleashThreads(options, "Enum", None, 398 | targetDict, lock, targetIPs, None) 399 | if options.uncpaths != "UNSET": 400 | logger.info("Adding UNCPATHS to the target dictionary") 401 | targetDict = addUncPaths(targetDict, options) 402 | 403 | unleashThreads(options, "Action", dbs, targetDict, 404 | lock, None, wordlistDict) 405 | 406 | else: 407 | logger.info("Starting solo worker") 408 | options.T = 1 409 | if len(targetIPs) > 0: 410 | unleashThreads(options, "Enum", None, 411 | targetDict, lock, targetIPs, None) 412 | if options.uncpaths != "UNSET": 413 | logger.info("Adding UNCPATHS to the target dictionary") 414 | targetDict = addUncPaths(targetDict, options) 415 | 416 | 
unleashThreads(options, "Action", dbs, targetDict, 417 | lock, None, wordlistDict) 418 | 419 | if options.csv: 420 | if options.mode.upper() == "SMBSR": 421 | dbs["SMBSR"].exportToCSV(options.tag) 422 | elif options.mode.upper() == "RSMBI": 423 | dbs["RSMBI"].exportToCSV(options.tag) 424 | else: 425 | dbs["SMBSR"].exportToCSV(options.tag) 426 | dbs["RSMBI"].exportToCSV(options.tag) 427 | 428 | logger.info("SMB@ has finished, cheers") 429 | -------------------------------------------------------------------------------- /smbsr.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import tempfile 3 | import re 4 | import random 5 | import sys 6 | import os 7 | import sqlite3 8 | import csv 9 | from itertools import compress 10 | import datetime 11 | from datetime import datetime 12 | import faulthandler 13 | import io 14 | import string 15 | import textract 16 | 17 | 18 | logger = logging.getLogger('rSMBi') 19 | 20 | 21 | class SMBSR(object): 22 | def __init__(self, workername, options, db, file, share, ip, tag): 23 | super(SMBSR, self).__init__() 24 | self.options = options 25 | self.workername = workername 26 | self.db = db 27 | self.file = file 28 | self.share = share 29 | self.ip = ip 30 | self.tag = tag 31 | 32 | def retrieveTextSpecial(self, file_object): 33 | try: 34 | # os.rename(file_object.name, file_object.name + ".docx") 35 | text = textract.process(file_object.name) 36 | logger.debug("hello " + file_object.name) 37 | return text 38 | except Exception as e: 39 | os.remove(file_object.name) 40 | logger.error(f"[{self.workername}] | Error while parsing special file " + 41 | file_object.name + " with exception: " + str(e)) 42 | return "textractfailed" 43 | 44 | def get_bool(self, prompt): 45 | while True: 46 | try: 47 | return {"y": True, "n": False}[input(prompt).lower()] 48 | except KeyError: 49 | logger.error( 50 | f"[{self.workername}] | Invalid input please enter [y/n]") 51 | 52 | def retrieveTimes(self, filename): 53 | try: 54 | times = [] 55 | 56 | stats = os.stat(filename) 57 | 58 | ts_created = datetime.fromtimestamp( 59 | stats.st_ctime).strftime('%Y-%m-%d %H:%M:%S') 60 | ts_accessed = datetime.fromtimestamp( 61 | stats.st_atime).strftime('%Y-%m-%d %H:%M:%S') 62 | ts_modified = datetime.fromtimestamp( 63 | stats.st_mtime).strftime('%Y-%m-%d %H:%M:%S') 64 | times.append(ts_created) 65 | times.append(ts_modified) 66 | times.append(ts_accessed) 67 | return times 68 | except Exception as e: 69 | logger.error(f"[{self.workername}] | Error while retrieving timestamp of file: " + 70 | filename + "with exception: " + str(e)) 71 | 72 | def passwordSMBSR(self, text, filename, to_match, counter): 73 | try: 74 | if text == "" or text is None: 75 | return False 76 | 77 | results = [] 78 | output = False 79 | lbound = 0 80 | ubound = 0 81 | tosave = "" 82 | substartidx = 0 83 | words = to_match["words"] 84 | regex = to_match["regex"] 85 | for substring in words: 86 | results.append(substring.lower() in text.lower()) 87 | output = any(results) 88 | if output: 89 | try: 90 | m = [i for i, x in enumerate(results) if x] 91 | for z in m: 92 | logger.info(f"[{self.workername}] | Found interesting match in " + 93 | filename + " with " + words[z] + ", line: " + str(counter)) 94 | substartidx = (text.lower()).find(words[z].lower()) 95 | if len(text) < 50: 96 | tosave = text 97 | else: 98 | if substartidx < 25: 99 | lbound = 0 100 | else: 101 | lbound = substartidx - 25 102 | if (len(text) - (substartidx+len(words[z]))) < 25: 103 | 104 | 
ubound = len(text) 105 | else: 106 | ubound = (substartidx+len(words[z]) + 25) 107 | 108 | tosave = text[lbound:ubound] 109 | 110 | self.db.insertFinding(filename, self.share, self.ip, str(counter), words[z], self.retrieveTimes( 111 | filename), self.options.tag, tosave.replace("\n", " ")) 112 | return True 113 | except Exception as e: 114 | logger.debug( 115 | f"[{self.workername}] | Error while looking for strings to match") 116 | if len(regex) > 0: 117 | for i in regex: 118 | try: 119 | matchedraw = re.search(i, text) 120 | if matchedraw: 121 | matched = (matchedraw).group(0) 122 | logger.info(f"[{self.workername}] | Found interesting match in " + 123 | filename + " with regex " + i + ", line: " + str(counter)) 124 | substartidx = (text.lower()).find(matched.lower()) 125 | 126 | if len(text) < 50: 127 | tosave = text 128 | else: 129 | if substartidx < 25: 130 | lbound = 0 131 | else: 132 | lbound = substartidx - 25 133 | if (len(text) - (substartidx+len(matched))) < 25: 134 | 135 | ubound = len(text) 136 | else: 137 | ubound = (substartidx+len(matched) + 25) 138 | 139 | tosave = text[lbound:ubound] 140 | self.db.insertFinding(filename, self.share, self.ip, str(counter), words[z], self.retrieveTimes( 141 | filename), self.options.tag, tosave.replace("\n", " ")) 142 | return True 143 | except Exception as e: 144 | logger.debug( 145 | f"[{self.workername}] | Error while looking for regexp: "+str(i)) 146 | return False 147 | except Exception as e: 148 | logger.debug( 149 | f"[{self.workername}] | Error while parsing line of file: "+str(e)) 150 | 151 | def parse(self, filename, to_match, options): 152 | line_counter = 0 153 | hits = 0 154 | # file_obj = tempfile.NamedTemporaryFile() 155 | 156 | file_ext = (filename.split('/')[-1]).split('.')[-1] or "empty" 157 | if file_ext.lower() in self.options.file_extensions_black.split(','): 158 | logger.debug( 159 | f"[{self.workername}] | This extensions is blacklisted") 160 | else: 161 | if file_ext.lower() in self.options.file_interesting.split(','): 162 | logger.info( 163 | f"[{self.workername}] | Found interesting file: " + filename) 164 | self.db.insertFileFinding(filename, self.share, self.ip, self.retrieveTimes( 165 | filename), self.options.tag) 166 | if (filename.split('/')[-1]).split('.')[0].lower() in to_match["words"]: 167 | logger.info( 168 | f"[{self.workername}] | Found interesting file named " + filename) 169 | self.db.insertFileFinding(filename, self.share, self.ip, self.retrieveTimes( 170 | filename), self.options.tag) 171 | # here probably the start of the try/catch 172 | try: 173 | filesize = os.path.getsize(filename) 174 | except Exception as e: 175 | logger.error( 176 | f"[{self.workername}] | Error while retrieving the file size, skipping") 177 | return 178 | 179 | if filesize > self.options.max_size: 180 | logger.debug(f"[{self.workername}] | Skipping file " + 181 | filename + ", it is too big and you said i can't handle it") 182 | 183 | elif len(to_match["words"]) > 0 or len(to_match["regex"]) > 0: 184 | try: 185 | file_obj = open(filename, "r") 186 | except Exception as e: 187 | logger.error( 188 | f"[{self.workername}] | Error while opening handle to file") 189 | return 190 | # here the extension check for office files 191 | if file_ext.lower() in ['docx', 'doc', 'docx', 'eml', 'epub', 'gif', 'jpg', 'mp3', 'msg', 'odt', 'ogg', 'pdf', 'png', 'pptx', 'ps', 'rtf', 'tiff', 'tif', 'wav', 'xlsx', 'xls']: 192 | 193 | lines = (self.retrieveTextSpecial(file_obj)) 194 | file_obj.close() 195 | if lines != "textractfailed": 196 | lines 
197 |                         try:
198 |                             os.remove(filename)
199 |                         except Exception as e:
200 |                             logger.error(
201 |                                 f"[{self.workername}] | Error deleting the temp file: " + filename)
202 | 
203 |                 else:
204 |                     file_obj.seek(0)
205 |                     try:
206 |                         lines = file_obj.readlines()
207 |                     except Exception as e:
208 |                         logger.error(f"[{self.workername}] | Encountered exception while reading file: " +
209 |                                      filename + " with extension " + file_ext + " | Exception: " + str(e))
210 |                         return
211 |                 # note: lines may be str or bytes depending on the branch above, so stripping is done per line below
212 | 
213 |                 if len(lines) > 0 and lines != "textractfailed":
214 |                     for line in lines:
215 |                         line_counter += 1
216 |                         try:
217 | 
218 |                             if self.passwordSMBSR(line.rstrip(), filename, to_match, line_counter):
219 |                                 hits += 1
220 |                                 if hits >= options.hits:
221 |                                     logger.debug(
222 |                                         f"[{self.workername}] | Reached max hits for " + filename)
223 |                                     break
224 |                         except Exception as e:
225 |                             logger.error(f"[{self.workername}] | Encountered exception while analyzing file line: " +
226 |                                          filename + " with extension " + file_ext + " | Exception: " + str(e))
227 |                             break
228 |                 file_obj.close()
229 | 
--------------------------------------------------------------------------------
/worker.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import threading
3 | import string
4 | import random
5 | from os.path import expanduser
6 | import os
7 | from subprocess import check_call
8 | import subprocess
9 | from smb import *
10 | from smb.SMBConnection import SMBConnection
11 | from persistence import Database
12 | from persistence import DatabaseSMBSR
13 | import time
14 | from smbsr import SMBSR
15 | logger = logging.getLogger('rSMBi')
16 | 
17 | 
18 | class rsmicore(object):
19 | 
20 |     def __init__(self, workername, options, db, targetobj, targetIPenum, wordlistDict):
21 |         super(rsmicore, self).__init__()
22 |         self.options = options
23 |         self.workername = workername
24 |         self.db = db
25 |         self.targetobj = targetobj
26 |         self.targetIPenum = targetIPenum
27 |         self.wordlistDict = wordlistDict
28 | 
29 |     def runSMBSRbrain(self, file):
30 | 
31 |         logger.debug(f"[{self.workername}] | Working on file: " + file)
32 |         # build an SMBSR scanner for this file, bound to the current share and host
33 |         smbsrunner = SMBSR(self.workername, self.options, self.db["SMBSR"], file, list(
34 |             self.targetobj.values())[0], list(self.targetobj.keys())[0], self.options.tag)
35 |         # parse() runs the wordlist and regex checks against the file content
36 |         # parse(filename, to_match, options)
37 |         smbsrunner.parse(file, self.wordlistDict, self.options)
38 | 
39 |     def checkWritingRights(self, filePath, sharePath):
40 | 
41 |         try:
42 |             f = open(filePath, "a")  # opening for append only succeeds with write permission
43 |         except Exception:
44 |             return
45 |         logger.info(f"[{self.workername}] | " + "Writing permissions on: " +
46 |                     filePath)
47 |         # the file is writable: record it as an RSMBI finding (filename, share, ip, tag)
48 |         self.db["RSMBI"].insertFinding(filePath, list(self.targetobj.values())[0],
49 |                                        list(self.targetobj.keys())[0], self.options.tag)
50 |         f.close()
51 | 
52 |     def createFolder(self, options):
53 |         localpath = options.local_path + "/" + \
54 |             ''.join(random.choices(string.ascii_letters, k=7))
55 |         try:
56 |             os.mkdir(expanduser(localpath))
57 |             logger.debug(f"[{self.workername}] | Created folder: " + localpath)
58 |         except Exception as e:
59 |             logger.error(f"[{self.workername}] |" + " Error while creating folder: " +
60 |                          localpath + " with exception: " + str(e))
61 |             localpath = ""  # signal the failure to the caller, which checks for an empty path
62 |         return localpath
63 | 
64 |     def deleteFolder(self, path):
65 |         logger.debug(f"[{self.workername}] |" + " Removing folder: " +
66 |                      path)
67 |         try:
68 |             os.rmdir(path)
69 |         except Exception as e:
70 |             logger.error(f"[{self.workername}] |" + " Error removing " + path + " with exception " + str(e) +
71 |                          ", keep in mind you might need to clean up yourself")
72 | 
73 |     def listShares(self, serverName, options):
74 |         connection = SMBConnection(options.username, options.password, options.fake_hostname,
75 |                                    'netbios-server-name', options.domain, use_ntlm_v2=True, is_direct_tcp=True)
76 |         try:
77 |             connection.connect(serverName, 445)
78 |         except Exception as e:
79 |             logger.info(f"[{self.workername}] | " + "Error connecting to: " + serverName + ", with exception: " + str(e))
80 |             return []
81 |         try:
82 |             shares = connection.listShares()
83 |         except Exception as e:
84 |             logger.info(f"[{self.workername}] | " + "Error while listing shares from: " +
85 |                         serverName + ", with exception: " + str(e))
86 |             shares = []
87 |         connection.close()
88 |         return shares
89 | 
90 |     def mountShare(self, localPath, remoteShare, pathCredFile):
91 |         logger.debug(f"[{self.workername}] | " + "Mounting share: " + remoteShare +
92 |                      " in: " + localPath)
93 |         try:
94 |             check_call(['mount', '-t', 'cifs', remoteShare, '-o', 'credentials=' + pathCredFile,
95 |                         expanduser(localPath)], stderr=subprocess.DEVNULL)
96 |             return True
97 |         except Exception as e:
98 |             logger.error(f"[{self.workername}] | " + "Exception while trying to mount " +
99 |                          remoteShare + " with exception: " + str(e))
100 |             return False
101 | 
102 |     def umountShare(self, localPath):
103 |         logger.debug(f"[{self.workername}] | " + "Unmounting share: " +
104 |                      localPath)
105 |         try:
106 |             check_call(['umount', '-f', '-l', localPath],
107 |                        stderr=subprocess.DEVNULL)
108 |         except Exception as e:
109 |             logger.error(f"[{self.workername}] | " + "Exception while trying to unmount " +
110 |                          localPath + " with exception: " + str(e))
111 | 
112 |     def walkFolders(self, path):
113 | 
114 |         logger.debug(f"[{self.workername}] | " + "Walking folder in: " + path)
115 | 
116 |         try:
117 |             for root, dirs, files in os.walk(path):
118 |                 for file in files:
119 |                     if self.options.mode.upper() == "SMBSR":
120 |                         self.runSMBSRbrain(root + "/" + file)
121 |                     elif self.options.mode.upper() == "RSMBI":
122 |                         self.checkWritingRights(root + "/" + file, path)
123 |                     elif self.options.mode.upper() == "BOTH":
124 |                         self.runSMBSRbrain(root + "/" + file)
125 |                         self.checkWritingRights(root + "/" + file, path)
126 | 
127 |             # options.mode selects the checks: SMBSR (content scan), RSMBI (write-permission check) or BOTH
128 | 
129 |         except Exception as e:
130 |             logger.error(f"[{self.workername}] | " + "Error while walking folders of path: " +
131 |                          path + " with exception: " + str(e))
132 |             if "Permission denied" not in str(e):
133 |                 self.umountShare(path)
134 | 
135 |     def analyzeTarget(self):
136 |         logger.info(f"[{self.workername}] | " +
137 |                     "working on: " + str(self.targetobj))
138 |         # createFolder() returns an empty string when the local mount point could not be created
139 |         localpath = self.createFolder(self.options)
140 |         if localpath != "":
141 |             try:
142 | 
143 |                 if self.mountShare(localpath, "//" + list(self.targetobj.keys())[0] + "/" +
144 |                                    list(self.targetobj.values())[0], self.options.smbcreds):
145 |                     self.walkFolders(localpath)
146 |                     self.umountShare(localpath)
147 |                 self.deleteFolder(localpath)
148 | 
149 |             except Exception as e:
150 |                 logger.error(f"[{self.workername}] | " + "Error while working on: //" + list(self.targetobj.keys())[0] +
151 |                              "/" + list(self.targetobj.values())[0] + " with exception: " + str(e))
152 | 
153 |     def enumTargets(self):
154 |         logger.info("I'm " + self.workername +
155 |                     " enumerating targets")
156 |         tempShares = []
157 |         # hosts that expose no usable shares simply end up with an empty share list
158 |         logger.debug(f"[{self.workername}] | " +
159 |                      "Listing shares for: " + self.targetIPenum)
160 |         for share in self.listShares(self.targetIPenum, self.options):
161 |             if not share.isSpecial and share.name not in ['NETLOGON', 'IPC$'] and (share.name).lower() not in list(map(lambda x: x.lower(), self.options.share_black.split(','))):
162 |                 logger.debug(f"[{self.workername}] | " + "Found share: " + share.name +
163 |                              " on host: " + self.targetIPenum)
164 |                 tempShares.append(share.name)
165 | 
166 |         return self.targetIPenum, tempShares
167 | 
168 | 
169 | class rsmbiworker(threading.Thread):
170 |     def __init__(self, workername, options, targetdict, db, lock, scope, targetsIPs, wordlistDict):
171 |         threading.Thread.__init__(self)
172 |         self.workername = workername
173 |         self.options = options
174 |         self.targetdict = targetdict
175 |         self.db = db
176 |         self.lock = lock
177 |         self.scope = scope
178 |         self.targetsIPs = targetsIPs
179 |         self.wordlistDict = wordlistDict
180 | 
181 |     def run(self):
182 |         logger.info("Starting " + self.workername)
183 |         if self.scope == "Action":
184 |             logger.info("My duty is to Find")
185 |             while True:
186 |                 self.lock.acquire()
187 |                 if len(self.targetdict) == 0:
188 |                     logger.debug(f"[{self.workername}] | " +
189 |                                  "No Targets left to analyze, Ciao Grande")
190 |                     self.lock.release()
191 |                     break
192 |                 key = list(self.targetdict.keys())[0]
193 |                 logger.info(f"[{self.workername}] | " + "Targets left to analyze: " +
194 |                             str(len(list(self.targetdict.keys()))))
195 |                 try:
196 |                     targetobj = {}
197 |                     if len(self.targetdict[key]) > 0:
198 |                         logger.debug(f"[{self.workername}] | " + "Shares to analyze left for: " +
199 |                                      key + " are: " + str(len(self.targetdict[key])))
200 |                         targetobj[key] = self.targetdict[key].pop(0)
201 |                     else:
202 |                         logger.debug(f"[{self.workername}] | " + "No shares left for: " + key +
203 |                                      ", I'm going to pop it out")
204 |                         self.targetdict.pop(key)
205 |                 finally:
206 |                     self.lock.release()
207 |                 if targetobj:
208 |                     rsmbi = rsmicore(
209 |                         self.workername, self.options, self.db, targetobj, None, self.wordlistDict)
210 |                     rsmbi.analyzeTarget()
211 | 
212 |         else:
213 |             logger.info("My duty is to enum")
214 |             # enum workers only receive the options and the IPs to enumerate; db, targetobj and the wordlist stay None
215 |             while len(self.targetsIPs) > 0:
216 |                 ipToEnum = self.targetsIPs.pop(0)
217 |                 rsmbi = rsmicore(
218 |                     self.workername, self.options, None, None, ipToEnum, None)
219 |                 toDict = rsmbi.enumTargets()
220 |                 self.targetdict[toDict[0]] = toDict[1]
221 |         logger.info("Exiting " + self.workername)
222 | 
--------------------------------------------------------------------------------
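
Editor's note: the sketch below is not a file of this repository. It only illustrates how the two rsmbiworker scopes defined in worker.py fit together: a non-"Action" pass that fills targetdict with {ip: [shares]}, followed by "Action" workers that drain it. The option values, thread count, target IPs and the placeholder db dictionary are assumptions made for illustration; the real wiring (presumably the project's entry-point script, not reproduced in this section) supplies the actual persistence objects, a credentials file, cifs-utils and root privileges for mounting.

# driver_sketch.py -- hypothetical example, illustrative only
import argparse
import threading

from worker import rsmbiworker

# Minimal option set touched by this sketch; the real tool exposes many more
# settings (max_size, extension lists, hits, ...). All values are placeholders.
options = argparse.Namespace(
    mode="BOTH", tag="demo", local_path="/tmp/rsmbi-mounts",
    smbcreds="/root/smbcreds.txt", share_black="ADMIN$,C$,PRINT$",
    username="user", password="pass", domain="WORKGROUP",
    fake_hostname="WORKSTATION",
)

lock = threading.Lock()
targetdict = {}                          # filled by the enum worker: {ip: [share, ...]}
targetsIPs = ["10.0.0.5", "10.0.0.6"]    # hosts to enumerate (placeholders)
wordlist = {"words": ["password"], "regex": []}
db = {"SMBSR": None, "RSMBI": None}      # the real entry point passes persistence objects here

# Phase 1: a worker with a non-"Action" scope enumerates shares into targetdict.
enumerator = rsmbiworker("enum-0", options, targetdict, db, lock, "Enum", targetsIPs, wordlist)
enumerator.start()
enumerator.join()

# Phase 2: "Action" workers pop (host, share) pairs from targetdict until it is empty.
workers = [rsmbiworker(f"action-{n}", options, targetdict, db, lock, "Action", [], wordlist)
           for n in range(3)]
for w in workers:
    w.start()
for w in workers:
    w.join()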