├── .canari ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── README.md ├── contributors.txt ├── docs ├── sniffMyPackets-Entities.docx └── sniffMyPackets-Entities.pdf ├── requirements.txt ├── setup.py └── src └── sniffmypacketsv2 ├── __init__.py ├── resources ├── __init__.py ├── etc │ ├── __init__.py │ └── sniffmypacketsv2.conf ├── external │ └── __init__.py ├── images │ ├── __init__.py │ └── entities │ │ ├── SmP-Biohazard.png │ │ ├── SmP-Cipher.png │ │ ├── SmP-Creds.png │ │ ├── SmP-Domain.png │ │ ├── SmP-Email.png │ │ ├── SmP-EvilFile.jpg │ │ ├── SmP-File.png │ │ ├── SmP-Folder.png │ │ ├── SmP-Host.png │ │ ├── SmP-Map.png │ │ ├── SmP-Misc.png │ │ ├── SmP-Pcap.png │ │ ├── SmP-SessionID.png │ │ ├── SmP-Stream.png │ │ ├── SmP-VT.png │ │ └── SmP-ZIpFile.png └── maltego │ ├── __init__.py │ ├── entities.mtz │ ├── sniffmypacketsv2.decodepcap.machine │ └── sniffmypacketsv2.replaysession.machine └── transforms ├── __init__.py ├── common ├── __init__.py ├── apicalls.py ├── auxtools.py ├── dbconnect.py ├── entities.py ├── findcreds.py ├── geoip.py ├── hashmethods.py ├── layers │ ├── BadLayers.py │ ├── __init__.py │ └── http.py ├── loadpackets.py ├── packetParser.py ├── pcapstreams.py ├── pcaptools.py └── protocols │ ├── __init__.py │ ├── dissector.py │ ├── ftp.py │ ├── http.py │ ├── imap.py │ ├── irc.py │ ├── pop.py │ ├── sip.py │ ├── smtp.py │ ├── ssh.py │ └── telnet.py ├── emailaddress.py ├── emailattachments.py ├── extract-artifacts.py ├── find-smtp-creds.py ├── getfolder.py ├── index-pcap.py ├── md5hash2virustotal.py ├── pcap-dns-domain.py ├── pcap-geoip.py ├── pcap-http.py ├── pcap-openwireshark.py ├── pcap-packets.py ├── pcap-ssltraffic.py ├── pcap-streams.py ├── pcap2stream.py ├── session_2_dns.py ├── session_2_ipaddr.py ├── session_2_pcap.py ├── session_2_streams.py ├── streams2ipaddr.py ├── uploadfile.py ├── uploadpcap.py ├── uploadzip.py └── zipfolder.py /.canari: -------------------------------------------------------------------------------- 1 | [metadata] 2 | 3 | author = catalyst256 4 | project = sniffmypacketsv2 5 | maintainer = catalyst256 6 | email = catalyst256@gmail.com 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # PyInstaller 26 | # Usually these files are written by a python script from a template 27 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
28 | *.manifest 29 | *.spec 30 | 31 | # Installer logs 32 | pip-log.txt 33 | pip-delete-this-directory.txt 34 | 35 | # Unit test / coverage reports 36 | htmlcov/ 37 | .tox/ 38 | .coverage 39 | .cache 40 | nosetests.xml 41 | coverage.xml 42 | 43 | # Translations 44 | *.mo 45 | *.pot 46 | 47 | # Django stuff: 48 | *.log 49 | 50 | # Sphinx documentation 51 | docs/_build/ 52 | 53 | # PyBuilder 54 | target/ 55 | 56 | 57 | #Exclude conf files generated when canari package installed, not the main conf file though 58 | *.conf 59 | !/src/sniffmypacketsv2/resources/etc/*.conf 60 | 61 | #Exclude python autosave and complied files 62 | *.pyc 63 | *.py~ 64 | 65 | #Exclude sniffMyPackets.mtz from src directory 66 | /src/sniffmypacketsv2.mtz 67 | 68 | .idea/ 69 | .DS_Store 70 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | Copyright 2014 Adam Maxwell 179 | 180 | Licensed under the Apache License, Version 2.0 (the "License"); 181 | you may not use this file except in compliance with the License. 182 | You may obtain a copy of the License at 183 | 184 | http://www.apache.org/licenses/LICENSE-2.0 185 | 186 | Unless required by applicable law or agreed to in writing, software 187 | distributed under the License is distributed on an "AS IS" BASIS, 188 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 189 | See the License for the specific language governing permissions and 190 | limitations under the License. 191 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.md 2 | recursive-include src *.py *.conf *.gif *.png *.mtz *.machine 3 | recursive-include maltego *.mtz 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | sniffmypacketsv2 2 | ================ 3 | 4 | Next major release of sniffMyPackets - Now with added packet loving 5 | 6 | ####NOTE: You can now choose to use a database backend or not. This is defined in the ```sniffmypacketsv2.conf``` file which will be created when you run the ```canari create-profile``` command listed below. 7 | 8 | Once you have the sniffmypacketsv2.conf file, open it in a text editor and change the usedb value from 0 to 1.: 9 | 10 | https://github.com/SneakersInc/sniffmypacketsv2-web 11 | 12 | 13 | These instructions are to install sniffMyPacketsv2 onto your machine. In order for the transforms to work you need to have the following 2 components installed: 14 | 15 | 1. 
Maltego 16 | 17 | 2. Canari Framework 18 | 19 | Installation Instructions: 20 | 21 | Maltego 22 | 23 | Download Maltego from their website or, if you are using BackTrack/Kali, it's installed by default. 24 | 25 | Run Maltego so it creates the required folders and directory structure 26 | Go through the default configuration for the first run (that wizard thing) 27 | Close Maltego 28 | 29 | Canari Framework 30 | 31 | The recommended method is to get Canari from the github repo here: 32 | 33 | Canari Framework - https://github.com/allfro/canari 34 | 35 | From the directory you wish to have Canari installed in, run the following commands: 36 | ``` 37 | git clone https://github.com/allfro/canari.git 38 | cd canari 39 | python setup.py install 40 | ``` 41 | This will install Canari and download the required dependencies. 42 | 43 | Once installed you can test by running: 44 | 45 | ```canari version ``` 46 | You should see something like this: 47 | 48 | You're running *Canari Framework v1.1* 49 | 50 | You are now ready to install sniffMyPacketsv2... 51 | 52 | From your working directory clone the github repo using: 53 | ``` 54 | git clone https://github.com/SneakersInc/sniffmypacketsv2.git 55 | cd sniffmypacketsv2 56 | ``` 57 | You now need to install the required Python modules. You can do this in one of two ways: 58 | ``` 59 | 1. sudo pip install -r requirements.txt 60 | 2. sudo python setup.py install 61 | 62 | cd src 63 | canari create-profile sniffmypacketsv2 -w [working directory] 64 | ``` 65 | so in my case: 66 | ``` 67 | -w /root/localTransforms/sniffmypacketsv2/src 68 | ``` 69 | This will create a sniffmypacketsv2.mtz file (and sniffmypacketsv2.conf) 70 | 71 | Open Maltego, click on the Maltego Icon in the toolbar, then Import - Import Configuration 72 | 73 | Follow the wizard to install the transforms, entities and machines into Maltego 74 | 75 | All the sniffmypacketsv2 transforms and entities are under the [SmP] headings 76 | -------------------------------------------------------------------------------- /contributors.txt: -------------------------------------------------------------------------------- 1 | # List of contributors to sniffMyPackets V2. 2 | 3 | NOTE: 4 | Contributors are people whose code is used (although it may be slightly modified) within sniffMyPackets. 5 | Where possible/applicable the original license has been left in the source code.
6 | 7 | # Pcredz 1.0.0 - Created by Laurent Gaffie (lgaffie@trustwave.com) 8 | # Scapy dissectors - Created by Abdulellah Alsaheel (cs_saheel@hotmail.com) 9 | -------------------------------------------------------------------------------- /docs/sniffMyPackets-Entities.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/docs/sniffMyPackets-Entities.docx -------------------------------------------------------------------------------- /docs/sniffMyPackets-Entities.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/docs/sniffMyPackets-Entities.pdf -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | canari 2 | pymongo 3 | scapy 4 | python-magic>=0.4.6 5 | pygeoip 6 | requests 7 | http://corelabs.coresecurity.com/index.php?module=Wiki&action=attachment&type=tool&page=Pcapy&file=pcapy-0.10.8.tar.gz 8 | impacket 9 | tldextract 10 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name='sniffmypacketsv2', 5 | author='catalyst256', 6 | version='1.0', 7 | author_email='catalyst256@gmail.com', 8 | description='Maltego transforms for pcap analysis and more', 9 | license='GPL', 10 | packages=find_packages('src'), 11 | package_dir={ '' : 'src' }, 12 | zip_safe=False, 13 | package_data={ 14 | '' : [ '*.gif', '*.png', '*.conf', '*.mtz', '*.machine' ] # list of resources 15 | }, 16 | install_requires=[ 17 | 'canari>=1.1', 18 | 'pymongo>=2.7.2', 19 | 'scapy', 20 | 'python-magic>=0.4.6', 21 | 'pygeoip', 22 | 'requests', 23 | 'impacket', 24 | 'pcapy', 25 | 'tldextract' 26 | ], 27 | dependency_links=[ 28 | 'http://corelabs.coresecurity.com/index.php?module=Wiki&action=attachment&type=tool&page=Pcapy&file=pcapy-0.10.8.tar.gz' 29 | ] 30 | ) 31 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' 12 | 13 | __all__ = [ 14 | 'resources', 15 | 'transforms' 16 | ] -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' 12 | 13 | __all__ = [ 14 | 'etc', 15 | 'images', 16 | 'maltego', 17 | 'external' 18 | ] -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/etc/__init__.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/etc/sniffmypacketsv2.conf: -------------------------------------------------------------------------------- 1 | [mongodb] 2 | server = '127.0.0.1' 3 | dbs = 'sniffMyPackets' 4 | port = 27017 5 | username = '' 6 | password = '' 7 | 8 | [working] 9 | directory = '/tmp' 10 | usedb = 0 11 | 12 | [web] 13 | server = '127.0.0.1' 14 | port = '5666' 15 | apipath = '' 16 | 17 | [geoip] 18 | db = '' 19 | homelat = '51.5081' 20 | homelng = '0.0761' 21 | 22 | [api] 23 | vt = '' 24 | shodan = '' 25 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/external/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Biohazard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Biohazard.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Cipher.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Cipher.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Creds.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Creds.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Domain.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Domain.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Email.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Email.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-EvilFile.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-EvilFile.jpg -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-File.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-File.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Folder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Folder.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Host.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Host.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Map.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Misc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Misc.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Pcap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Pcap.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-SessionID.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-SessionID.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-Stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-Stream.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-VT.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-VT.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/images/entities/SmP-ZIpFile.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/images/entities/SmP-ZIpFile.png -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/maltego/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/maltego/entities.mtz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/resources/maltego/entities.mtz -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/maltego/sniffmypacketsv2.decodepcap.machine: -------------------------------------------------------------------------------- 1 | //Welcome to Maltego Machines! 2 | 3 | //Each machine starts with a statement like this 4 | machine("sniffmypacketsv2.decodepcap", 5 | displayName:"[SmP] - Decode PCAP", 6 | author:"Adam Maxwell (@catalyst256)", 7 | description: "Extract information out of PCAP file.") { 8 | 9 | start { 10 | paths{ 11 | run("sniffMyPackets.v2.pcap_2_streams") 12 | run("sniffMyPacketsv2.v2.pcap_2_geoip") 13 | } 14 | paths{ 15 | run("sniffMyPacketsv2.v2.stream2info") 16 | run("sniffMyPacketsv2.v2.pcap_2_dnsrequests") 17 | run("sniffMyPacketsv2.v2.pcap_2_ssltraffic") 18 | run("sniffMyPacketsv2.v2.streams_2_ipaddress") 19 | run("sniffMyPacketsv2.v2.pcap_2_http") 20 | run("sniffMyPacketsv2.v2.pcap_2_artifacts") 21 | run("sniffMyPacketsv2.v2.pcap_2_packets") 22 | } 23 | } 24 | } -------------------------------------------------------------------------------- /src/sniffmypacketsv2/resources/maltego/sniffmypacketsv2.replaysession.machine: -------------------------------------------------------------------------------- 1 | //Welcome to Maltego Machines! 
2 | 3 | //Each machine starts with a statement like this 4 | machine("sniffmypacketsv2.replaysession", 5 | displayName:"[SmP] - Replay Session (auto)", 6 | author:"Adam Maxwell (@catalyst256)", 7 | description: "Replays a Session (based on Session ID)") { 8 | 9 | 10 | onTimer(15) { 11 | run("sniffMyPacketsv2.v2.session_2_streams") 12 | paths{ 13 | run("sniffMyPacketsv2.v2.session_2_ipaddr") 14 | run("sniffMyPacketsv2.v2.session_2_dns") 15 | paths{ 16 | run("paterva.v2.DNSNameToDomain_DNS") 17 | run("paterva.v2.DNSNameToIPAddress_DNS") 18 | } 19 | 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' 12 | 13 | __all__ = [ 14 | 'find-smtp-creds', 15 | 'session_2_dns', 16 | 'session_2_ipaddr', 17 | 'session_2_streams', 18 | 'session_2_pcap', 19 | 'pcap-packets', 20 | 'uploadfile', 21 | 'emailaddress', 22 | 'emailattachments', 23 | 'uploadpcap', 24 | 'uploadzip', 25 | 'zipfolder', 26 | 'getfolder', 27 | 'md5hash2virustotal', 28 | 'pcap2stream', 29 | 'streams2ipaddr', 30 | 'pcap-ssltraffic', 31 | 'extract-artifacts', 32 | 'pcap-streams', 33 | 'pcap-geoip', 34 | 'pcap-http', 35 | 'pcap-dns-domain', 36 | 'index-pcap', 37 | 'pcap-openwireshark', 38 | 'common' 39 | ] -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __author__ = 'catalyst256' 4 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 5 | __credits__ = [] 6 | 7 | __license__ = 'GPL' 8 | __version__ = '0.1' 9 | __maintainer__ = 'catalyst256' 10 | __email__ = 'catalyst256@gmail.com' 11 | __status__ = 'Development' 12 | 13 | __all__ = [ 14 | 'entities', 15 | 'dbconnect', 16 | 'hashmethods', 17 | 'pcaptools', 18 | 'layers', 19 | 'protocols' 20 | ] -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/apicalls.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Part of the sniffMyPackets v2 Framework 4 | 5 | import time 6 | import requests 7 | from canari.config import config 8 | 9 | 10 | try: 11 | vtapi = config['api/vt'].strip('\'') 12 | except: 13 | pass 14 | 15 | 16 | def vt_lookup_file(md5hash): 17 | time.sleep(15) 18 | base_url = 'https://www.virustotal.com/vtapi/v2/file/report' 19 | payload = {'resource': md5hash, 'apikey': vtapi} 20 | try: 21 | r = requests.post(base_url, data=payload) 22 | if r.status_code != 200: 23 | pass 24 | j = r.json() 25 | if j['response_code'] == 0: 26 | pass 27 | else: 28 | return j 29 | except Exception as e: 30 | return str(e) 31 | 32 | 33 | def vt_lookup_url(url): 34 | base_url = 'http://www.virustotal.com/vtapi/v2/url/report' 35 | payload = {'resource': url, 'apikey': vtapi} 36 | try: 37 | r = requests.post(base_url, data=payload) 38 | if r.status_code != 200: 39 | pass 40 | j = r.json() 41 | if j['response_code'] == 0: 42 | pass 43 | else: 44 | return j['permalink'] 45 | except Exception as 
e: 46 | return str(e) 47 | 48 | 49 | def vt_lookup_ip(ipaddr): 50 | base_url = 'http://www.virustotal.com/vtapi/v2/ip-address/report' 51 | payload = {'ip': ipaddr, 'apikey': vtapi} 52 | try: 53 | r = requests.get(base_url, params=payload) 54 | if r.status_code != 200: 55 | pass 56 | j = r.json() 57 | if j['response_code'] == 0: 58 | pass 59 | else: 60 | return j 61 | except Exception as e: 62 | return str(e) 63 | 64 | 65 | def vt_lookup_domain(domain): 66 | base_url = 'http://www.virustotal.com/vtapi/v2/domain/report' 67 | payload = {'domain': domain, 'apikey': vtapi} 68 | try: 69 | r = requests.get(base_url, params=payload) 70 | if r.status_code != 200: 71 | pass 72 | j = r.json() 73 | if j['response_code'] == 0: 74 | pass 75 | else: 76 | return j 77 | except Exception as e: 78 | return str(e) -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/auxtools.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Part of the sniffMyPackets v2 framework 4 | 5 | import magic 6 | from dbconnect import mongo_connect 7 | import time 8 | 9 | 10 | def check_file(filename): 11 | try: 12 | try: 13 | m = magic.open(magic.MAGIC_NONE) 14 | m.load() 15 | f = m.file(filename) 16 | return f 17 | except Exception: 18 | m = magic.from_file(filename) 19 | return m 20 | except Exception as e: 21 | return str(e) 22 | 23 | 24 | def error_logging(error, module): 25 | e = str(error) 26 | now = time.strftime("%c") 27 | try: 28 | # Connect to the database so we can insert the record created below 29 | x = mongo_connect() 30 | c = x['ERRORS'] 31 | rec = {'TimeStamp': now, 'Module': module, 'Error Message': e} 32 | c.insert(rec) 33 | except: 34 | pass 35 | 36 | 37 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/dbconnect.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Part of the sniffMyPackets v2 framework 4 | 5 | from canari.config import config 6 | import pymongo 7 | 8 | 9 | def mongo_connect(): 10 | dbs = config['mongodb/dbs'].strip('\'') 11 | server = config['mongodb/server'].strip('\'') 12 | port = config['mongodb/port'] 13 | username = config['mongodb/username'].strip('\'') 14 | password = config['mongodb/password'].strip('\'') 15 | 16 | try: 17 | connection = pymongo.MongoClient(server, port) 18 | db = connection[dbs] 19 | except pymongo.errors.ConnectionFailure, e: 20 | return "Could not connect to MongoDB: %s" % e 21 | else: 22 | return db 23 | 24 | 25 | def find_session(md5hash): 26 | x = mongo_connect() 27 | # Get the PCAP ID for the pcap file 28 | try: 29 | s = x.INDEX.find({"MD5 Hash": md5hash}).count() 30 | if s == 0: 31 | t = x.STREAMS.find({"MD5 Hash": md5hash}).count() 32 | if t > 0: 33 | r = x.STREAMS.find({"MD5 Hash": md5hash}, {"PCAP ID": 1, "Stream ID": 1, "Folder": 1, "_id": 0}) 34 | for i in r: 35 | pcap_id = i['PCAP ID'] 36 | session_id = i['Stream ID'] 37 | folder = i['Folder'] 38 | return pcap_id, session_id, folder 39 | 40 | else: 41 | return 'No PCAP ID, you need to index the pcap file' 42 | if s > 0: 43 | r = x.INDEX.find({"MD5 Hash": md5hash}, {"PCAP ID": 1, "Working Directory": 1, "_id": 0}) 44 | for i in r: 45 | pcap_id = i['PCAP ID'] 46 | session_id = i['PCAP ID'] 47 | folder = i['Working Directory'] 48 | return pcap_id, session_id, folder 49 | except Exception as e: 50 | return str(e) 51 | 52 | 53 | 54 |
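A minimal usage sketch (not a file from the repository) showing how the two helpers above are typically driven together: `mongo_connect()` reads the `[mongodb]` block of `sniffmypacketsv2.conf` via canari, and `find_session()` maps a pcap's MD5 hash back to its PCAP/Stream IDs. The pcap path is a placeholder, and the sketch assumes it is run from this `common` directory (so the bare imports resolve, as in the repo's own Python 2 code), that MongoDB is reachable, and that the pcap has already been indexed by the index-pcap transform.

```python
#!/usr/bin/env python
# Hypothetical example -- not part of the sniffmypacketsv2 source tree.

from hashmethods import md5_for_file      # same-package helper (see hashmethods.py below)
from dbconnect import mongo_connect, find_session

pcap = '/tmp/example.pcap'                # placeholder pcap path

db = mongo_connect()                      # pymongo database handle, or an error string on failure
md5hash = md5_for_file(pcap)              # MD5 hash used as the lookup key in the INDEX/STREAMS collections

result = find_session(md5hash)
if isinstance(result, tuple):
    pcap_id, session_id, folder = result
    print 'PCAP ID: %s  Session ID: %s  Folder: %s' % (pcap_id, session_id, folder)
else:
    # Either the pcap has not been indexed yet or the lookup/connection failed.
    print result
```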
-------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/entities.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | 4 | from canari.maltego.message import Entity, EntityField, EntityFieldType, MatchingRule 5 | 6 | __author__ = 'catalyst256' 7 | __copyright__ = 'Copyright 2014, Sniffmypacketsv2 Project' 8 | __credits__ = [] 9 | 10 | __license__ = 'GPL' 11 | __version__ = '0.1' 12 | __maintainer__ = 'catalyst256' 13 | __email__ = 'catalyst256@gmail.com' 14 | __status__ = 'Development' 15 | 16 | __all__ = [ 17 | 'Sniffmypacketsv2Entity', 18 | 'MySniffmypacketsv2Entity', 19 | 'pcapFile', 20 | 'SessionID', 21 | 'Folder', 22 | 'Host', 23 | 'GeoMap', 24 | 'Artifact' 25 | ] 26 | 27 | 28 | class Sniffmypacketsv2Entity(Entity): 29 | _namespace_ = 'sniffmypacketsv2' 30 | 31 | # @EntityField(name='sniffmypacketsv2.fieldN', propname='fieldN', displayname='Field N', matchingrule=MatchingRule.Loose) 32 | # @EntityField(name='sniffmypacketsv2.field1', propname='field1', displayname='Field 1', type=EntityFieldType.Integer) 33 | 34 | 35 | class pcapFile(Sniffmypacketsv2Entity): 36 | pass 37 | 38 | 39 | class SessionID(Sniffmypacketsv2Entity): 40 | pass 41 | 42 | 43 | class Folder(Sniffmypacketsv2Entity): 44 | pass 45 | 46 | 47 | class Host(Sniffmypacketsv2Entity): 48 | pass 49 | 50 | 51 | class GeoMap(Sniffmypacketsv2Entity): 52 | pass 53 | 54 | 55 | @EntityField(name='sniffmypacketsv2.fhash', propname='fhash', displayname='File Hash', type=EntityFieldType.String) 56 | @EntityField(name='sniffmypacketsv2.ftype', propname='ftype', displayname='File Type', type=EntityFieldType.String) 57 | class Artifact(Sniffmypacketsv2Entity): 58 | pass 59 | 60 | 61 | class pcapStream(Sniffmypacketsv2Entity): 62 | pass 63 | 64 | 65 | class VirusTotal(Sniffmypacketsv2Entity): 66 | pass 67 | 68 | 69 | class ZipFile(Sniffmypacketsv2Entity): 70 | pass 71 | 72 | 73 | class EmailAttachment(Sniffmypacketsv2Entity): 74 | pass 75 | 76 | 77 | class Credential(Sniffmypacketsv2Entity): 78 | pass -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/findcreds.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Part of sniffMyPackets framework. 
4 | 5 | import re 6 | import binascii 7 | import base64 8 | from auxtools import error_logging 9 | import logging 10 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 11 | from scapy.all import * 12 | 13 | 14 | # Base functions that we will use to check the packets for interesting stuff 15 | 16 | 17 | def check_ascii(s): 18 | try: 19 | a = s.decode('ascii') 20 | return a 21 | except UnicodeDecodeError: 22 | pass 23 | 24 | 25 | def decode_base64(s): 26 | try: 27 | x = base64.decodestring(s) 28 | return x 29 | except binascii.Error: 30 | pass 31 | 32 | 33 | # Look for various type of credentials stored in packets.as 34 | 35 | 36 | def smtp_creds(pcap): 37 | strings = [] 38 | output = [] 39 | try: 40 | pkts = rdpcap(pcap) 41 | for p in pkts: 42 | if p.haslayer(TCP) and p.haslayer(Raw) and p.getlayer(TCP).dport == 25: 43 | load = p[Raw].load 44 | if load not in strings: 45 | strings.append(load) 46 | else: 47 | pass 48 | else: 49 | pass 50 | except Exception as e: 51 | error_logging(str(e), 'SMTP Creds') 52 | 53 | for s in strings: 54 | t = decode_base64(s) 55 | if t is not None: 56 | c = check_ascii(t) 57 | if c is not None and len(c) > 3: 58 | output.append(c) 59 | else: 60 | pass 61 | else: 62 | pass 63 | 64 | return output 65 | 66 | 67 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/geoip.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Part of sniffMyPackets framework. 4 | # GeoIP Lookup modules to cut down on code changes. 5 | 6 | import pygeoip 7 | from canari.config import config 8 | 9 | 10 | def lookup_geo(ip): 11 | try: 12 | # homelat = config['geoip/homelat'].strip('\'') 13 | # homelng = config['geoip/homelng'].strip('\'') 14 | db = config['geoip/db'].strip('\'') 15 | try: 16 | gi = pygeoip.GeoIP(db) 17 | except Exception as e: 18 | return str(e) 19 | rec = gi.record_by_addr(ip) 20 | if rec is not None: 21 | return rec 22 | except Exception as e: 23 | return str(e) 24 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/hashmethods.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Part of sniffMyPackets framework. 4 | # Hashing modules to cut down on code changes. 
5 | 6 | import hashlib 7 | 8 | 9 | def md5_for_file(path): 10 | try: 11 | f = open(path, 'rb') 12 | md5hash = hashlib.md5(f.read()).hexdigest() 13 | return md5hash 14 | except Exception as e: 15 | return str(e) 16 | 17 | 18 | def sha1_for_file(path): 19 | try: 20 | f = open(path, 'rb') 21 | sha1hash = hashlib.sha1(f.read()).hexdigest() 22 | return sha1hash 23 | except Exception as e: 24 | return str(e) 25 | 26 | 27 | def sha256_for_file(path): 28 | try: 29 | f = open(path, 'rb') 30 | sha256hash = hashlib.sha256(f.read()).hexdigest() 31 | return sha256hash 32 | except Exception as e: 33 | return str(e) 34 | 35 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/layers/BadLayers.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import re 4 | 5 | # Welcome to Gobbler, the Scapy pcap parser and dump scripts 6 | # Part of the sniffMyPackets suite http://www.sniffmypackets.net 7 | # Written by @catalyst256 / catalyst256@gmail.com 8 | 9 | 10 | def exclude_layers(x, xname): 11 | # This is here to deal with the way that scapy stacks some of it's layers together which makes extracting them difficult. 12 | # Deal with IP packets that have "options" 13 | if xname == 'IP': 14 | if x['options'] != None: 15 | d = dict((k, v) for k, v in x.iteritems() if k not in 'options') 16 | return d 17 | # DNS packets 18 | if xname == 'DNS': 19 | if (x['qd'] or x['an'] or x['ar'] or x['ns']) != None: 20 | d = dict((k, v) for k, v in x.iteritems() if k not in ('qd', 'an', 'ns', 'ar')) 21 | return d 22 | # LLMNR are part multicast, part DNS, wholly a bitch to parse 23 | if xname == 'Link Local Multicast Node Resolution - Query': 24 | if (x['qd'] or x['an'] or x['ar'] or x['ns']) != None: 25 | d = dict((k, v) for k, v in x.iteritems() if k not in ('qd', 'an', 'ns', 'ar')) 26 | return d 27 | # Nasty SNMP Layers 28 | if xname == 'SNMP': 29 | if x['PDU'] != None: 30 | d = dict((k, v) for k, v in x.iteritems() if k not in 'PDU') 31 | d = str(d) 32 | d = re.sub('', '', d) 34 | s = eval(d) 35 | return s 36 | if xname == 'SNMPget': 37 | if x['varbindlist'] != None: 38 | d = dict((k, v) for k, v in x.iteritems() if k not in 'varbindlist') 39 | d = str(d) 40 | d = re.sub('', '', d) 42 | s = eval(d) 43 | return s 44 | if xname == 'SNMPvarbind': 45 | d = str(x) 46 | d = re.sub('', '', d) 48 | s = eval(d) 49 | return s 50 | if xname == 'SNMPresponse': 51 | if x['varbindlist'] != None: 52 | d = dict((k, v) for k, v in x.iteritems() if k not in 'varbindlist') 53 | d = str(d) 54 | d = re.sub('', '', d) 56 | s = eval(d) 57 | return s 58 | if xname == 'DHCP6 Client Identifier Option': 59 | if x['duid'] != None: 60 | d = dict((k, v) for k, v in x.iteritems() if k not in 'duid') 61 | return d 62 | if xname == 'IPv6 Extension Header - Hop-by-Hop Options Header': 63 | if x['options'] != None: 64 | d = dict((k,v) for k, v in x.iteritems() if k not in 'options') 65 | return d 66 | else: 67 | return x -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/layers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/transforms/common/layers/__init__.py -------------------------------------------------------------------------------- 
/src/sniffmypacketsv2/transforms/common/layers/http.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # -*- coding: UTF-8 -*- 3 | # Author : Steeve Barbeau, Luca Invernizzi 4 | # This program is published under a GPLv2 license 5 | 6 | import re 7 | from scapy.all import TCP, bind_layers, Packet, StrField 8 | 9 | 10 | def _canonicalize_header(name): 11 | ''' Takes a header key (i.e., "Host" in "Host: www.google.com", 12 | and returns a canonical representation of it ''' 13 | return name.strip().lower() 14 | 15 | 16 | def _parse_headers(s): 17 | ''' Takes a HTTP packet, and returns a tuple containing: 18 | - the first line (e.g., "GET ...") 19 | - the headers in a dictionary 20 | - the body ''' 21 | try: 22 | headers, body = s.split("\r\n\r\n", 1) 23 | except: 24 | headers = s 25 | body = '' 26 | headers = headers.split("\r\n") 27 | first_line, headers = headers[0].strip(), headers[1:] 28 | headers_found = {} 29 | for header_line in headers: 30 | try: 31 | key, value = header_line.split(':', 1) 32 | except: 33 | continue 34 | headers_found[_canonicalize_header(key)] = header_line.strip() 35 | return first_line, headers_found, body 36 | 37 | 38 | def _dissect_headers(obj, s): 39 | ''' Takes a HTTP packet as the string s, and populates the scapy layer obj 40 | (either HTTPResponse or HTTPRequest). Returns the first line of the 41 | HTTP packet, and the body 42 | ''' 43 | first_line, headers, body = _parse_headers(s) 44 | for f in obj.fields_desc: 45 | canonical_name = _canonicalize_header(f.name) 46 | try: 47 | header_line = headers[canonical_name] 48 | except: 49 | continue 50 | key, value = header_line.split(':', 1) 51 | obj.setfieldval(f.name, value.strip()) 52 | del headers[canonical_name] 53 | if headers: 54 | obj.setfieldval( 55 | 'Additional-Headers', '\r\n'.join(headers.values()) + '\r\n') 56 | return first_line, body 57 | 58 | 59 | def _self_build(obj, field_pos_list=None): 60 | ''' Takse an HTTPRequest or HTTPResponse object, and creates its internal 61 | scapy representation as a string. 
That is, generates the HTTP 62 | packet as a string ''' 63 | p = "" 64 | for f in obj.fields_desc: 65 | val = obj.getfieldval(f.name) 66 | if not val: 67 | continue 68 | val += '\r\n' 69 | if f.name in ['Method', 'Additional-Headers', 'Status-Line']: 70 | p = f.addfield(obj, p, val) 71 | else: 72 | p = f.addfield(obj, p, "%s: %s" % (f.name, val)) 73 | return p 74 | 75 | 76 | class HTTPRequest(Packet): 77 | 78 | name = "HTTP Request" 79 | http_methods = "^(OPTIONS|GET|HEAD|POST|PUT|DELETE|TRACE|CONNECT)" 80 | fields_desc = [StrField("Method", None, fmt="H"), 81 | StrField("Path", None, fmt="H"), 82 | StrField("Http-Version", None, fmt="H"), 83 | StrField("Host", None, fmt="H"), 84 | StrField("User-Agent", None, fmt="H"), 85 | StrField("Accept", None, fmt="H"), 86 | StrField("Accept-Language", None, fmt="H"), 87 | StrField("Accept-Encoding", None, fmt="H"), 88 | StrField("Accept-Charset", None, fmt="H"), 89 | StrField("Referer", None, fmt="H"), 90 | StrField("Authorization", None, fmt="H"), 91 | StrField("Expect", None, fmt="H"), 92 | StrField("From", None, fmt="H"), 93 | StrField("If-Match", None, fmt="H"), 94 | StrField("If-Modified-Since", None, fmt="H"), 95 | StrField("If-None-Match", None, fmt="H"), 96 | StrField("If-Range", None, fmt="H"), 97 | StrField("If-Unmodified-Since", None, fmt="H"), 98 | StrField("Max-Forwards", None, fmt="H"), 99 | StrField("Proxy-Authorization", None, fmt="H"), 100 | StrField("Range", None, fmt="H"), 101 | StrField("TE", None, fmt="H"), 102 | StrField("Cache-Control", None, fmt="H"), 103 | StrField("Connection", None, fmt="H"), 104 | StrField("Date", None, fmt="H"), 105 | StrField("Pragma", None, fmt="H"), 106 | StrField("Trailer", None, fmt="H"), 107 | StrField("Transfer-Encoding", None, fmt="H"), 108 | StrField("Upgrade", None, fmt="H"), 109 | StrField("Via", None, fmt="H"), 110 | StrField("Warning", None, fmt="H"), 111 | StrField("Keep-Alive", None, fmt="H"), 112 | StrField("Allow", None, fmt="H"), 113 | StrField("Content-Encoding", None, fmt="H"), 114 | StrField("Content-Language", None, fmt="H"), 115 | StrField("Content-Length", None, fmt="H"), 116 | StrField("Content-Location", None, fmt="H"), 117 | StrField("Content-MD5", None, fmt="H"), 118 | StrField("Content-Range", None, fmt="H"), 119 | StrField("Content-Type", None, fmt="H"), 120 | StrField("Expires", None, fmt="H"), 121 | StrField("Last-Modified", None, fmt="H"), 122 | StrField("Cookie", None, fmt="H"), 123 | StrField("Additional-Headers", None, fmt="H")] 124 | 125 | def do_dissect(self, s): 126 | ''' From the HTTP packet string, populate the scapy object ''' 127 | first_line, body = _dissect_headers(self, s) 128 | Method, Path, HTTPVersion = re.split("\s+", first_line) 129 | 130 | self.setfieldval('Method', Method) 131 | self.setfieldval('Path', Path) 132 | self.setfieldval('Http-Version', HTTPVersion) 133 | return body 134 | 135 | def self_build(self, field_pos_list=None): 136 | ''' Generate the HTTP packet string (the oppposite of do_dissect) ''' 137 | return _self_build(self, field_pos_list) 138 | 139 | 140 | class HTTPResponse(Packet): 141 | 142 | name = "HTTP Response" 143 | fields_desc = [StrField("Status-Line", None, fmt="H"), 144 | StrField("Accept-Ranges", None, fmt="H"), 145 | StrField("Age", None, fmt="H"), 146 | StrField("E-Tag", None, fmt="H"), 147 | StrField("Location", None, fmt="H"), 148 | StrField("Proxy-Authenticate", None, fmt="H"), 149 | StrField("Retry-After", None, fmt="H"), 150 | StrField("Server", None, fmt="H"), 151 | StrField("Vary", None, fmt="H"), 152 | 
StrField("WWW-Authenticate", None, fmt="H"), 153 | StrField("Cache-Control", None, fmt="H"), 154 | StrField("Connection", None, fmt="H"), 155 | StrField("Date", None, fmt="H"), 156 | StrField("Pragma", None, fmt="H"), 157 | StrField("Trailer", None, fmt="H"), 158 | StrField("Transfer-Encoding", None, fmt="H"), 159 | StrField("Upgrade", None, fmt="H"), 160 | StrField("Via", None, fmt="H"), 161 | StrField("Warning", None, fmt="H"), 162 | StrField("Keep-Alive", None, fmt="H"), 163 | StrField("Allow", None, fmt="H"), 164 | StrField("Content-Encoding", None, fmt="H"), 165 | StrField("Content-Language", None, fmt="H"), 166 | StrField("Content-Length", None, fmt="H"), 167 | StrField("Content-Location", None, fmt="H"), 168 | StrField("Content-MD5", None, fmt="H"), 169 | StrField("Content-Range", None, fmt="H"), 170 | StrField("Content-Type", None, fmt="H"), 171 | StrField("Expires", None, fmt="H"), 172 | StrField("Last-Modified", None, fmt="H"), 173 | StrField("Additional-Headers", None, fmt="H")] 174 | 175 | def do_dissect(self, s): 176 | ''' From the HTTP packet string, populate the scapy object ''' 177 | first_line, body = _dissect_headers(self, s) 178 | self.setfieldval('Status-Line', first_line) 179 | return body 180 | 181 | def self_build(self, field_pos_list=None): 182 | ''' From the HTTP packet string, populate the scapy object ''' 183 | return _self_build(self, field_pos_list) 184 | 185 | 186 | class HTTP(Packet): 187 | 188 | name = "HTTP" 189 | 190 | def do_dissect(self, s): 191 | return s 192 | 193 | def guess_payload_class(self, payload): 194 | ''' Decides if the payload is an HTTP Request or Response, or 195 | something else ''' 196 | try: 197 | prog = re.compile( 198 | r"^(?:OPTIONS|GET|HEAD|POST|PUT|DELETE|TRACE|CONNECT) " 199 | r"(?:.+?) " 200 | r"HTTP/\d\.\d$" 201 | ) 202 | req = payload[:payload.index("\r\n")] 203 | result = prog.match(req) 204 | if result: 205 | return HTTPRequest 206 | else: 207 | prog = re.compile(r"^HTTP/\d\.\d \d\d\d .+?$") 208 | result = prog.match(req) 209 | if result: 210 | return HTTPResponse 211 | except: 212 | pass 213 | return Packet.guess_payload_class(self, payload) 214 | 215 | bind_layers(TCP, HTTP) 216 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/loadpackets.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Gobbler configuration file 4 | 5 | # Welcome to Gobbler, the Scapy pcap parser and dump scripts 6 | # Part of the sniffMyPackets suite http://www.sniffmypackets.net 7 | # Written by @catalyst256 / catalyst256@gmail.com 8 | 9 | import logging 10 | import os 11 | import sys 12 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 13 | from scapy.all import rdpcap 14 | from scapy.error import Scapy_Exception 15 | 16 | 17 | # Add some colouring for printing packets later 18 | YELLOW = '\033[93m' 19 | GREEN = '\033[92m' 20 | END = '\033[0m' 21 | RED = '\033[91m' 22 | 23 | 24 | def loadpackets(pcap): 25 | if os.path.isfile(pcap): 26 | # print GREEN + 'Loading pcap file: ' + pcap + END 27 | try: 28 | p = rdpcap(pcap) 29 | # print YELLOW + 'Number of packets: ' + str(len(p)) + END 30 | return p 31 | except Scapy_Exception as msg: 32 | print RED + str(msg) + END 33 | sys.exit(0) 34 | else: 35 | print RED + 'ERROR: You sure that\'s the right file location???: [' + pcap + ']' + END -------------------------------------------------------------------------------- 
/src/sniffmypacketsv2/transforms/common/packetParser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Welcome to Gobbler, the Scapy pcap parser and dump scripts 4 | # Part of the sniffMyPackets suite http://www.sniffmypackets.net 5 | # Written by @catalyst256 / catalyst256@gmail.com 6 | 7 | import datetime 8 | from layers.http import * 9 | from layers.BadLayers import * 10 | from auxtools import error_logging 11 | import logging 12 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 13 | from scapy.all import * 14 | from collections import OrderedDict 15 | 16 | bind_layers(TCP, HTTP) 17 | 18 | 19 | def rename_layer(x, n): 20 | n = n.lower().replace(' ', '_').replace('-', '_').replace('.', '_') + '_' 21 | return dict((n+k.lower(), f(v) if hasattr(v, 'keys') else v) for k, v in x.items()) 22 | 23 | 24 | def find_layers(pkts, pcap, pcap_id, streamid): 25 | packet = OrderedDict() 26 | count = 1 27 | pcap_id = pcap_id.encode('utf-8') 28 | streamid = streamid.encode('utf-8') 29 | try: 30 | for p in pkts: 31 | header = {"Buffer": {"timestamp": datetime.datetime.fromtimestamp(p.time).strftime('%Y-%m-%d %H:%M:%S.%f'), 32 | "packetnumber": count, "PCAP ID": pcap_id, "pcapfile": pcap, "StreamID": streamid}} 33 | packet.update(header) 34 | counter = 0 35 | while True: 36 | layer = p.getlayer(counter) 37 | if layer != None: 38 | i = int(counter) 39 | x = p[0][i].fields 40 | t = exclude_layers(x, layer.name) 41 | s = rename_layer(t, layer.name) 42 | v = '{"' + layer.name.replace('.', '_') + '[' + str(i) + ']' + '":' + str(s) + '}' 43 | s = eval(v) 44 | try: 45 | del s['HTTP[3]'] 46 | del s['HTTP[5]'] 47 | except KeyError: 48 | pass 49 | packet.update(s) 50 | else: 51 | break 52 | counter += 1 53 | count += 1 54 | yield packet 55 | packet.clear() 56 | except Exception as e: 57 | error_logging(str(e), 'PacketParser') 58 | pass 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/pcapstreams.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # This is a modified version of the CORE Security Technologies pcap splitter code. 4 | # This version supports TCP & UDP stream extraction and a modified file naming convention 5 | # Modified by @catalyst256 6 | 7 | # --- Original Licensing stuff --- 8 | # This software is provided under under a slightly modified version 9 | # of the Apache Software License. See the accompanying LICENSE file 10 | # for more information. 11 | 12 | # Copyright (c) 2003 CORE Security Technologies 13 | # Original Authors: 14 | # Alejandro D. Weil 15 | # Javier Kohen 16 | 17 | # Requires pcapy which can be installed by running the following: 18 | # pip install "http://corelabs.coresecurity.com/index.php?module=Wiki&action=attachment&type=tool&page=Pcapy&file=pcapy-0.10.8.tar.gz" 19 | 20 | 21 | import sys 22 | import string 23 | from exceptions import Exception 24 | # from threading import Thread 25 | 26 | import pcapy 27 | from pcapy import open_offline 28 | import impacket 29 | from impacket.ImpactDecoder import EthDecoder, LinuxSLLDecoder 30 | 31 | files = [] 32 | 33 | 34 | class Connection: 35 | """This class can be used as a key in a dictionary to select a connection 36 | given a pair of peers. Two connections are considered the same if both 37 | peers are equal, despite the order in which they were passed to the 38 | class constructor. 
39 | """ 40 | 41 | def __init__(self, p1, p2, p3): 42 | """This constructor takes two tuples, one for each peer. The first 43 | element in each tuple is the IP address as a string, and the 44 | second is the port as an integer. 45 | """ 46 | 47 | self.p1 = p1 48 | self.p2 = p2 49 | self.p3 = p3 50 | 51 | def getFilename(self): 52 | """Utility function that returns a filename composed by the IP 53 | addresses and ports of both peers. 54 | """ 55 | return '%s-%s:%d-%s:%d.pcap' % (self.p1, self.p2[0], self.p2[1], self.p3[0], self.p3[1]) 56 | 57 | def __cmp__(self, other): 58 | if (self.p2 == other.p2 and self.p3 == other.p3) or (self.p2 == other.p3 and self.p3 == other.p2): 59 | return 0 60 | else: 61 | return -1 62 | 63 | def __hash__(self): 64 | return (hash(self.p2[0]) ^ hash(self.p2[1]) 65 | ^ hash(self.p3[0]) ^ hash(self.p3[1])) 66 | 67 | 68 | class Decoder: 69 | 70 | def __init__(self, pcapObj, folder): 71 | # Query the type of the link and instantiate a decoder accordingly. 72 | datalink = pcapObj.datalink() 73 | if pcapy.DLT_EN10MB == datalink: 74 | self.decoder = EthDecoder() 75 | elif pcapy.DLT_LINUX_SLL == datalink: 76 | self.decoder = LinuxSLLDecoder() 77 | else: 78 | raise Exception("Datalink type not supported: " % datalink) 79 | 80 | self.pcap = pcapObj 81 | self.connections = {} 82 | folder = folder 83 | 84 | def start(self): 85 | # Sniff ad infinitum. 86 | # PacketHandler shall be invoked by pcap for every packet. 87 | self.pcap.loop(0, self.packetHandler) 88 | 89 | def packetHandler(self, hdr, data): 90 | # Use the ImpactDecoder to turn the rawpacket into a hierarchy 91 | # of ImpactPacket instances. 92 | try: 93 | p = self.decoder.decode(data) 94 | ip = p.child() 95 | protocol = ip.get_ip_p() 96 | # Build a distinctive key for this pair of peers. 97 | if protocol == 6: 98 | tcp = ip.child() 99 | proto = 'TCP' 100 | src = (ip.get_ip_src(), tcp.get_th_sport()) 101 | dst = (ip.get_ip_dst(), tcp.get_th_dport()) 102 | con = Connection(proto, src, dst) 103 | elif protocol == 17: 104 | udp = ip.child() 105 | proto = 'UDP' 106 | src = (ip.get_ip_src(), udp.get_uh_sport()) 107 | dst = (ip.get_ip_dst(), udp.get_uh_dport()) 108 | con = Connection(proto, src, dst) 109 | # If there isn't an entry associated yetwith this connection, 110 | # open a new pcapdumper and create an association. 111 | if not self.connections.has_key(con): 112 | fn = con.getFilename() 113 | fn = '%s/%s' % (folder, fn) 114 | # print "Found a new connection, storing into:", fn 115 | if fn not in files: 116 | files.append(fn) 117 | try: 118 | dumper = self.pcap.dump_open(fn) 119 | except pcapy.PcapError, e: 120 | print "Can't write packet to:", fn 121 | return 122 | self.connections[con] = dumper 123 | 124 | # Write the packet to the corresponding file. 125 | self.connections[con].dump(hdr, data) 126 | except Exception as e: 127 | print str(e) 128 | pass 129 | 130 | 131 | def create_streams(pcap, f): 132 | p = open_offline(pcap) 133 | global folder 134 | folder = f 135 | p.setfilter(r'ip proto \tcp or \udp') 136 | Decoder(p, folder).start() 137 | return files 138 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/pcaptools.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Part of sniffMyPackets framework. 
4 | # Generic pcap tools and utilities that SmP uses 5 | 6 | import os 7 | import magic 8 | import datetime 9 | import logging 10 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 11 | from scapy.all import rdpcap, conf 12 | 13 | 14 | def packet_count(pcap): 15 | conf.verb = 0 16 | try: 17 | pkts = rdpcap(pcap) 18 | return len(pkts) 19 | except Exception as e: 20 | return str(e) 21 | 22 | 23 | def check_pcap(pcap): 24 | try: 25 | bad_magic = 'pcap-ng capture file' 26 | m = magic.open(magic.MAGIC_NONE) 27 | m.load() 28 | f = m.file(pcap) 29 | if bad_magic in f: 30 | return 'BAD' 31 | else: 32 | return f 33 | except Exception as e: 34 | return str(e) 35 | 36 | 37 | def count_sessions(pcap): 38 | try: 39 | pkts = rdpcap(pcap) 40 | return len(pkts.sessions()) 41 | except Exception as e: 42 | return str(e) 43 | 44 | 45 | def check_size(pcap): 46 | try: 47 | x = os.path.getsize(pcap) 48 | d = "%0.01f MB" % (x / (1024*1024.0)) 49 | return str(d) 50 | except Exception as e: 51 | return str(e) 52 | 53 | 54 | def get_time(pcap): 55 | try: 56 | p = rdpcap(pcap) 57 | c = len(p) 58 | start = datetime.datetime.fromtimestamp(p[0].time).strftime('%Y-%m-%d %H:%M:%S.%f') 59 | end = datetime.datetime.fromtimestamp(p[c -1].time).strftime('%Y-%m-%d %H:%M:%S.%f') 60 | return [start, end] 61 | except Exception as e: 62 | return str(e) 63 | 64 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SneakersInc/sniffmypacketsv2/55d8ff70eedb4dd948351425c25a1e904ea6d50e/src/sniffmypacketsv2/transforms/common/protocols/__init__.py -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/ftp.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import string 3 | 4 | from scapy.layers.inet import * 5 | 6 | import dissector 7 | 8 | 9 | last_file = "NoName" 10 | 11 | 12 | def name_generator(size=9, chars=string.ascii_uppercase + string.digits): 13 | """ 14 | this method is for generating a randndom name for the downloaded files 15 | @param size: number of random characters 16 | @param chars: type of the random characters 17 | """ 18 | return ''.join(random.choice(chars) for x in range(size)) 19 | 20 | 21 | def clean_file_name(name, path): 22 | """ 23 | this method is for cleaning the carved file name if it has some special chars 24 | which is not allowed in most of the operating systems or if the specified folder 25 | in path variable has another file has the same name. 26 | @param name: the carved file name 27 | @param path: the directory path 28 | """ 29 | ls = list(name) 30 | result = "" 31 | length = len(ls) 32 | files = os.listdir(path) 33 | if len(name) > 25 or name in files or name == "NoName": 34 | return name_generator() 35 | i = 0 36 | while i < length: 37 | if not ls[i].isdigit() and not ls[i].isalpha and not ls[i] == ".": 38 | del(ls[i]) 39 | else: 40 | result = result + ls[i] 41 | i = i + 1 42 | if len(result) > 0: 43 | return result 44 | else: 45 | return name_generator() 46 | 47 | 48 | def add_file(name): 49 | """ 50 | this method is for storing the carved file name. 
51 | @param name: the carved file name 52 | """ 53 | global last_file 54 | ls = [] 55 | if "/" in name: 56 | ls = name.split("/") 57 | if len(ls) > 0: 58 | last_file = ls[len(ls) - 1] 59 | 60 | 61 | def get_file(): 62 | """ 63 | this method is for retrieving the stored file name 64 | """ 65 | return last_file 66 | 67 | # list for maintaining the ftp data sessions 68 | ftpdatasessions = [] 69 | 70 | 71 | def is_created_session(Src, Dst, SPort): 72 | """ 73 | this method returns true if the ftp data session is exist 74 | @param Src: source ip address 75 | @param Dst: destination ip address 76 | @param SPort: source port number 77 | """ 78 | i = 0 79 | while i < len(ftpdatasessions): 80 | if str(Src) and str(Dst) and str(SPort) in ftpdatasessions[i]: 81 | return True 82 | i = i + 1 83 | return False 84 | 85 | 86 | def create_session(Src, Dst, SPort): 87 | """ 88 | this method for creating the ftp data sessions 89 | @param Src: source ip address 90 | @param Dst: destination ip address 91 | @param SPort: source port number 92 | """ 93 | if not is_created_session(Src, Dst, SPort): 94 | ftpdatasessions.append([Src, Dst, SPort]) 95 | return True 96 | return False 97 | 98 | 99 | def bind(Port): 100 | """ 101 | ftp data sessions which get establish after do an agreement 102 | on a specific port number at the server, this port number need 103 | to be bounded by using bind_layers() method 104 | @param Port: source port number at the server side 105 | """ 106 | bind_layers(TCP, FTPData, sport=int(Port)) 107 | 108 | 109 | class FTPDataField(XByteField): 110 | """ 111 | this is a field class for handling the ftp data 112 | @attention: this class inherets XByteField 113 | """ 114 | holds_packets = 1 115 | name = "FtpDataField" 116 | myresult = "" 117 | 118 | def __init__(self, name, default): 119 | """ 120 | FTPDataField constructor, for initializing instance variables 121 | @param name: name of the field 122 | @param default: Scapy has many formats to represent the data 123 | internal, human and machine. anyways you may sit this param to None. 
124 | """ 125 | self.name = name 126 | self.fmt = "!B" 127 | Field.__init__(self, name, default, "!B") 128 | 129 | def getfield(self, pkt, s): 130 | cstream = -1 131 | if pkt.underlayer.name == "TCP": 132 | cstream = dissector.check_stream(\ 133 | pkt.underlayer.underlayer.fields["src"],\ 134 | pkt.underlayer.underlayer.fields["dst"],\ 135 | pkt.underlayer.fields["sport"],\ 136 | pkt.underlayer.fields["dport"],\ 137 | pkt.underlayer.fields["seq"], s) 138 | if not cstream == -1: 139 | s = cstream 140 | if pkt.underlayer.name == "TCP" and cstream == -1: 141 | return "", "" 142 | name = get_file() 143 | if not dissector.Dissector.default_download_folder_changed: 144 | cwd = os.getcwd() + "/downloaded/" 145 | try: 146 | os.mkdir("downloaded") 147 | except: 148 | None 149 | f = open(cwd + clean_file_name(name, cwd), "wb") 150 | else: 151 | f = open(dissector.Dissector.path +\ 152 | clean_file_name(name, dissector.Dissector.path), "wb") 153 | f.write(s) 154 | f.close() 155 | self.myresult = "" 156 | firstb = struct.unpack(self.fmt, s[0])[0] 157 | self.myresult = "" 158 | for c in s: 159 | ustruct = struct.unpack(self.fmt, c) 160 | byte = base64.standard_b64encode(str(ustruct[0])) 161 | self.myresult = self.myresult + byte 162 | if not is_created_session(pkt.underlayer.underlayer.fields["src"], 163 | pkt.underlayer.underlayer.fields["dst"], 164 | pkt.underlayer.fields["sport"]): 165 | return self.myresult, "" 166 | return "", self.myresult 167 | 168 | 169 | class FTPResArgField(StrField): 170 | """ 171 | class field to handle the ftp responses' arguments 172 | @attention: it inherets StrField which is imported from Scapy 173 | """ 174 | holds_packets = 1 175 | name = "FTPResArgField" 176 | 177 | def getfield(self, pkt, s): 178 | """ 179 | this method will get the packet, takes what does need to be 180 | taken and let the remaining go, so it returns two values. 181 | first value which belongs to this field and the second is 182 | the remaining which does need to be dissected with 183 | other "field classes". 184 | @param pkt: holds the whole packet 185 | @param s: holds only the remaining data which is not dissected yet. 186 | """ 187 | value = "" 188 | if "Entering Passive Mode (" in s: 189 | value = [] 190 | res = s.split("Entering Passive Mode (") 191 | res.remove(res[0]) 192 | res = res[0].split(").") 193 | del(res[len(res)-1]) 194 | res = res[0].split(",") 195 | IP = res[0] + "." + res[1] + "." + res[2] + "." + res[3] 196 | Port = str(int(res[4]) * 256 + int(res[5])) 197 | value.append(("Passive IP Address", IP)) 198 | value.append(("Passive Port Number", Port)) 199 | if(create_session(IP, pkt.underlayer.underlayer.fields["dst"], 200 | Port)): 201 | bind(Port) 202 | return "", value 203 | else: 204 | value = s 205 | return "", value 206 | 207 | def __init__(self, name, default, fmt, remain=0): 208 | """ 209 | FTPResArgField constructor for initializing the instance variables 210 | @param name: name of the field 211 | @param default: Scapy has many formats to represent the data 212 | internal, human and machine. anyways you may sit this param to None. 213 | @param fmt: specifying the format, this has been set to "H" 214 | @param remain: this parameter specifies the size of the remaining 215 | data so make it 0 to handle all of the data. 
216 | """ 217 | self.name = name 218 | StrField.__init__(self, name, default, fmt, remain) 219 | 220 | 221 | class FTPResField(StrField): 222 | """ 223 | class field to handle the ftp responses 224 | @attention: it inherets StrField which is imported from Scapy 225 | """ 226 | holds_packets = 1 227 | name = "FTPReField" 228 | 229 | def get_code_msg(self, cn): 230 | """ 231 | method which returns message for a ftp code number 232 | @param cn: code number 233 | """ 234 | codes = { 235 | "110": "Restart marker reply", 236 | "120": "Service ready in nnn minutes", 237 | "125": "Data connection already open; transfer starting", 238 | "150": "File status okay; about to open data connection", 239 | "200": "Command okay", 240 | "202": "Command not implemented, superfluous at this site", 241 | "211": "System status, or system help reply", 242 | "212": "Directory status", 243 | "213": "File status", 244 | "214": "Help message", 245 | "215": "NAME system type", 246 | "220": "Service ready for new user", 247 | "221": "Service closing control connection", 248 | "225": "Data connection open; no transfer in progress", 249 | "226": "Closing data connection", 250 | "227": "Entering Passive Mode", 251 | "230": "User logged in proceed", 252 | "250": "Requested file action okay completed", 253 | "257": "PATHNAME created", 254 | "331": "User name okay need password", 255 | "332": "Need account for login", 256 | "350": "Requested file action pending further information", 257 | "421": "Service not available closing control connection", 258 | "425": "Can't open data connection", 259 | "426": "Connection closed; transfer aborted", 260 | "450": "Requested file action not taken", 261 | "451": "Requested action aborted: local error in processing", 262 | "452": "Requested action not taken. Insufficient storage space in system", 263 | "500": "Syntax error command unrecognized", 264 | "501": "Syntax error in parameters or arguments", 265 | "502": "Command not implemented", 266 | "503": "Bad sequence of commands", 267 | "504": "Command not implemented for that parameter", 268 | "530": "Not logged in", 269 | "532": "Need account for storing files", 270 | "550": "Requested action not taken: File unavailable", 271 | "551": "Requested action aborted: page type unknown", 272 | "552": "Requested file action aborted: Exceeded storage allocation", 273 | "553": "Requested action not taken: File name not allowed", 274 | } 275 | if cn in codes: 276 | return codes[cn] 277 | return "" 278 | 279 | def getfield(self, pkt, s): 280 | """ 281 | this method will get the packet, takes what does need to be 282 | taken and let the remaining go, so it returns two values. 283 | first value which belongs to this field and the second is 284 | the remaining which does need to be dissected with 285 | other "field classes". 286 | @param pkt: holds the whole packet 287 | @param s: holds only the remaining data which is not dissected yet. 
288 | """ 289 | remain = "" 290 | value = "" 291 | ls = s.split() 292 | length = len(ls) 293 | if length > 1: 294 | value = self.get_code_msg(ls[0]) + " (" + ls[0] + ")" 295 | if length == 2: 296 | remain = ls[1] 297 | return remain, value 298 | else: 299 | i = 1 300 | remain = "" 301 | while i < length: 302 | if i != 1: 303 | remain = remain + " " + ls[i] 304 | elif i == 1: 305 | remain = remain + ls[i] 306 | i = i + 1 307 | return remain, value 308 | else: 309 | return "", self.get_code_msg(ls[0]) + " (" + ls[0] + ")" 310 | 311 | def __init__(self, name, default, fmt, remain=0): 312 | """ 313 | class constructor for initializing the instance variables 314 | @param name: name of the field 315 | @param default: Scapy has many formats to represent the data 316 | internal, human and machine. anyways you may sit this param to None. 317 | @param fmt: specifying the format, this has been set to "H" 318 | @param remain: this parameter specifies the size of the remaining 319 | data so make it 0 to handle all of the data. 320 | """ 321 | self.name = name 322 | StrField.__init__(self, name, default, fmt, remain) 323 | 324 | 325 | class FTPReqField(StrField): 326 | holds_packets = 1 327 | name = "FTPReqField" 328 | 329 | def getfield(self, pkt, s): 330 | """ 331 | this method will get the packet, takes what does need to be 332 | taken and let the remaining go, so it returns two values. 333 | first value which belongs to this field and the second is 334 | the remaining which does need to be dissected with 335 | other "field classes". 336 | @param pkt: holds the whole packet 337 | @param s: holds only the remaining data which is not dissected yet. 338 | """ 339 | remain = "" 340 | value = "" 341 | ls = s.split() 342 | if ls[0].lower() == "retr": 343 | c = 1 344 | file = "" 345 | while c < len(ls): 346 | file = file + ls[c] 347 | c = c + 1 348 | if len(file) > 0: 349 | add_file(file) 350 | length = len(ls) 351 | if length > 1: 352 | value = ls[0] 353 | if length == 2: 354 | remain = ls[1] 355 | return remain, value 356 | else: 357 | i = 1 358 | remain = "" 359 | while i < length: 360 | remain = remain + ls[i] + " " 361 | i = i + 1 362 | return remain, value 363 | else: 364 | return "", ls[0] 365 | 366 | def __init__(self, name, default, fmt, remain=0): 367 | """ 368 | class constructor for initializing the instance variables 369 | @param name: name of the field 370 | @param default: Scapy has many formats to represent the data 371 | internal, human and machine. anyways you may sit this param to None. 372 | @param fmt: specifying the format, this has been set to "H" 373 | @param remain: this parameter specifies the size of the remaining 374 | data so make it 0 to handle all of the data. 
375 | """ 376 | self.name = name 377 | StrField.__init__(self, name, default, fmt, remain) 378 | 379 | 380 | class FTPData(Packet): 381 | """ 382 | class for dissecting the ftp data 383 | @attention: it inherets Packet class from Scapy library 384 | """ 385 | name = "ftp" 386 | fields_desc = [FTPDataField("data", "")] 387 | 388 | 389 | class FTPResponse(Packet): 390 | """ 391 | class for dissecting the ftp responses 392 | @attention: it inherets Packet class from Scapy library 393 | """ 394 | name = "ftp" 395 | fields_desc = [FTPResField("command", "", "H"), 396 | FTPResArgField("argument", "", "H")] 397 | 398 | 399 | class FTPRequest(Packet): 400 | """ 401 | class for dissecting the ftp requests 402 | @attention: it inherets Packet class from Scapy library 403 | """ 404 | name = "ftp" 405 | fields_desc = [FTPReqField("command", "", "H"), 406 | StrField("argument", "", "H")] 407 | 408 | bind_layers(TCP, FTPResponse, sport=21) 409 | bind_layers(TCP, FTPRequest, dport=21) 410 | bind_layers(TCP, FTPData, dport=20) 411 | bind_layers(TCP, FTPData, dport=20) 412 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/http.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import os 3 | import string 4 | import random 5 | from scapy.packet import * 6 | from scapy.fields import * 7 | from scapy.ansmachine import * 8 | from scapy.layers.inet import * 9 | import dissector 10 | 11 | 12 | downloaded_files = [] 13 | 14 | 15 | def name_generator(size=9, chars=string.ascii_uppercase + string.digits): 16 | """ 17 | this method is for generating a randndom name for the downloaded files 18 | @param size: number of random characters 19 | @param chars: type of the random characters 20 | """ 21 | return ''.join(random.choice(chars) for x in range(size)) 22 | 23 | 24 | def clean_file_name(name, path): 25 | """ 26 | this method is for cleaning the carved file name if it has some special chars 27 | which is not allowed in most of the operating systems or if the specified folder 28 | in path variable has another file has the same name. 29 | @param name: the carved file name 30 | @param path: the directory path 31 | """ 32 | ls = list(name) 33 | result = "" 34 | length = len(ls) 35 | files = os.listdir(path) 36 | if len(name) > 25 or name in files or name == "NoName": 37 | return name_generator() 38 | i = 0 39 | while i < length: 40 | if ls[i].isalnum() or ls[i] == ".": 41 | result = result + ls[i] 42 | i = i + 1 43 | if len(result) > 0: 44 | return result 45 | else: 46 | return name_generator() 47 | 48 | 49 | def add_file(Src, Dst, SPort, DPort, name, seq): 50 | """ 51 | this method is for storing the carved file name. 
52 | @param Src: source ip address 53 | @param Dst: destination ip address 54 | @param SPort: source port number 55 | @param DPort: destination port number 56 | @param name: the carved file name 57 | @param seq: sequence number 58 | """ 59 | downloaded_files.append((Src, Dst, SPort, DPort, name[1:], seq)) 60 | 61 | 62 | def get_file(Src, Dst, SPort, DPort, ack): 63 | """ 64 | this method is for retrieving the stored file name 65 | @param Src: source ip address 66 | @param Dst: destination ip address 67 | @param SPort: source port number 68 | @param DPort: destination port number 69 | @param ack: acknowledgment number 70 | """ 71 | for element in downloaded_files: 72 | if Src == element[1] and Dst == element[0] and\ 73 | SPort == element[3] and DPort == element[2] and\ 74 | ack == element[5]: 75 | return element[4] 76 | return "NoName" 77 | 78 | 79 | class HTTPReqField(StrField): 80 | """ 81 | field class for handling http requests 82 | @attention: it inherets StrField from Scapy library 83 | """ 84 | holds_packets = 1 85 | name = "HTTPReqField" 86 | 87 | def getfield(self, pkt, s): 88 | """ 89 | this method will get the packet, takes what does need to be 90 | taken and let the remaining go, so it returns two values. 91 | first value which belongs to this field and the second is 92 | the remaining which does need to be dissected with 93 | other "field classes". 94 | @param pkt: holds the whole packet 95 | @param s: holds only the remaining data which is not dissected yet. 96 | """ 97 | cstream = -1 98 | if pkt.underlayer.name == "TCP": 99 | cstream = dissector.check_stream(\ 100 | pkt.underlayer.underlayer.fields["src"],\ 101 | pkt.underlayer.underlayer.fields["dst"],\ 102 | pkt.underlayer.fields["sport"], pkt.underlayer.fields["dport"],\ 103 | pkt.underlayer.fields["seq"], s) 104 | if not cstream == -1: 105 | s = cstream 106 | if pkt.underlayer.name == "TCP" and cstream == -1: 107 | return "", "" 108 | remain = "" 109 | value = "" 110 | if self.name == "request-line: ": 111 | ls = s.splitlines(True) 112 | f = ls[0].split() 113 | length = len(f) 114 | if length == 3: 115 | value = "Method:" + f[0] + ", Request-URI:" +\ 116 | f[1] + ", HTTP-Version:" + f[2] 117 | HTTPMethodsRFC2616 = ['get','post','options','head','put','delete','trace','connect'] 118 | #HTTP methods as per rfc2616 http://www.ietf.org/rfc/rfc2616 119 | #There are other methods in other RFCs but nobody cares about those. 
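                # Editor's comment, not in the upstream source: for a recognised
                # method, add_file() stores the Request-URI keyed by the TCP/IP
                # 4-tuple and by seq + len(s), i.e. the ACK number the server's
                # response will carry; HTTPMsgField.getfield() later calls
                # get_file() with that ACK to name the carved response body.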
120 | if f[0].lower() in HTTPMethodsRFC2616: 121 | add_file(pkt.underlayer.underlayer.fields["src"],\ 122 | pkt.underlayer.underlayer.fields["dst"],\ 123 | pkt.underlayer.fields["sport"],\ 124 | pkt.underlayer.fields["dport"], f[1],\ 125 | pkt.underlayer.fields["seq"] + len(s)) 126 | ls.remove(ls[0]) 127 | for element in ls: 128 | remain = remain + element 129 | return remain, value 130 | return s, "" 131 | 132 | 133 | class HTTPResField(StrField): 134 | """ 135 | field class for handling http requests 136 | @attention: it inherets StrField from Scapy library 137 | """ 138 | holds_packets = 1 139 | name = "HTTPResField" 140 | fin = False 141 | 142 | def get_code_msg(self, cn): 143 | """ 144 | method returns the message for the http code number 145 | @param cn: code number 146 | """ 147 | codes = { 148 | "100": "Continue", 149 | "101": "Switching Protocols", 150 | "102": "Processing", 151 | "199": "Informational - Others", 152 | "200": "OK", 153 | "201": "Created", 154 | "202": "Accepted", 155 | "203": "Non-Authoritative Information", 156 | "204": "No Content", 157 | "205": "Reset Content", 158 | "206": "Partial Content", 159 | "207": "Multi-Status", 160 | "299": "Success - Others", 161 | "300": "Multiple Choices", 162 | "301": "Moved Permanently", 163 | "302": "Moved Temporarily", 164 | "303": "See Other", 165 | "304": "Not Modified", 166 | "305": "Use Proxy", 167 | "306": "(Unused)", 168 | "307": "Temporary Redirect", 169 | "399": "Redirection - Others", 170 | "400": "Bad Request", 171 | "401": "Unauthorized", 172 | "402": "Payment Required", 173 | "403": "Forbidden", 174 | "404": "Not Found", 175 | "405": "Method Not Allowed", 176 | "406": "Not Acceptable", 177 | "407": "Proxy Authentication Required", 178 | "408": "Request Time-out", 179 | "409": "Conflict", 180 | "410": "Gone", 181 | "411": "Length Required", 182 | "412": "Precondition Failed", 183 | "413": "Request Entity Too Large", 184 | "414": "Request-URI Too Large", 185 | "415": "Unsupported Media Type", 186 | "416": "Requested Range Not Satisfiable", 187 | "417": "Expectation Failed", 188 | "422": "Unprocessable Entity", 189 | "423": "Locked", 190 | "424": "Failed Dependency", 191 | "499": "Client Error - Others", 192 | "500": "Internal Server Error", 193 | "501": "Not Implemented", 194 | "502": "Bad Gateway", 195 | "503": "Service Unavailable", 196 | "504": "Gateway Time-out", 197 | "505": "HTTP Version not supported", 198 | "599": "Server Error - Others"} 199 | 200 | if cn in codes: 201 | return codes[cn] 202 | return "" 203 | 204 | def getfield(self, pkt, s): 205 | """ 206 | this method will get the packet, takes what does need to be 207 | taken and let the remaining go, so it returns two values. 208 | first value which belongs to this field and the second is 209 | the remaining which does need to be dissected with 210 | other "field classes". 211 | @param pkt: holds the whole packet 212 | @param s: holds only the remaining data which is not dissected yet. 
213 | """ 214 | seq = pkt.underlayer.fields["seq"] 215 | cstream = -1 216 | if pkt.underlayer.name == "TCP": 217 | cstream = dissector.check_stream(\ 218 | pkt.underlayer.underlayer.fields["src"],\ 219 | pkt.underlayer.underlayer.fields["dst"],\ 220 | pkt.underlayer.fields["sport"],\ 221 | pkt.underlayer.fields["dport"],\ 222 | pkt.underlayer.fields["seq"], s) 223 | if not cstream == -1: 224 | s = cstream 225 | if pkt.underlayer.name == "TCP" and cstream == -1: 226 | return "", "" 227 | remain = "" 228 | value = "" 229 | if self.name == "status-line: " and s.startswith("HTTP/"): 230 | ls = s.splitlines(True) 231 | f = ls[0].split() 232 | length = len(f) 233 | if length == 3: 234 | value = "HTTP-Version:" + f[0] + ", Status-Code:" +\ 235 | f[1] + ", Reason-Phrase:" + f[2] 236 | ls.remove(ls[0]) 237 | for element in ls: 238 | remain = remain + element 239 | return remain, value 240 | return s, "" 241 | 242 | 243 | #class HTTPMsgField(XByteField): 244 | class HTTPMsgField(XByteField): 245 | """ 246 | field class for handling http body 247 | @attention: it inherets XByteField from Scapy library 248 | """ 249 | holds_packets = 1 250 | name = "HTTPMsgField" 251 | myresult = "" 252 | 253 | def __init__(self, name, default): 254 | """ 255 | class constructor, for initializing instance variables 256 | @param name: name of the field 257 | @param default: Scapy has many formats to represent the data 258 | internal, human and machine. anyways you may sit this param to None. 259 | """ 260 | self.name = name 261 | self.fmt = "!B" 262 | Field.__init__(self, name, default, "!B") 263 | 264 | def getfield(self, pkt, s): 265 | """ 266 | this method will get the packet, takes what does need to be 267 | taken and let the remaining go, so it returns two values. 268 | first value which belongs to this field and the second is 269 | the remaining which does need to be dissected with 270 | other "field classes". 271 | @param pkt: holds the whole packet 272 | @param s: holds only the remaining data which is not dissected yet. 273 | """ 274 | if s.startswith("\r\n"): 275 | s = s.lstrip("\r\n") 276 | if s == "": 277 | return "", "" 278 | name = get_file(pkt.underlayer.underlayer.fields["src"],\ 279 | pkt.underlayer.underlayer.fields["dst"],\ 280 | pkt.underlayer.fields["sport"],\ 281 | pkt.underlayer.fields["dport"],\ 282 | pkt.underlayer.fields["ack"]) 283 | if pkt.underlayer.fields["sport"] == 80: 284 | if not dissector.Dissector.default_download_folder_changed: 285 | cwd = os.getcwd() + "/downloaded/" 286 | try: 287 | os.mkdir("downloaded") 288 | except: 289 | None 290 | f = open(cwd + clean_file_name(name, cwd), "wb") 291 | else: 292 | f = open(dissector.Dissector.path +\ 293 | clean_file_name(name, dissector.Dissector.path), "wb") 294 | f.write(s) 295 | f.close() 296 | self.myresult = "" 297 | for c in s: 298 | self.myresult = self.myresult + base64.standard_b64encode(c) 299 | 300 | if self.myresult[-1:] == " ": 301 | self.myresult = self.myresult.rstrip() 302 | return "", self.myresult 303 | 304 | 305 | class HTTPField(StrField): 306 | """ 307 | field class for handling http fields 308 | @attention: it inherets StrField from Scapy library 309 | """ 310 | holds_packets = 1 311 | name = "HTTPField" 312 | 313 | def getfield(self, pkt, s): 314 | """ 315 | this method will get the packet, takes what does need to be 316 | taken and let the remaining go, so it returns two values. 
317 | first value which belongs to this field and the second is 318 | the remaining which does need to be dissected with 319 | other "field classes". 320 | @param pkt: holds the whole packet 321 | @param s: holds only the remaining data which is not dissected yet. 322 | """ 323 | if self.name == "unknown-header(s): ": 324 | remain = "" 325 | value = [] 326 | ls = s.splitlines(True) 327 | i = -1 328 | for element in ls: 329 | i = i + 1 330 | if element == "\r\n": 331 | return s, [] 332 | elif element != "\r\n"\ 333 | and (": " in element[:10])\ 334 | and (element[-2:] == "\r\n"): 335 | value.append(element) 336 | ls.remove(ls[i]) 337 | remain = "" 338 | unknown = True 339 | for element in ls: 340 | if element != "\r\n" and (": " in element[:15])\ 341 | and (element[-2:] == "\r\n") and unknown: 342 | value.append(element) 343 | else: 344 | unknown = False 345 | remain = remain + element 346 | return remain, value 347 | return s, [] 348 | 349 | remain = "" 350 | value = "" 351 | ls = s.splitlines(True) 352 | i = -1 353 | for element in ls: 354 | i = i + 1 355 | if element.upper().startswith(self.name.upper()): 356 | value = element 357 | value = value.strip(self.name) 358 | ls.remove(ls[i]) 359 | remain = "" 360 | for element in ls: 361 | remain = remain + element 362 | return remain, value 363 | return s, "" 364 | 365 | def __init__(self, name, default, fmt, remain=0): 366 | """ 367 | class constructor for initializing the instance variables 368 | @param name: name of the field 369 | @param default: Scapy has many formats to represent the data 370 | internal, human and machine. anyways you may sit this param to None. 371 | @param fmt: specifying the format, this has been set to "H" 372 | @param remain: this parameter specifies the size of the remaining 373 | data so make it 0 to handle all of the data. 
374 | """ 375 | self.name = name 376 | StrField.__init__(self, name, default, fmt, remain) 377 | 378 | 379 | class HTTPRequest(Packet): 380 | """ 381 | class for handling http requests 382 | @attention: it inherets Packet from Scapy library 383 | """ 384 | name = "http" 385 | fields_desc = [HTTPReqField("request-line: ", "", "H"), 386 | HTTPField("cache-control: ", "", "H"), 387 | HTTPField("connection: ", "", "H"), 388 | HTTPField("date: ", "", "H"), 389 | HTTPField("pragma: ", "", "H"), 390 | HTTPField("trailer: ", "", "H"), 391 | HTTPField("transfer-encoding: ", "", "H"), 392 | HTTPField("upgrade: ", "", "H"), 393 | HTTPField("dnt: ", "", "H"), 394 | HTTPField("x-requested-with: ", "", "H"), 395 | HTTPField("via: ", "", "H"), 396 | HTTPField("Warning: ", "", "H"), 397 | HTTPField("accept: ", "", "H"), 398 | HTTPField("accept-encoding: ", "", "H"), 399 | HTTPField("accept-language: ", "", "H"), 400 | HTTPField("content-length: ", "", "H"), 401 | HTTPField("accept-charset: ", "", "H"), 402 | HTTPField("expect: ", "", "H"), 403 | HTTPField("authorization: ", "", "H"), 404 | HTTPField("accept-datetime: ", "", "H"), 405 | HTTPField("from: ", "", "H"), 406 | HTTPField("host: ", "", "H"), 407 | HTTPField("if-match: ", "", "H"), 408 | HTTPField("if-modified-since: ", "", "H"), 409 | HTTPField("iIf-none-match: ", "", "H"), 410 | HTTPField("if-range: ", "", "H"), 411 | HTTPField("if-unmodified-since: ", "", "H"), 412 | HTTPField("max-forwards: ", "", "H"), 413 | HTTPField("proxy-authorization: ", "", "H"), 414 | HTTPField("range: ", "", "H"), 415 | HTTPField("referer: ", "", "H"), 416 | HTTPField("te: ", "", "H"), 417 | HTTPField("user-agent: ", "", "H"), 418 | HTTPField("link: ", "", "H"), 419 | HTTPField("mime-version: ", "", "H"), 420 | HTTPField("title: ", "", "H"), 421 | HTTPField("uri: ", "", "H"), 422 | HTTPField("cookie: ", "", "H"), 423 | HTTPField("set-cookie: ", "", "H"), 424 | HTTPField("x-forwarded-for: ", "", "H"), 425 | HTTPField("keep-alive: ", "", "H"), 426 | HTTPField("unknown-header(s): ", "", "H"), 427 | HTTPMsgField("message-body: ", "")] 428 | 429 | 430 | class HTTPResponse(Packet): 431 | """ 432 | class for handling http responses 433 | @attention: it inherets Packet from Scapy library 434 | """ 435 | name = "http" 436 | fields_desc = [HTTPResField("status-line: ", "", "H"),#responses123 437 | HTTPField("cache-control: ", "", "H"), 438 | HTTPField("connection: ", "", "H"), 439 | HTTPField("date: ", "", "H"), 440 | HTTPField("pragma: ", "", "H"), 441 | HTTPField("access-control-allow-origin: ", "", "H"), 442 | HTTPField("trailer: ", "", "H"), 443 | HTTPField("transfer-encoding: ", "", "H"), 444 | HTTPField("upgrade: ", "", "H"), 445 | HTTPField("via: ", "", "H"), 446 | HTTPField("warning: ", "", "H"), 447 | HTTPField("accept-ranges: ", "", "H"), 448 | HTTPField("age: ", "", "H"), 449 | HTTPField("etag: ", "", "H"), 450 | HTTPField("location: ", "", "H"), 451 | HTTPField("proxy-authenticate: ", "", "H"), 452 | HTTPField("retry-after: ", "", "H"), 453 | HTTPField("server: ", "", "H"), 454 | HTTPField("vary: ", "", "H"), 455 | HTTPField("allow: ", "", "H"), 456 | HTTPField("content-encoding: ", "", "H"), 457 | HTTPField("content-language: ", "", "H"), 458 | HTTPField("content-length: ", "", "H"), 459 | HTTPField("content-disposition: ", "", "H"), 460 | HTTPField("strict-transport-security: ", "", "H"), 461 | HTTPField("www-authenticate: ", "", "H"), 462 | HTTPField("x-frame-options: ", "", "H"), 463 | HTTPField("x-xss-protection: ", "", "H"), 464 | HTTPField("x-powered-by: ", 
"", "H"), 465 | HTTPField("content-security-policy: ", "", "H"), 466 | HTTPField("x-content-security-policy: ", "", "H"), 467 | HTTPField("x-webkit-csp: ", "", "H"), 468 | HTTPField("x-ua-compatible: ", "", "H"), 469 | HTTPField("x-content-type-options: ", "", "H"), 470 | HTTPField("x-ua-compatible: ", "", "H"), 471 | HTTPField("refresh: ", "", "H"), 472 | HTTPField("content-md5: ", "", "H"), 473 | HTTPField("content-range: ", "", "H"), 474 | HTTPField("content-type: ", "", "H"), 475 | HTTPField("expires: ", "", "H"), 476 | HTTPField("last-modified: ", "", "H"), 477 | HTTPField("extension-header: ", "", "H"), 478 | HTTPField("link: ", "", "H"), 479 | HTTPField("mime-version: ", "", "H"), 480 | HTTPField("retry-after: ", "", "H"), 481 | HTTPField("title: ", "", "H"), 482 | HTTPField("uri: ", "", "H"), 483 | HTTPField("public: ", "", "H"), 484 | HTTPField("accept-patch: ", "", "H"), 485 | HTTPField("cookie: ", "", "H"), 486 | HTTPField("set-cookie: ", "", "H"), 487 | #HTTPField("x-forwarded-for: ", "", "H"), X-Forwarded for is not a response header, it's a request 488 | HTTPField("keep-alive: ", "", "H"), 489 | HTTPField("unknown-header(s): ", "", "H"), 490 | HTTPMsgField("message-body: ", "")] 491 | 492 | 493 | bind_layers(TCP, HTTPResponse, sport=80) 494 | bind_layers(TCP, HTTPRequest, dport=80) -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/imap.py: -------------------------------------------------------------------------------- 1 | from scapy.layers.inet import * 2 | from scapy.packet import * 3 | from scapy.fields import * 4 | 5 | import dissector 6 | 7 | 8 | class IMAPField(StrField): 9 | """ 10 | field class for handling imap packets 11 | @attention: it inherets StrField from Scapy library 12 | """ 13 | holds_packets = 1 14 | name = "IMAPField" 15 | 16 | def getfield(self, pkt, s): 17 | """ 18 | this method will get the packet, takes what does need to be 19 | taken and let the remaining go, so it returns two values. 20 | first value which belongs to this field and the second is 21 | the remaining which does need to be dissected with 22 | other "field classes". 23 | @param pkt: holds the whole packet 24 | @param s: holds only the remaining data which is not dissected yet. 
25 | """ 26 | cstream = -1 27 | if pkt.underlayer.name == "TCP": 28 | cstream = dissector.check_stream(\ 29 | pkt.underlayer.underlayer.fields["src"],\ 30 | pkt.underlayer.underlayer.fields["dst"],\ 31 | pkt.underlayer.fields["sport"],\ 32 | pkt.underlayer.fields["dport"],\ 33 | pkt.underlayer.fields["seq"], s) 34 | if not cstream == -1: 35 | s = cstream 36 | remain = "" 37 | value = "" 38 | ls = s.splitlines() 39 | myresult = "" 40 | lslen = len(ls) 41 | i = 0 42 | k = 0 43 | for line in ls: 44 | k = k + 1 45 | ls2 = line.split() 46 | length = len(ls2) 47 | if length > 1: 48 | value = ls2[0] 49 | c = 1 50 | remain = "" 51 | while c < length: 52 | remain = remain + ls2[c] + " " 53 | c = c + 1 54 | if self.name.startswith("request"): 55 | myresult = myresult + "Request Tag: " +\ 56 | value + ", Request Argument: " + remain 57 | if k < lslen: 58 | myresult = myresult + " | " 59 | if self.name.startswith("response"): 60 | myresult = myresult + "Response Tag: " +\ 61 | value + ", Response Argument: " + remain 62 | if k < lslen: 63 | myresult = myresult + " | " 64 | i = i + 1 65 | if i == lslen: 66 | return "", myresult 67 | 68 | def __init__(self, name, default, fmt, remain=0): 69 | """ 70 | class constructor for initializing the instance variables 71 | @param name: name of the field 72 | @param default: Scapy has many formats to represent the data 73 | internal, human and machine. anyways you may sit this param to None. 74 | @param fmt: specifying the format, this has been set to "H" 75 | @param remain: this parameter specifies the size of the remaining 76 | data so make it 0 to handle all of the data. 77 | """ 78 | self.name = name 79 | StrField.__init__(self, name, default, fmt, remain) 80 | 81 | 82 | class IMAPRes(Packet): 83 | """ 84 | class for handling imap responses 85 | @attention: it inherets Packet from Scapy library 86 | """ 87 | name = "imap" 88 | fields_desc = [IMAPField("response", "", "H")] 89 | 90 | 91 | class IMAPReq(Packet): 92 | """ 93 | class for handling imap requests 94 | @attention: it inherets Packet from Scapy library 95 | """ 96 | name = "imap" 97 | fields_desc = [IMAPField("request", "", "H")] 98 | 99 | 100 | bind_layers(TCP, IMAPReq, dport=143) 101 | bind_layers(TCP, IMAPRes, sport=143) 102 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/irc.py: -------------------------------------------------------------------------------- 1 | from scapy.layers.inet import * 2 | 3 | import dissector 4 | 5 | 6 | class IRCResField(StrField): 7 | """ 8 | field class for handling irc responses 9 | @attention: it inherets StrField from Scapy library 10 | """ 11 | holds_packets = 1 12 | name = "IRCResField" 13 | 14 | def getfield(self, pkt, s): 15 | """ 16 | this method will get the packet, takes what does need to be 17 | taken and let the remaining go, so it returns two values. 18 | first value which belongs to this field and the second is 19 | the remaining which does need to be dissected with 20 | other "field classes". 21 | @param pkt: holds the whole packet 22 | @param s: holds only the remaining data which is not dissected yet. 
23 | """ 24 | cstream = -1 25 | if pkt.underlayer.name == "TCP": 26 | cstream = dissector.check_stream(\ 27 | pkt.underlayer.underlayer.fields["src"],\ 28 | pkt.underlayer.underlayer.fields["dst"],\ 29 | pkt.underlayer.fields["sport"],\ 30 | pkt.underlayer.fields["dport"],\ 31 | pkt.underlayer.fields["seq"], s) 32 | if not cstream == -1: 33 | s = cstream 34 | value = "" 35 | ls = s.split("\r\n") 36 | length = len(ls) 37 | if length == 1: 38 | return "", value 39 | elif length > 1: 40 | value = "" 41 | value = value + "response: " + ls[0] 42 | i = 1 43 | while i < length - 1: 44 | value = value + " response: " + ls[i] 45 | if i < length - 2: 46 | value = value + " | " 47 | i = i + 1 48 | return "", value 49 | else: 50 | return "", "" 51 | 52 | def __init__(self, name, default, fmt, remain=0): 53 | """ 54 | class constructor for initializing the instance variables 55 | @param name: name of the field 56 | @param default: Scapy has many formats to represent the data 57 | internal, human and machine. anyways you may sit this param to None. 58 | @param fmt: specifying the format, this has been set to "H" 59 | @param remain: this parameter specifies the size of the remaining 60 | data so make it 0 to handle all of the data. 61 | """ 62 | self.name = name 63 | StrField.__init__(self, name, default, fmt, remain) 64 | 65 | 66 | class IRCReqField(StrField): 67 | """ 68 | field class for handling irc requests 69 | @attention: it inherets StrField from Scapy library 70 | """ 71 | holds_packets = 1 72 | name = "IRCReqField" 73 | 74 | def getfield(self, pkt, s): 75 | """ 76 | this method will get the packet, takes what does need to be 77 | taken and let the remaining go, so it returns two values. 78 | first value which belongs to this field and the second is 79 | the remaining which does need to be dissected with 80 | other "field classes". 81 | @param pkt: holds the whole packet 82 | @param s: holds only the remaining data which is not dissected yet. 83 | """ 84 | cstream = -1 85 | if pkt.underlayer.name == "TCP": 86 | cstream = dissector.check_stream(\ 87 | pkt.underlayer.underlayer.fields["src"],\ 88 | pkt.underlayer.underlayer.fields["dst"],\ 89 | pkt.underlayer.fields["sport"],\ 90 | pkt.underlayer.fields["dport"],\ 91 | pkt.underlayer.fields["seq"], s) 92 | if not cstream == -1: 93 | s = cstream 94 | remain = "" 95 | value = "" 96 | ls = s.split() 97 | length = len(ls) 98 | if length > 1: 99 | value = "command: " + ls[0] + "," 100 | if length == 2: 101 | remain = ls[1] 102 | value = value + " Parameters: " + remain 103 | return "", value 104 | else: 105 | i = 1 106 | remain = "" 107 | while i < length: 108 | if i != 1: 109 | remain = remain + " " + ls[i] 110 | else: 111 | remain = remain + ls[i] 112 | i = i + 1 113 | value = value + " Parameters: " + remain 114 | return "", value 115 | else: 116 | return "", ls[0] 117 | 118 | def __init__(self, name, default, fmt, remain=0): 119 | """ 120 | class constructor for initializing the instance variables 121 | @param name: name of the field 122 | @param default: Scapy has many formats to represent the data 123 | internal, human and machine. anyways you may sit this param to None. 124 | @param fmt: specifying the format, this has been set to "H" 125 | @param remain: this parameter specifies the size of the remaining 126 | data so make it 0 to handle all of the data. 
127 | """ 128 | self.name = name 129 | StrField.__init__(self, name, default, fmt, remain) 130 | 131 | 132 | class IRCRes(Packet): 133 | """ 134 | class for handling irc responses 135 | @attention: it inherets Packet from Scapy library 136 | """ 137 | name = "irc" 138 | fields_desc = [IRCResField("response", "", "H")] 139 | 140 | 141 | class IRCReq(Packet): 142 | """ 143 | class for handling irc requests 144 | @attention: it inherets Packet from Scapy library 145 | """ 146 | name = "irc" 147 | fields_desc = [IRCReqField("command", "", "H")] 148 | 149 | bind_layers(TCP, IRCReq, dport=6660) 150 | bind_layers(TCP, IRCReq, dport=6661) 151 | bind_layers(TCP, IRCReq, dport=6662) 152 | bind_layers(TCP, IRCReq, dport=6663) 153 | bind_layers(TCP, IRCReq, dport=6664) 154 | bind_layers(TCP, IRCReq, dport=6665) 155 | bind_layers(TCP, IRCReq, dport=6666) 156 | bind_layers(TCP, IRCReq, dport=6667) 157 | bind_layers(TCP, IRCReq, dport=6668) 158 | bind_layers(TCP, IRCReq, dport=6669) 159 | bind_layers(TCP, IRCReq, dport=7000) 160 | bind_layers(TCP, IRCReq, dport=194) 161 | bind_layers(TCP, IRCReq, dport=6697) 162 | 163 | 164 | bind_layers(TCP, IRCRes, sport=6660) 165 | bind_layers(TCP, IRCRes, sport=6661) 166 | bind_layers(TCP, IRCRes, sport=6662) 167 | bind_layers(TCP, IRCRes, sport=6663) 168 | bind_layers(TCP, IRCRes, sport=6664) 169 | bind_layers(TCP, IRCRes, sport=6665) 170 | bind_layers(TCP, IRCRes, sport=6666) 171 | bind_layers(TCP, IRCRes, sport=6667) 172 | bind_layers(TCP, IRCRes, sport=6668) 173 | bind_layers(TCP, IRCRes, sport=6669) 174 | bind_layers(TCP, IRCRes, sport=7000) 175 | bind_layers(TCP, IRCRes, sport=194) 176 | bind_layers(TCP, IRCRes, sport=6697) 177 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/pop.py: -------------------------------------------------------------------------------- 1 | from scapy.layers.inet import * 2 | 3 | import dissector 4 | 5 | 6 | class POPField(StrField): 7 | """ 8 | field class for handling pop requests 9 | @attention: it inherets StrField from Scapy library 10 | """ 11 | holds_packets = 1 12 | name = "POPField" 13 | 14 | def getfield(self, pkt, s): 15 | """ 16 | this method will get the packet, takes what does need to be 17 | taken and let the remaining go, so it returns two values. 18 | first value which belongs to this field and the second is 19 | the remaining which does need to be dissected with 20 | other "field classes". 21 | @param pkt: holds the whole packet 22 | @param s: holds only the remaining data which is not dissected yet. 
23 | """ 24 | cstream = -1 25 | if pkt.underlayer.name == "TCP": 26 | cstream = dissector.check_stream(\ 27 | pkt.underlayer.underlayer.fields["src"],\ 28 | pkt.underlayer.underlayer.fields["dst"],\ 29 | pkt.underlayer.fields["sport"],\ 30 | pkt.underlayer.fields["dport"],\ 31 | pkt.underlayer.fields["seq"], s) 32 | if not cstream == -1: 33 | s = cstream 34 | remain = "" 35 | value = "" 36 | ls = s.splitlines() 37 | myresult = [] 38 | lslen = len(ls) 39 | i = 0 40 | k = 0 41 | for line in ls: 42 | k = k + 1 43 | ls2 = line.split() 44 | length = len(ls2) 45 | if length > 1: 46 | value = ls2[0] 47 | c = 1 48 | remain = "" 49 | while c < length: 50 | remain = remain + ls2[c] + " " 51 | c = c + 1 52 | if self.name.startswith("request"): 53 | myresult = myresult + "Request Command: " + value +\ 54 | ", Request Parameter(s): " + remain 55 | if k < lslen: 56 | myresult = myresult + " | " 57 | if self.name.startswith("response"): 58 | myresult = myresult + "Response Indicator: " + value +\ 59 | ", Response Parameter(s): " + remain 60 | if k < lslen: 61 | myresult = myresult + " | " 62 | i = i + 1 63 | if i == lslen: 64 | return "", myresult 65 | 66 | def __init__(self, name, default, fmt, remain=0): 67 | """ 68 | class constructor for initializing the instance variables 69 | @param name: name of the field 70 | @param default: Scapy has many formats to represent the data 71 | internal, human and machine. anyways you may sit this param to None. 72 | @param fmt: specifying the format, this has been set to "H" 73 | @param remain: this parameter specifies the size of the remaining 74 | data so make it 0 to handle all of the data. 75 | """ 76 | self.name = name 77 | StrField.__init__(self, name, default, fmt, remain) 78 | 79 | 80 | class POPRes(Packet): 81 | """ 82 | class for handling pop responses 83 | @attention: it inherets Packet from Scapy library 84 | """ 85 | name = "pop" 86 | fields_desc = [POPField("response", "", "H")] 87 | 88 | 89 | class POPReq(Packet): 90 | """ 91 | class for handling pop requests 92 | @attention: it inherets Packet from Scapy library 93 | """ 94 | name = "pop" 95 | fields_desc = [POPField("request", "", "H")] 96 | 97 | 98 | bind_layers(TCP, POPReq, dport=110) 99 | bind_layers(TCP, POPRes, sport=110) 100 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/sip.py: -------------------------------------------------------------------------------- 1 | import base64 2 | 3 | from scapy.layers.inet import * 4 | from scapy.layers.dns import * 5 | import dissector 6 | 7 | 8 | class SIPStartField(StrField): 9 | """ 10 | field class for handling sip start field 11 | @attention: it inherets StrField from Scapy library 12 | """ 13 | holds_packets = 1 14 | name = "SIPStartField" 15 | 16 | def getfield(self, pkt, s): 17 | """ 18 | this method will get the packet, takes what does need to be 19 | taken and let the remaining go, so it returns two values. 20 | first value which belongs to this field and the second is 21 | the remaining which does need to be dissected with 22 | other "field classes". 23 | @param pkt: holds the whole packet 24 | @param s: holds only the remaining data which is not dissected yet. 
25 | """ 26 | cstream = -1 27 | if pkt.underlayer.name == "TCP": 28 | cstream = dissector.check_stream(\ 29 | pkt.underlayer.underlayer.fields["src"],\ 30 | pkt.underlayer.underlayer.fields["dst"],\ 31 | pkt.underlayer.fields["sport"],\ 32 | pkt.underlayer.fields["dport"],\ 33 | pkt.underlayer.fields["seq"], s) 34 | if not cstream == -1: 35 | s = cstream 36 | remain = "" 37 | value = "" 38 | ls = s.splitlines(True) 39 | f = ls[0].split() 40 | if "SIP" in f[0]: 41 | ls = s.splitlines(True) 42 | f = ls[0].split() 43 | length = len(f) 44 | value = "" 45 | if length == 3: 46 | value = "SIP-Version:" + f[0] + ", Status-Code:" +\ 47 | f[1] + ", Reason-Phrase:" + f[2] 48 | ls.remove(ls[0]) 49 | for element in ls: 50 | remain = remain + element 51 | else: 52 | value = ls[0] 53 | ls.remove(ls[0]) 54 | for element in ls: 55 | remain = remain + element 56 | return remain, value 57 | elif "SIP" in f[2]: 58 | ls = s.splitlines(True) 59 | f = ls[0].split() 60 | length = len(f) 61 | value = [] 62 | if length == 3: 63 | value = "Method:" + f[0] + ", Request-URI:" +\ 64 | f[1] + ", SIP-Version:" + f[2] 65 | ls.remove(ls[0]) 66 | for element in ls: 67 | remain = remain + element 68 | else: 69 | value = ls[0] 70 | ls.remove(ls[0]) 71 | for element in ls: 72 | remain = remain + element 73 | return remain, value 74 | else: 75 | return s, "" 76 | 77 | 78 | class SIPMsgField(StrField): 79 | """ 80 | field class for handling the body of sip packets 81 | @attention: it inherets StrField from Scapy library 82 | """ 83 | holds_packets = 1 84 | name = "SIPMsgField" 85 | myresult = "" 86 | 87 | def __init__(self, name, default): 88 | """ 89 | class constructor, for initializing instance variables 90 | @param name: name of the field 91 | @param default: Scapy has many formats to represent the data 92 | internal, human and machine. anyways you may sit this param to None. 93 | """ 94 | self.name = name 95 | self.fmt = "!B" 96 | Field.__init__(self, name, default, "!B") 97 | 98 | def getfield(self, pkt, s): 99 | """ 100 | this method will get the packet, takes what does need to be 101 | taken and let the remaining go, so it returns two values. 102 | first value which belongs to this field and the second is 103 | the remaining which does need to be dissected with 104 | other "field classes". 105 | @param pkt: holds the whole packet 106 | @param s: holds only the remaining data which is not dissected yet. 107 | """ 108 | if s.startswith("\r\n"): 109 | s = s.lstrip("\r\n") 110 | if s == "": 111 | return "", "" 112 | self.myresult = "" 113 | for c in s: 114 | self.myresult = self.myresult + base64.standard_b64encode(c) 115 | return "", self.myresult 116 | 117 | 118 | class SIPField(StrField): 119 | """ 120 | field class for handling the body of sip fields 121 | @attention: it inherets StrField from Scapy library 122 | """ 123 | holds_packets = 1 124 | name = "SIPField" 125 | 126 | def getfield(self, pkt, s): 127 | """ 128 | this method will get the packet, takes what does need to be 129 | taken and let the remaining go, so it returns two values. 130 | first value which belongs to this field and the second is 131 | the remaining which does need to be dissected with 132 | other "field classes". 133 | @param pkt: holds the whole packet 134 | @param s: holds only the remaining data which is not dissected yet. 
135 | """ 136 | if self.name == "unknown-header(s): ": 137 | remain = "" 138 | value = [] 139 | ls = s.splitlines(True) 140 | i = -1 141 | for element in ls: 142 | i = i + 1 143 | if element == "\r\n": 144 | return s, [] 145 | elif element != "\r\n" and (": " in element[:10])\ 146 | and (element[-2:] == "\r\n"): 147 | value.append(element) 148 | ls.remove(ls[i]) 149 | remain = "" 150 | unknown = True 151 | for element in ls: 152 | if element != "\r\n" and (": " in element[:15])\ 153 | and (element[-2:] == "\r\n") and unknown: 154 | value.append(element) 155 | else: 156 | unknow = False 157 | remain = remain + element 158 | return remain, value 159 | return s, [] 160 | 161 | remain = "" 162 | value = "" 163 | ls = s.splitlines(True) 164 | i = -1 165 | for element in ls: 166 | i = i + 1 167 | if element.upper().startswith(self.name.upper()): 168 | value = element 169 | value = value.strip(self.name) 170 | ls.remove(ls[i]) 171 | remain = "" 172 | for element in ls: 173 | remain = remain + element 174 | return remain, value[len(self.name) + 1:] 175 | return s, "" 176 | 177 | def __init__(self, name, default, fmt, remain=0): 178 | """ 179 | class constructor for initializing the instance variables 180 | @param name: name of the field 181 | @param default: Scapy has many formats to represent the data 182 | internal, human and machine. anyways you may sit this param to None. 183 | @param fmt: specifying the format, this has been set to "H" 184 | @param remain: this parameter specifies the size of the remaining 185 | data so make it 0 to handle all of the data. 186 | """ 187 | self.name = name 188 | StrField.__init__(self, name, default, fmt, remain) 189 | 190 | 191 | class SIP(Packet): 192 | """ 193 | class for handling the body of sip packets 194 | @attention: it inherets Packet from Scapy library 195 | """ 196 | name = "sip" 197 | fields_desc = [SIPStartField("start-line: ", "", "H"), 198 | SIPField("accept: ", "", "H"), 199 | SIPField("accept-contact: ", "", "H"), 200 | SIPField("accept-encoding: ", "", "H"), 201 | SIPField("accept-language: ", "", "H"), 202 | SIPField("accept-resource-priority: ", "", "H"), 203 | SIPField("alert-info: ", "", "H"), 204 | SIPField("allow: ", "", "H"), 205 | SIPField("allow-events: ", "", "H"), 206 | SIPField("authentication-info: ", "", "H"), 207 | SIPField("authorization: ", "", "H"), 208 | SIPField("call-id: ", "", "H"), 209 | SIPField("call-info: ", "", "H"), 210 | SIPField("contact: ", "", "H"), 211 | SIPField("content-disposition: ", "", "H"), 212 | SIPField("content-encoding: ", "", "H"), 213 | SIPField("content-language: ", "", "H"), 214 | SIPField("content-length: ", "", "H"), 215 | SIPField("content-type: ", "", "H"), 216 | SIPField("cseq: ", "", "H"), 217 | SIPField("date: ", "", "H"), 218 | SIPField("error-info: ", "", "H"), 219 | SIPField("event: ", "", "H"), 220 | SIPField("expires: ", "", "H"), 221 | SIPField("from: ", "", "H"), 222 | SIPField("in-reply-to: ", "", "H"), 223 | SIPField("join: ", "", "H"), 224 | SIPField("max-forwards: ", "", "H"), 225 | SIPField("mime-version: ", "", "H"), 226 | SIPField("min-expires: ", "", "H"), 227 | SIPField("min-se: ", "", "H"), 228 | SIPField("organization: ", "", "H"), 229 | SIPField("p-access-network-info: ", "", "H"), 230 | SIPField("p-asserted-identity: ", "", "H"), 231 | SIPField("p-associated-uri: ", "", "H"), 232 | SIPField("p-called-party-id: ", "", "H"), 233 | SIPField("p-charging-function-addresses: ", "", "H"), 234 | SIPField("p-charging-vector: ", "", "H"), 235 | SIPField("p-dcs-trace-party-id: ", 
"", "H"), 236 | SIPField("p-dcs-osps: ", "", "H"), 237 | SIPField("p-dcs-billing-info: ", "", "H"), 238 | SIPField("p-dcs-laes: ", "", "H"), 239 | SIPField("p-dcs-redirect: ", "", "H"), 240 | SIPField("p-media-authorization: ", "", "H"), 241 | SIPField("p-preferred-identity: ", "", "H"), 242 | SIPField("p-visited-network-id: ", "", "H"), 243 | SIPField("path: ", "", "H"), 244 | SIPField("priority: ", "", "H"), 245 | SIPField("privacy: ", "", "H"), 246 | SIPField("proxy-authenticate: ", "", "H"), 247 | SIPField("proxy-authorization: ", "", "H"), 248 | SIPField("proxy-require: ", "", "H"), 249 | SIPField("rack: ", "", "H"), 250 | SIPField("reason: ", "", "H"), 251 | SIPField("record-route: ", "", "H"), 252 | SIPField("referred-by: ", "", "H"), 253 | SIPField("reject-contact: ", "", "H"), 254 | SIPField("replaces: ", "", "H"), 255 | SIPField("reply-to: ", "", "H"), 256 | SIPField("request-disposition: ", "", "H"), 257 | SIPField("require: ", "", "H"), 258 | SIPField("resource-priority: ", "", "H"), 259 | SIPField("retry-after: ", "", "H"), 260 | SIPField("route: ", "", "H"), 261 | SIPField("rseq: ", "", "H"), 262 | SIPField("security-client: ", "", "H"), 263 | SIPField("security-server: ", "", "H"), 264 | SIPField("security-verify: ", "", "H"), 265 | SIPField("server: ", "", "H"), 266 | SIPField("service-route: ", "", "H"), 267 | SIPField("session-expires: ", "", "H"), 268 | SIPField("sip-etag: ", "", "H"), 269 | SIPField("sip-if-match: ", "", "H"), 270 | SIPField("subject: ", "", "H"), 271 | SIPField("subscription-state: ", "", "H"), 272 | SIPField("supported: ", "", "H"), 273 | SIPField("timestamp: ", "", "H"), 274 | SIPField("to: ", "", "H"), 275 | SIPField("unsupported: ", "", "H"), 276 | SIPField("user-agent: ", "", "H"), 277 | SIPField("via: ", "", "H"), 278 | SIPField("warning: ", "", "H"), 279 | SIPField("www-authenticate: ", "", "H"), 280 | SIPField("refer-to: ", "", "H"), 281 | SIPField("history-info: ", "", "H"), 282 | SIPField("unknown-header(s): ", "", "H"), 283 | SIPMsgField("message-body: ", "")] 284 | 285 | bind_layers(TCP, SIP, sport=5060) 286 | bind_layers(TCP, SIP, dport=5060) 287 | bind_layers(UDP, SIP, sport=5060) 288 | bind_layers(UDP, SIP, dport=5060) 289 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/smtp.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import string 3 | 4 | from scapy.layers.inet import * 5 | from scapy.packet import * 6 | from scapy.fields import * 7 | 8 | import dissector 9 | 10 | 11 | def name_generator(size=9, chars=string.ascii_uppercase + string.digits): 12 | """ 13 | this method is for generating a randndom name for the downloaded files 14 | @param size: number of random characters 15 | @param chars: type of the random characters 16 | """ 17 | return ''.join(random.choice(chars) for x in range(size)) 18 | 19 | 20 | src = "" 21 | dst = "" 22 | sport = "" 23 | dport = "" 24 | seq = "" 25 | 26 | # holds smtp sessions 27 | bounded = [] 28 | 29 | 30 | def get_tcp_ip(): 31 | """ 32 | this method is for retrieving the ip and tcp values 33 | """ 34 | return src, dst, sport, dport, seq 35 | 36 | 37 | def set_tcp_ip(srcp, dstp, sportp, dportp, seqp): 38 | """ 39 | this method is for set values in the global variables for tcp/ip 40 | @param srcp: source ip address 41 | @param dstp: destination ip address 42 | @param sportp: source port number 43 | @param dPortp: destination port number 44 | @param seqp: sequence 
number 45 | """ 46 | global src, dst, sport, dport, seq 47 | src = srcp 48 | dst = dstp 49 | sport = sportp 50 | dport = dportp 51 | seq = seqp 52 | 53 | 54 | def bind(Src, Dst, Port): 55 | """ 56 | method for creating smtp data sessions 57 | @param Src: source ip address 58 | @param Dst: destination ip address 59 | @param Port: source port number 60 | """ 61 | bounded.append([Src, Dst, Port]) 62 | 63 | 64 | def unbind(Src, Dst, Port): 65 | """ 66 | do the opposite of bind() 67 | """ 68 | if [Src, Dst, Port] in bounded: 69 | bounded.remove([Src, Dst, Port]) 70 | 71 | 72 | def is_bounded(Src, Dst, Port): 73 | """ 74 | returns true if the session is already bounded 75 | @param Src: source ip address 76 | @param Dst: destination ip address 77 | @param Port: source port number 78 | """ 79 | if [Src, Dst, Port] in bounded: 80 | return True 81 | return False 82 | 83 | 84 | class SMTPDataField(XByteField): 85 | """ 86 | this is a field class for handling the smtp data 87 | @attention: this class inherets XByteField 88 | """ 89 | holds_packets = 1 90 | name = "SMTPDataField" 91 | myresult = "" 92 | 93 | def __init__(self, name, default): 94 | """ 95 | class constructor, for initializing instance variables 96 | @param name: name of the field 97 | @param default: Scapy has many formats to represent the data 98 | internal, human and machine. anyways you may sit this param to None. 99 | """ 100 | self.name = name 101 | self.fmt = "!B" 102 | Field.__init__(self, name, default, "!B") 103 | 104 | def getfield(self, pkt, s): 105 | """ 106 | this method will get the packet, takes what does need to be 107 | taken and let the remaining go, so it returns two values. 108 | first value which belongs to this field and the second is 109 | the remaining which does need to be dissected with 110 | other "field classes". 111 | @param pkt: holds the whole packet 112 | @param s: holds only the remaining data which is not dissected yet. 
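@note: every byte of the captured DATA payload is base64 encoded on its
own below, so each original byte becomes a four character group; a
minimal decoding sketch (enc is assumed to hold such an encoded value):
    "".join(base64.standard_b64decode(enc[i:i + 4]) for i in range(0, len(enc), 4))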
113 | """ 114 | 115 | src, dst, sport, dport, seq = get_tcp_ip() 116 | 117 | cstream = -1 118 | cstream = dissector.check_stream(src, dst, sport, dport, seq, s) 119 | if not cstream == -1: 120 | s = cstream 121 | if cstream == -1: 122 | return "", "" 123 | 124 | name = name_generator() 125 | if not dissector.Dissector.default_download_folder_changed: 126 | cwd = os.getcwd() + "/downloaded/" 127 | try: 128 | os.mkdir("downloaded") 129 | except: 130 | None 131 | f = open(cwd + name, "wb") 132 | else: 133 | f = open(dissector.Dissector.path + name, "wb") 134 | f.write(s) 135 | f.close() 136 | self.myresult = "" 137 | for c in s: 138 | self.myresult = self.myresult + base64.standard_b64encode(c) 139 | return "", self.myresult 140 | 141 | 142 | class SMTPResField(StrField): 143 | """ 144 | this is a field class for handling the smtp data 145 | @attention: this class inherets StrField 146 | """ 147 | holds_packets = 1 148 | name = "SMTPReField" 149 | 150 | def get_code_msg(self, cn): 151 | """ 152 | method returns a message for every a specific code number 153 | @param cn: code number 154 | """ 155 | codes = { 156 | "500": "Syntax error, command unrecognized", 157 | "501": "Syntax error in parameters or arguments", 158 | "502": "Command not implemented", 159 | "503": "Bad sequence of commands", 160 | "504": "Command parameter not implemented", 161 | "211": "System status, or system help reply", 162 | "214": "Help message", 163 | "220": " Service ready", 164 | "221": " Service closing transmission channel", 165 | "421": " Service not available,\ 166 | closing transmission channel", 167 | "250": "Requested mail action okay, completed", 168 | "251": "User not local; will forward to ", 169 | "450": "Requested mail action not taken: mailbox unavailable", 170 | "550": "Requested action not taken: mailbox unavailable", 171 | "451": "Requested action aborted: error in processing", 172 | "551": "User not local; please try ", 173 | "452": "Requested action not taken: insufficient system\ 174 | storage", 175 | "552": "Requested mail action aborted: exceeded storage\ 176 | allocation", 177 | "553": "Requested action not taken: mailbox name not allowed", 178 | "354": "Start mail input; end with .", 179 | "554": "Transaction failed", 180 | "211": "System status, or system help reply", 181 | "214": "Help message", 182 | "220": " Service ready", 183 | "221": " Service closing transmission channel", 184 | "250": "Requested mail action okay, completed", 185 | "251": "User not local; will forward to ", 186 | "354": "Start mail input; end with .", 187 | "421": " Service not available, closing \ 188 | transmission channel", 189 | "450": "Requested mail action not taken: mailbox unavailable", 190 | "451": "Requested action aborted: local error in processing", 191 | "452": "Requested action not taken: insufficient system\ 192 | storage", 193 | "500": "Syntax error, command unrecognized", 194 | "501": "Syntax error in parameters or arguments", 195 | "502": "Command not implemented", 196 | "503": "Bad sequence of commands", 197 | "504": "Command parameter not implemented", 198 | "550": "Requested action not taken: mailbox unavailable", 199 | "551": "User not local; please try ", 200 | "552": "Requested mail action aborted: exceeded storage\ 201 | allocation", 202 | "553": "Requested action not taken: mailbox name not allowed", 203 | "554": "Transaction failed"} 204 | if cn in codes: 205 | return codes[cn] 206 | return "Unknown Response Code" 207 | 208 | def getfield(self, pkt, s): 209 | """ 210 | this method will get 
the packet, takes what does need to be 211 | taken and let the remaining go, so it returns two values. 212 | first value which belongs to this field and the second is 213 | the remaining which does need to be dissected with 214 | other "field classes". 215 | @param pkt: holds the whole packet 216 | @param s: holds only the remaining data which is not dissected yet. 217 | """ 218 | cstream = -1 219 | if pkt.underlayer.name == "TCP": 220 | cstream = dissector.check_stream(\ 221 | pkt.underlayer.underlayer.fields["src"],\ 222 | pkt.underlayer.underlayer.fields["dst"],\ 223 | pkt.underlayer.fields["sport"],\ 224 | pkt.underlayer.fields["dport"],\ 225 | pkt.underlayer.fields["seq"], s) 226 | if not cstream == -1: 227 | s = cstream 228 | remain = "" 229 | value = "" 230 | ls = s.splitlines() 231 | length = len(ls) 232 | if length == 1: 233 | value = ls[0] 234 | arguments = "" 235 | first = True 236 | res = value.split(" ") 237 | for arg in res: 238 | if not first: 239 | arguments = arguments + arg + " " 240 | first = False 241 | if "-" in res[0]: 242 | value = "(" + res[0][:3] + ") " +\ 243 | self.get_code_msg(res[0][:3]) + " " + res[0][3:] 244 | else: 245 | value = "(" + res[0] + ") " + self.get_code_msg(res[0]) 246 | return arguments[:-1], [value] 247 | 248 | if length > 1: 249 | reponses = [] 250 | for element in ls: 251 | element = element.split(" ") 252 | arguments = "" 253 | first = True 254 | for arg in element: 255 | if not first: 256 | arguments = arguments + arg + " " 257 | first = False 258 | if "-" in element[0]: 259 | reponses.append(["(" + element[0][:3] + ") " + 260 | self.get_code_msg(element[0][:3]) + 261 | " " + element[0][3:], arguments[:-1]]) 262 | else: 263 | reponses.append(["(" + element[0] + ") " + 264 | self.get_code_msg(element[0][:-1]), 265 | arguments]) 266 | return "", reponses 267 | return "", "" 268 | 269 | def __init__(self, name, default, fmt, remain=0): 270 | """ 271 | class constructor for initializing the instance variables 272 | @param name: name of the field 273 | @param default: Scapy has many formats to represent the data 274 | internal, human and machine. anyways you may sit this param to None. 275 | @param fmt: specifying the format, this has been set to "H" 276 | @param remain: this parameter specifies the size of the remaining 277 | data so make it 0 to handle all of the data. 278 | """ 279 | self.name = name 280 | StrField.__init__(self, name, default, fmt, remain) 281 | 282 | 283 | class SMTPReqField(StrField): 284 | holds_packets = 1 285 | name = "SMTPReqField" 286 | 287 | def getfield(self, pkt, s): 288 | """ 289 | this method will get the packet, takes what does need to be 290 | taken and let the remaining go, so it returns two values. 291 | first value which belongs to this field and the second is 292 | the remaining which does need to be dissected with 293 | other "field classes". 294 | @param pkt: holds the whole packet 295 | @param s: holds only the remaining data which is not dissected yet. 
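@note: a short worked example of the behaviour implemented below (the
address is illustrative): the line is split on whitespace, the first
token is returned as the command and the rest as the argument, e.g.
    "MAIL FROM:<alice@example.com>"  ->  ("FROM:<alice@example.com>", "MAIL")
DATA opens an smtp data session via bind() and QUIT closes it via unbind().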
296 | """ 297 | cstream = -1 298 | if pkt.underlayer.name == "TCP": 299 | cstream = dissector.check_stream(\ 300 | pkt.underlayer.underlayer.fields["src"],\ 301 | pkt.underlayer.underlayer.fields["dst"],\ 302 | pkt.underlayer.fields["sport"],\ 303 | pkt.underlayer.fields["dport"],\ 304 | pkt.underlayer.fields["seq"], s) 305 | if not cstream == -1: 306 | s = cstream 307 | remain = "" 308 | value = "" 309 | ls = s.split() 310 | length = len(ls) 311 | if ls[0].upper() == "DATA": 312 | bind(pkt.underlayer.underlayer.fields["src"], 313 | pkt.underlayer.underlayer.fields["dst"], 314 | pkt.underlayer.fields["sport"]) 315 | return "", "DATA" 316 | if ls[0].upper() == "QUIT": 317 | unbind(pkt.underlayer.underlayer.fields["src"], 318 | pkt.underlayer.underlayer.fields["dst"], 319 | pkt.underlayer.fields["sport"]) 320 | return "", "QUIT" 321 | if is_bounded(pkt.underlayer.underlayer.fields["src"], 322 | pkt.underlayer.underlayer.fields["dst"], 323 | pkt.underlayer.fields["sport"]): 324 | set_tcp_ip(pkt.underlayer.underlayer.fields["src"], 325 | pkt.underlayer.underlayer.fields["dst"], 326 | pkt.underlayer.fields["sport"],\ 327 | pkt.underlayer.fields["dport"],\ 328 | pkt.underlayer.fields["seq"]) 329 | smtpd = SMTPData(s).fields["data"] 330 | return "", ["DATA", smtpd] 331 | 332 | if length > 1: 333 | value = ls[0] 334 | if length == 2: 335 | remain = ls[1] 336 | return remain, value 337 | else: 338 | i = 1 339 | remain = ' ' 340 | while i < length: 341 | remain = remain + ls[i] + ' ' 342 | i = i + 1 343 | return remain[:-1], value 344 | else: 345 | return "", ls[0] 346 | 347 | def __init__(self, name, default, fmt, remain=0): 348 | """ 349 | class constructor for initializing the instance variables 350 | @param name: name of the field 351 | @param default: Scapy has many formats to represent the data 352 | internal, human and machine. anyways you may sit this param to None. 353 | @param fmt: specifying the format, this has been set to "H" 354 | @param remain: this parameter specifies the size of the remaining 355 | data so make it 0 to handle all of the data. 
356 | """ 357 | self.name = name 358 | StrField.__init__(self, name, default, fmt, remain) 359 | 360 | 361 | class SMTPData(Packet): 362 | """ 363 | class for handling the smtp data 364 | @attention: this class inherets Packet 365 | """ 366 | 367 | name = "smtp" 368 | fields_desc = [SMTPDataField("data", "")] 369 | 370 | 371 | class SMTPResponse(Packet): 372 | """ 373 | class for handling the smtp responses 374 | @attention: this class inherets Packet 375 | """ 376 | name = "smtp" 377 | fields_desc = [SMTPResField("response", "", "H"), 378 | StrField("argument", "", "H")] 379 | 380 | 381 | class SMTPRequest(Packet): 382 | """ 383 | class for handling the smtp requests 384 | @attention: this class inherets Packet 385 | """ 386 | name = "smtp" 387 | fields_desc = [SMTPReqField("command", '', "H"), 388 | StrField("argument", '', "H")] 389 | 390 | bind_layers(TCP, SMTPResponse, sport=25) 391 | bind_layers(TCP, SMTPRequest, dport=25) 392 | bind_layers(TCP, SMTPResponse, sport=587) 393 | bind_layers(TCP, SMTPRequest, dport=587) 394 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/common/protocols/telnet.py: -------------------------------------------------------------------------------- 1 | import base64 2 | 3 | from scapy.layers.inet import * 4 | 5 | import dissector 6 | 7 | 8 | class TELNETField(XByteField): 9 | """ 10 | field class for handling the telnet packets 11 | @attention: this class inherets XByteField 12 | """ 13 | holds_packets = 1 14 | name = "TELNETField" 15 | myresult = "" 16 | 17 | def __init__(self, name, default): 18 | """ 19 | class constructor, for initializing instance variables 20 | @param name: name of the field 21 | @param default: Scapy has many formats to represent the data 22 | internal, human and machine. anyways you may sit this param to None. 23 | """ 24 | self.name = name 25 | self.fmt = "!B" 26 | Field.__init__(self, name, default, "!B") 27 | 28 | def get_code_msg(self, cn): 29 | """ 30 | method returns a message for every a specific code number 31 | @param cn: code number 32 | """ 33 | codes = {0: "TRANSMIT-BINARY", 1: "ECHO", 34 | 3: "SUPPRESS-GO-AHEAD", 35 | 5: "STATUS", 6: "TIMING-MARK", 36 | 7: "RCTE", 10: "NAOCRD", 37 | 11: "NAOHTS", 12: "NAOHTD", 38 | 13: "NAOFFD", 14: "NAOVTS", 39 | 15: "NAOVTD", 16: "NAOLFD", 40 | 17: "EXTEND-ASCII", 41 | 18: "LOGOUT", 19: "BM", 20: "DET", 21: "SUPDUP", 42 | 22: "SUPDUP-OUTPUT", 23: "SEND-LOCATION", 43 | 24: "TERMINAL-TYPE", 25: "END-OF-RECORD", 44 | 26: "TUID", 27: "OUTMRK", 28: "TTYLOC", 29: "3270-REGIME", 45 | 30: "X.3-PAD", 31: "NAWS", 32: "TERMINAL-SPEED", 46 | 33: "TOGGLE-FLOW-CONTROL", 34: "LINEMODE", 47 | 35: "X-DISPLAY-LOCATION", 48 | 36: "ENVIRON", 37: "AUTHENTICATION", 38: "ENCRYPT", 49 | 39: "NEW-ENVIRON", 40: "TN3270E", 44: "COM-PORT-OPTION", 50 | 236: "End of Record", 237: "Suspend Current Process", 51 | 238: "Abort Process", 239: "End of File", 240: "SE", 52 | 241: "NOP", 242: "Data Mark", 243: "Break", 53 | 244: "Interrupt Process", 245: "Abort output", 54 | 246: "Are You There", 247: "Erase character", 55 | 248: "Erase Line", 249: "Go ahead", 250: "SB", 251: "WILL", 56 | 252: "WON'T", 253: "DO", 254: "DON'T", 255: "Command"} 57 | if cn in codes: 58 | return codes[cn] + " " 59 | return "UnknownCode[" + str(cn) + "] " 60 | 61 | def getfield(self, pkt, s): 62 | """ 63 | this method will get the packet, takes what does need to be 64 | taken and let the remaining go, so it returns two values. 
65 | first value which belongs to this field and the second is 66 | the remaining which does need to be dissected with 67 | other "field classes". 68 | @param pkt: holds the whole packet 69 | @param s: holds only the remaining data which is not dissected yet. 70 | """ 71 | cstream = -1 72 | if pkt.underlayer.name == "TCP": 73 | cstream = dissector.check_stream(\ 74 | pkt.underlayer.underlayer.fields["src"],\ 75 | pkt.underlayer.underlayer.fields["dst"],\ 76 | pkt.underlayer.fields["sport"],\ 77 | pkt.underlayer.fields["dport"],\ 78 | pkt.underlayer.fields["seq"], s) 79 | if not cstream == -1: 80 | s = cstream 81 | self.myresult = "" 82 | subOptions = False 83 | resultlist = [] 84 | firstb = struct.unpack(self.fmt, s[0])[0] 85 | if firstb != 255: 86 | self.myresult = "" 87 | for c in s: 88 | self.myresult = self.myresult + base64.standard_b64encode(c) 89 | return "", "data " + self.myresult 90 | 91 | for c in s: 92 | ustruct = struct.unpack(self.fmt, c) 93 | command = self.get_code_msg(ustruct[0]) 94 | if command == "SB ": 95 | subOptions = True 96 | self.myresult = self.myresult + "SB " 97 | continue 98 | if command == "SE ": 99 | subOptions = False 100 | self.myresult = self.myresult = self.myresult + "SE " 101 | continue 102 | if subOptions: 103 | self.myresult = self.myresult +\ 104 | "subop(" + str(ustruct[0]) + ") " 105 | continue 106 | else: 107 | self.myresult = self.myresult + command 108 | comlist = self.myresult.split("Command ") 109 | for element in comlist: 110 | if element != "": 111 | resultlist.append(("command", element)) 112 | #return "", resultlist 113 | return "", self.myresult 114 | 115 | 116 | class TELNET(Packet): 117 | """ 118 | field class for handling the telnet packets 119 | @attention: this class inherets Packet 120 | """ 121 | name = "telnet" 122 | fields_desc = [TELNETField("telnetpayload", "")] 123 | 124 | bind_layers(TCP, TELNET, dport=23) 125 | bind_layers(TCP, TELNET, sport=23) 126 | """ 127 | pkts = rdpcap("/root/Desktop/telnet-cooked.pcap") 128 | for pkt in pkts: 129 | pkt.show() 130 | """ 131 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/emailaddress.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect, find_session 4 | from common.hashmethods import * 5 | from common.entities import pcapFile 6 | import logging 7 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 8 | from scapy.all import * 9 | from canari.maltego.entities import EmailAddress 10 | from canari.maltego.message import UIMessage 11 | from canari.framework import configure 12 | import re 13 | from canari.config import config 14 | 15 | __author__ = 'catalyst256' 16 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 17 | __credits__ = [] 18 | 19 | __license__ = 'GPL' 20 | __version__ = '0.1' 21 | __maintainer__ = 'catalyst256' 22 | __email__ = 'catalyst256@gmail.com' 23 | __status__ = 'Development' 24 | 25 | __all__ = [ 26 | 'dotransform' 27 | ] 28 | 29 | @configure( 30 | label='Extract Email Address(s)', 31 | description='Extract email addresses from a pcap file', 32 | uuids=['sniffMyPacketsv2.v2.pcap_2_emailaddr'], 33 | inputs=[('[SmP] - Email', pcapFile)], 34 | debug=True 35 | ) 36 | def dotransform(request, response): 37 | pcap = request.value 38 | 39 | lookfor = ['MAIL FROM:', 'RCPT TO:'] 40 | pkts = rdpcap(pcap) 41 | usedb = config['working/usedb'] 42 | # Check to see if we are using the 
database or not 43 | if usedb > 0: 44 | d = mongo_connect() 45 | c = d['CREDS'] 46 | # Hash the pcap file 47 | try: 48 | md5pcap = md5_for_file(pcap) 49 | except Exception as e: 50 | return response + UIMessage(str(e)) 51 | x = find_session(md5pcap) 52 | pcap_id = x[0] 53 | else: 54 | pass 55 | addr = [] 56 | try: 57 | for p in pkts: 58 | for m in lookfor: 59 | if p.haslayer(TCP) and p.haslayer(Raw): 60 | raw = p[Raw].load 61 | if m in raw: 62 | for s in re.finditer('<([\S.-]+@[\S-]+)>', raw): 63 | addr.append(s.group(1)) 64 | except Exception as e: 65 | return response + UIMessage(str(e)) 66 | 67 | for x in addr: 68 | if usedb > 0: 69 | data = {'PCAP ID': pcap_id, 'Type': 'Email Address', 'Record': x} 70 | t = d.CREDS.find({'Record': x}).count() 71 | if t > 0: 72 | pass 73 | else: 74 | c.insert(data) 75 | else: 76 | pass 77 | e = EmailAddress(x) 78 | response += e 79 | return response 80 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/emailattachments.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import uuid 5 | import email 6 | import mimetypes 7 | from common.dbconnect import mongo_connect 8 | from common.entities import Artifact, EmailAttachment 9 | from canari.maltego.message import UIMessage 10 | from canari.framework import configure 11 | from canari.config import config 12 | 13 | __author__ = 'catalyst256' 14 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 15 | __credits__ = [] 16 | 17 | __license__ = 'GPL' 18 | __version__ = '0.1' 19 | __maintainer__ = 'catalyst256' 20 | __email__ = 'catalyst256@gmail.com' 21 | __status__ = 'Development' 22 | 23 | __all__ = [ 24 | 'dotransform' 25 | ] 26 | 27 | 28 | @configure( 29 | label='Extract Email Attachments', 30 | description='Extract Email attachments from an artifact', 31 | uuids=['sniffMyPacketsv2.v2.artifact_2_email_attachment'], 32 | inputs=[('[SmP] - Email', Artifact)], 33 | debug=True 34 | ) 35 | def dotransform(request, response): 36 | 37 | f = request.value 38 | usedb = config['working/usedb'] 39 | # Check to see if we are using the database or not 40 | if usedb > 0: 41 | d = mongo_connect() 42 | folder = [] 43 | # Check the pcap file doesn't exist in the database already (based on MD5 hash) 44 | try: 45 | s = d.ARTIFACTS.find({"File Name": f}).count() 46 | if s > 0: 47 | r = d.ARTIFACTS.find({"File Name": f}, {"Path": 1, "_id": 0}) 48 | for i in r: 49 | folder = i['Path'] 50 | else: 51 | return response + UIMessage('File not found!!') 52 | except Exception as e: 53 | return response + UIMessage(str(e)) 54 | else: 55 | folder = request.fields['path'] 56 | 57 | msgdata = [] 58 | lookfor = 'DATA' 59 | file = '%s/%s' % (folder, f) 60 | 61 | # split the original file into two parts, message and header and save as lists 62 | with open(file, mode='r') as msgfile: 63 | reader = msgfile.read() 64 | for i, part in enumerate(reader.split(lookfor)): 65 | if i == 1: 66 | msgdata.append(part.strip()) 67 | 68 | save_files = [] 69 | 70 | for item in msgdata: 71 | newfolder = '%s/email-messages' % folder 72 | if not os.path.exists(newfolder): 73 | os.makedirs(newfolder) 74 | filename = newfolder + '/' + 'msgdata.msg' 75 | fb = open(filename, 'w') 76 | fb.write('%s\n' % item) 77 | fb.close() 78 | if filename not in save_files: 79 | save_files.append(filename) 80 | 81 | fp = open(filename) 82 | msg = email.message_from_file(fp) 83 | fp.close() 84 | 85 | counter = 1 86 | for part in 
msg.walk(): 87 | if part.get_content_maintype() == 'multipart': 88 | continue 89 | filename = part.get_filename() 90 | if not filename: 91 | ext = mimetypes.guess_extension(part.get_content_type()) 92 | if not ext: 93 | ext = '.bin' 94 | filename = 'part-%03d%s' % (counter, ext) 95 | counter += 1 96 | 97 | savefile = newfolder + '/' + filename 98 | fp = open(savefile, 'wb') 99 | fp.write(part.get_payload(decode=True)) 100 | fp.close() 101 | if savefile not in save_files: 102 | save_files.append(savefile) 103 | 104 | # Create the Maltego entity 105 | for s in save_files: 106 | e = EmailAttachment(s) 107 | response += e 108 | return response -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/extract-artifacts.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | 4 | from canari.framework import configure 5 | from common.dbconnect import mongo_connect, find_session 6 | from common.hashmethods import * 7 | from common.auxtools import check_file 8 | from common.protocols.dissector import * 9 | from common.entities import pcapFile, Artifact 10 | from canari.config import config 11 | import uuid 12 | from canari.maltego.message import Field, UIMessage 13 | import glob 14 | 15 | 16 | __author__ = 'catalyst256' 17 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 18 | __credits__ = [] 19 | 20 | __license__ = 'GPL' 21 | __version__ = '0.1' 22 | __maintainer__ = 'catalyst256' 23 | __email__ = 'catalyst256@gmail.com' 24 | __status__ = 'Development' 25 | 26 | __all__ = [ 27 | 'dotransform' 28 | ] 29 | 30 | 31 | 32 | @configure( 33 | label='Extract files', 34 | description='Extract files from pcap file', 35 | uuids=['sniffMyPacketsv2.v2.pcap_2_artifacts'], 36 | inputs=[('[SmP] - Artifacts', pcapFile)], 37 | debug=True 38 | ) 39 | def dotransform(request, response): 40 | 41 | pcap = request.value 42 | usedb = config['working/usedb'] 43 | # Check to see if we are using the database or not 44 | if usedb > 0: 45 | # Connect to the database so we can insert the record created below 46 | d = mongo_connect() 47 | c = d['ARTIFACTS'] 48 | # Hash the pcap file 49 | try: 50 | md5pcap = md5_for_file(pcap) 51 | except Exception as e: 52 | return response + UIMessage(str(e)) 53 | x = find_session(md5pcap) 54 | pcap_id = x[0] 55 | folder = x[2] 56 | else: 57 | w = config['working/directory'].strip('\'') 58 | try: 59 | if w != '': 60 | w = w + '/' + str(uuid.uuid4())[:12].replace('-', '') 61 | if not os.path.exists(w): 62 | os.makedirs(w) 63 | folder = w 64 | else: 65 | return response + UIMessage('No working directory set, check your config file') 66 | except Exception as e: 67 | return response + UIMessage(e) 68 | 69 | folder = '%s/%s' % (folder, 'artifacts') 70 | 71 | if not os.path.exists(folder): 72 | os.makedirs(folder) 73 | 74 | dissector = Dissector() # instance of dissector class 75 | dissector.change_dfolder(folder) 76 | dissector.dissect_pkts(pcap) 77 | list_files = glob.glob(folder+'/*') 78 | # print list_files 79 | 80 | # Loop through the stored files and create the database/maltego objects 81 | for g in list_files: 82 | try: 83 | md5hash = md5_for_file(g) 84 | sha1hash = sha1_for_file(g) 85 | ftype = check_file(g) 86 | n = len(folder) + 1 87 | l = len(g) 88 | filename = g[n:l] 89 | if usedb > 0: 90 | data = {'PCAP ID': pcap_id, 'Path': folder, 'File Name': filename, 'File Type': ftype, 'MD5 Hash': md5hash, 91 | 'SHA1 Hash': sha1hash} 92 | t = d.ARTIFACTS.find({'MD5 
Hash': md5hash, "File Name": filename}).count() 93 | if t > 0: 94 | pass 95 | else: 96 | c.insert(data) 97 | else: 98 | pass 99 | 100 | # Create the Maltego entities 101 | a = Artifact(filename) 102 | a.ftype = ftype 103 | a.fhash = md5hash 104 | a += Field('path', folder, displayname='Path') 105 | response += a 106 | except Exception as e: 107 | print str(e) 108 | 109 | return response 110 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/find-smtp-creds.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect, find_session 4 | from common.hashmethods import * 5 | from canari.maltego.message import UIMessage 6 | from common.findcreds import smtp_creds 7 | from common.entities import pcapFile, Credential 8 | from canari.framework import configure 9 | from canari.config import config 10 | 11 | __author__ = 'catalyst256' 12 | __copyright__ = 'Copyright 2015, sniffmypacketsv2 Project' 13 | __credits__ = [] 14 | 15 | __license__ = 'GPL' 16 | __version__ = '0.1' 17 | __maintainer__ = 'catalyst256' 18 | __email__ = 'catalyst256@gmail.com' 19 | __status__ = 'Development' 20 | 21 | __all__ = [ 22 | 'dotransform' 23 | ] 24 | 25 | 26 | @configure( 27 | label='Find SMTP Credentials', 28 | description='Look for SMTP Credentials', 29 | uuids=['sniffMyPacketsv2.v2.find_smtp_creds'], 30 | inputs=[('[SmP] - Email', pcapFile)], 31 | debug=True 32 | ) 33 | def dotransform(request, response): 34 | 35 | pcap = request.value 36 | usedb = config['working/usedb'] 37 | # Check to see if we are using the database or not 38 | if usedb > 0: 39 | # Connect to the database so we can insert the record created below 40 | x = mongo_connect() 41 | c = x['CREDS'] 42 | 43 | # Hash the pcap file 44 | try: 45 | md5pcap = md5_for_file(pcap) 46 | except Exception as e: 47 | return response + UIMessage(str(e)) 48 | d = find_session(md5pcap) 49 | pcap_id = d[0] 50 | else: 51 | pass 52 | 53 | d = smtp_creds(pcap) 54 | if len(d) == 0: 55 | return response + UIMessage('No SMTP Credentials found..sorry') 56 | for n in d: 57 | if usedb > 0: 58 | data = {'PCAP ID': pcap_id, 'Type': 'Email Credential', 'Record': n} 59 | t = x.CREDS.find({'Record': n}).count() 60 | if t > 0: 61 | pass 62 | else: 63 | c.insert(data) 64 | else: 65 | pass 66 | e = Credential(n) 67 | response += e 68 | return response -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/getfolder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect 4 | from common.entities import SessionID, Folder 5 | from canari.maltego.message import UIMessage, Field 6 | from canari.framework import configure 7 | from canari.config import config 8 | 9 | __author__ = 'catalyst256' 10 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 11 | __credits__ = [] 12 | 13 | __license__ = 'GPL' 14 | __version__ = '0.1' 15 | __maintainer__ = 'catalyst256' 16 | __email__ = 'catalyst256@gmail.com' 17 | __status__ = 'Development' 18 | 19 | __all__ = [ 20 | 'dotransform' 21 | ] 22 | 23 | 24 | @configure( 25 | label='Get Folder', 26 | description='Return the working directory for the session', 27 | uuids=['sniffMyPacketsv2.v2.get_folder_2_maltego'], 28 | inputs=[('[SmP] - Sessions', SessionID)], 29 | debug=True 30 | ) 31 | def dotransform(request, response): 32 
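# Only two keys of the INDEX record written by index-pcap.py are needed
# here; an illustrative document (values are made up):
#   {'PCAP ID': 'a1b2c3d4e5f6', 'Working Directory': '/tmp/smp/a1b2c3d4e5f6', ...}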
| 33 | pcap_id = request.value 34 | usedb = config['working/usedb'] 35 | # Check to see if we are using the database or not 36 | if usedb == 0: 37 | return response + UIMessage('No database support configured, check your config file') 38 | else: 39 | pass 40 | # Connect to the database so we can insert the record created below 41 | x = mongo_connect() 42 | c = x['INDEX'] 43 | 44 | try: 45 | s = c.find({'PCAP ID': pcap_id}).count() 46 | if s > 0: 47 | r = c.find({'PCAP ID': pcap_id}, {'Working Directory': 1, '_id': 0}) 48 | for i in r: 49 | folder = i['Working Directory'] 50 | except Exception as e: 51 | return response + UIMessage(str(e)) 52 | 53 | e = Folder(folder) 54 | e += Field('sessionid', pcap_id, displayname='Session ID') 55 | response += e 56 | return response 57 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/index-pcap.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | 4 | import time 5 | import uuid 6 | from collections import OrderedDict 7 | from common.dbconnect import mongo_connect 8 | from common.pcaptools import * 9 | from common.hashmethods import * 10 | from canari.easygui import multenterbox 11 | from canari.config import config 12 | from common.entities import pcapFile, SessionID 13 | from canari.maltego.message import Field, UIMessage 14 | from canari.framework import configure 15 | 16 | __author__ = 'catalyst256' 17 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 18 | __credits__ = [] 19 | 20 | __license__ = 'GPL' 21 | __version__ = '0.1' 22 | __maintainer__ = 'catalyst256' 23 | __email__ = 'catalyst256@gmail.com' 24 | __status__ = 'Development' 25 | 26 | __all__ = [ 27 | 'dotransform' 28 | 29 | ] 30 | 31 | @configure( 32 | label='Index PCAP File', 33 | description='Adds the pcap file into the database', 34 | uuids=['sniffmypacketsv2.v2.index_pcap_2_db'], 35 | inputs=[('[SmP] - PCAP', pcapFile)], 36 | debug=True 37 | ) 38 | def dotransform(request, response): 39 | 40 | # pcap file pulled from Maltego 41 | pcap = request.value 42 | usedb = config['working/usedb'] 43 | # Check to see if we are using the database or not 44 | if usedb == 0: 45 | return response + UIMessage('You have chosen not to use a database') 46 | else: 47 | pass 48 | 49 | # Connect to the database so we can insert the record created below 50 | x = mongo_connect() 51 | c = x['INDEX'] 52 | 53 | # Check the file exists first (so we don't add crap to the database 54 | try: 55 | open(pcap) 56 | except IOError: 57 | return response + UIMessage('The file doesn\'t exist') 58 | 59 | # Check the pcap file is in the correct format (not pcap-ng) 60 | try: 61 | f_format = check_pcap(pcap) 62 | if 'BAD' in f_format: 63 | return response + UIMessage('File format is pcap-ng, not supported by sniffMyPackets, please convert.') 64 | except Exception as e: 65 | return response + UIMessage(str(e)) 66 | 67 | # Hash the pcap file 68 | try: 69 | md5hash = md5_for_file(pcap) 70 | sha1hash = sha1_for_file(pcap) 71 | except Exception as e: 72 | return response + UIMessage(str(e)) 73 | 74 | # Get the file size 75 | try: 76 | filesize = check_size(pcap) 77 | except Exception as e: 78 | return response + UIMessage(str(e)) 79 | 80 | # Check the pcap file doesn't exist in the database already (based on MD5 hash) 81 | try: 82 | s = x.INDEX.find({"MD5 Hash": md5hash}).count() 83 | if s > 0: 84 | r = x.INDEX.find({"MD5 Hash": md5hash}, {"PCAP ID": 1, "_id": 0}) 85 | for i in r: 86 | e = 
SessionID(i['PCAP ID']) 87 | e += Field('sniffmypacketsv2.pcapfile', pcap, displayname='PCAP File') 88 | response += e 89 | return response 90 | else: 91 | pass 92 | except Exception as e: 93 | return response + UIMessage(str(e)) 94 | 95 | # Popup message box for entering comments about the pcap file 96 | msg = 'Enter Comments' 97 | title = 'Comments' 98 | field_names = ["Comments"] 99 | field_values = [] 100 | field_values = multenterbox(msg, title, field_names) 101 | 102 | # General variables used to build the index 103 | comments = field_values[0] 104 | now = time.strftime("%c") 105 | pcap_id = str(uuid.uuid4())[:12].replace('-', '') 106 | 107 | # Get a count of packets available 108 | try: 109 | pkcount = packet_count(pcap) 110 | except Exception as e: 111 | return response + UIMessage(str(e)) 112 | 113 | # Get the start/end time of packets 114 | try: 115 | pcap_time = get_time(pcap) 116 | except Exception as e: 117 | return response + UIMessage(str(e)) 118 | 119 | # Check for working directory, if it doesn't exist create it. 120 | w = config['working/directory'].strip('\'') 121 | try: 122 | if w != '': 123 | w = w + '/' + pcap_id 124 | if not os.path.exists(w): 125 | os.makedirs(w) 126 | else: 127 | return response + UIMessage('No working directory set, check your config file') 128 | except Exception as e: 129 | return response + UIMessage(e) 130 | 131 | # Build a dictonary object to upload into the database 132 | index = OrderedDict({'PCAP ID': pcap_id, 'PCAP Path': pcap, 'Working Directory': w, 'Upload Time': now, 133 | 'Comments': comments, 'MD5 Hash': md5hash, 'SHA1 Hash': sha1hash, 134 | 'Packet Count': pkcount, 'First Packet': pcap_time[0], 'Last Packet': pcap_time[1], 135 | 'File Size': filesize}) 136 | 137 | # Insert record into the database 138 | c.insert(index) 139 | 140 | # Return the entity with Session ID into Maltego 141 | r = SessionID(pcap_id) 142 | r += Field('sniffmypacketsv2.pcapfile', pcap, displayname='PCAP File') 143 | response += r 144 | return response 145 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/md5hash2virustotal.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.apicalls import vt_lookup_file 4 | from common.dbconnect import mongo_connect 5 | from common.entities import Artifact, VirusTotal 6 | from canari.maltego.message import UIMessage 7 | from canari.framework import configure 8 | from canari.config import config 9 | 10 | __author__ = 'catalyst256' 11 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 12 | __credits__ = [] 13 | 14 | __license__ = 'GPL' 15 | __version__ = '0.1' 16 | __maintainer__ = 'catalyst256' 17 | __email__ = 'catalyst256@gmail.com' 18 | __status__ = 'Development' 19 | 20 | __all__ = [ 21 | 'dotransform' 22 | ] 23 | 24 | 25 | @configure( 26 | label='Query VirusTotal', 27 | description='Lookup MD5 hash on virustotal', 28 | uuids=['sniffMyPacketsv2.v2.md5hash_2_virustotal'], 29 | inputs=[('[SmP] - Artifacts', Artifact)], 30 | debug=True 31 | ) 32 | def dotransform(request, response): 33 | filename = request.value 34 | md5hash = request.fields['sniffmypacketsv2.fhash'] 35 | 36 | usedb = config['working/usedb'] 37 | # Check to see if we are using the database or not 38 | if usedb > 0: 39 | 40 | # Connect to the database so we can insert the record created below 41 | x = mongo_connect() 42 | c = x['MALWARE'] 43 | 44 | v = vt_lookup_file(md5hash) 45 | 46 | if v is not None: 47 | 
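# v holds the parsed VirusTotal file report; only two of its fields are
# read below (values are illustrative):
#   {'permalink': 'https://www.virustotal.com/file/...', 'scan_date': '2015-01-01 00:00:00', ...}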
link = v['permalink'] 48 | scan = v['scan_date'] 49 | else: 50 | return response + UIMessage('No record found in VirusTotal') 51 | 52 | s = x.ARTIFACTS.find({'MD5 HASH': md5hash}, {"PCAP ID": 1, "_id": 0}) 53 | pcap_id = '' 54 | for m in s: 55 | pcap_id = m['PCAP ID'] 56 | 57 | data = {'PCAP ID': pcap_id, 'File Name': filename, 'Permalink': link, 'Scan Date': scan, 'MD5 Hash': md5hash} 58 | 59 | t = x.MALWARE.find({'MD5 Hash': md5hash}).count() 60 | if t > 0: 61 | pass 62 | else: 63 | c.insert(data) 64 | 65 | e = VirusTotal(link) 66 | response += e 67 | return response 68 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap-dns-domain.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import datetime 4 | from common.dbconnect import mongo_connect, find_session 5 | from common.hashmethods import * 6 | import tldextract 7 | import logging 8 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 9 | from scapy.all import * 10 | from collections import OrderedDict 11 | from common.entities import pcapFile 12 | from canari.maltego.entities import Domain 13 | from canari.maltego.message import UIMessage 14 | from canari.framework import configure 15 | from common.auxtools import error_logging 16 | from canari.config import config 17 | 18 | __author__ = 'catalyst256' 19 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 20 | __credits__ = [] 21 | 22 | __license__ = 'GPL' 23 | __version__ = '0.1' 24 | __maintainer__ = 'catalyst256' 25 | __email__ = 'catalyst256@gmail.com' 26 | __status__ = 'Development' 27 | 28 | __all__ = [ 29 | 'dotransform' 30 | ] 31 | 32 | 33 | @configure( 34 | label='Find DNS Domains', 35 | description='Find DNS Domains in a pcap file', 36 | uuids=['sniffMyPacketsv2.v2.pcap_2_dnsdomains'], 37 | inputs=[('[SmP] - DNS', pcapFile)], 38 | debug=True 39 | ) 40 | def dotransform(request, response): 41 | # Store the pcap file as a variable 42 | pcap = request.value 43 | usedb = config['working/usedb'] 44 | # Check to see if we are using the database or not 45 | if usedb > 0: 46 | # Connect to the database so we can insert the record created below 47 | x = mongo_connect() 48 | c = x['DNS'] 49 | # Hash the pcap file 50 | try: 51 | md5hash = md5_for_file(pcap) 52 | except Exception as e: 53 | return response + UIMessage(str(e)) 54 | # Get the session and/or pcap id 55 | d = find_session(md5hash) 56 | pcap_id = d[0] 57 | session_id = d[1] 58 | else: 59 | pass 60 | 61 | try: 62 | pkts = rdpcap(pcap) 63 | dns_requests = [] 64 | for p in pkts: 65 | if p.haslayer(DNSQR): 66 | timestamp = datetime.datetime.fromtimestamp(p.time).strftime('%Y-%m-%d %H:%M:%S.%f') 67 | r = p[DNSQR].qname[:-1] 68 | tld = tldextract.extract(r) 69 | domain = tld.registered_domain 70 | if usedb > 0: 71 | dns = OrderedDict({'PCAP ID': pcap_id, 'Stream ID': session_id, 72 | 'Time Stamp': timestamp, 73 | 'Type': 'Request', 'IP': {'src': p[IP].src, 'dst': p[IP].dst, 'length': p[IP].len}, 74 | 'Request Details': {'Query Type': p[DNSQR].qtype, 'Query Name': r, 'Domain': domain}}) 75 | t = x.DNS.find({'Time Stamp': timestamp}).count() 76 | if t > 0: 77 | pass 78 | else: 79 | c.insert(dns) 80 | else: 81 | pass 82 | if r not in dns_requests: 83 | dns_requests.append(domain) 84 | else: 85 | pass 86 | 87 | for d in dns_requests: 88 | x = Domain(d) 89 | response += x 90 | return response 91 | 92 | except Exception as e: 93 | if usedb > 0: 94 | error_logging(str(e), 'DNS 
Requests') 95 | else: 96 | return response + UIMessage(str(e)) 97 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap-geoip.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.entities import pcapFile, GeoMap 4 | from common.dbconnect import mongo_connect 5 | from common.hashmethods import * 6 | import logging 7 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 8 | from scapy.all import * 9 | from collections import OrderedDict 10 | from common.geoip import lookup_geo 11 | from canari.maltego.message import UIMessage 12 | from canari.framework import configure 13 | from canari.config import config 14 | 15 | __author__ = 'catalyst256' 16 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 17 | __credits__ = [] 18 | 19 | __license__ = 'GPL' 20 | __version__ = '0.1' 21 | __maintainer__ = 'catalyst256' 22 | __email__ = 'catalyst256@gmail.com' 23 | __status__ = 'Development' 24 | 25 | __all__ = [ 26 | 'dotransform' 27 | ] 28 | 29 | 30 | @configure( 31 | label='Lookup GeoIP Details', 32 | description='TODO: Returns a Something entity with the phrase "Hello Word!"', 33 | uuids=['sniffMyPacketsv2.v2.pcap_2_geoip'], 34 | inputs=[('[SMP] - GeoIP', pcapFile)], 35 | debug=True 36 | ) 37 | def dotransform(request, response): 38 | 39 | # Store the pcap file as a variable 40 | pcap = request.value 41 | usedb = config['working/usedb'] 42 | # Check to see if we are using the database or not 43 | if usedb == 0: 44 | return response + UIMessage('No database in use, so this is pointless!!!') 45 | else: 46 | pass 47 | # Connect to the database so we can insert the record created below 48 | x = mongo_connect() 49 | c = x['GEOIP'] 50 | 51 | # Hash the pcap file 52 | try: 53 | md5hash = md5_for_file(pcap) 54 | except Exception as e: 55 | return response + UIMessage(str(e)) 56 | 57 | # Get the PCAP ID for the pcap file 58 | try: 59 | s = x.INDEX.find({"MD5 Hash": md5hash}).count() 60 | if s == 0: 61 | t = x.STREAMS.find({"MD5 Hash": md5hash}).count() 62 | if t > 0: 63 | r = x.STREAMS.find({"MD5 Hash": md5hash}, {"PCAP ID": 1, "_id": 0}) 64 | for i in r: 65 | pcap_id = i['PCAP ID'] 66 | else: 67 | return response + UIMessage('No PCAP ID, you need to index the pcap file') 68 | if s > 0: 69 | r = x.INDEX.find({"MD5 Hash": md5hash}, {"PCAP ID": 1, "_id": 0}) 70 | for i in r: 71 | pcap_id = i['PCAP ID'] 72 | except Exception as e: 73 | return response + UIMessage(str(e)) 74 | 75 | # Load the pcap file and look for IP addresses, then GeoIP them 76 | convo = [] 77 | pkts = rdpcap(pcap) 78 | for p in pkts: 79 | if p.haslayer(IP) and p.haslayer(TCP): 80 | proto = 'TCP' 81 | s = proto, p[IP].src, p[TCP].sport 82 | r = proto, p[IP].dst, p[TCP].dport 83 | if s not in convo: 84 | convo.append(s) 85 | if r in convo: 86 | convo.remove(r) 87 | else: 88 | convo.append(r) 89 | else: 90 | pass 91 | if p.haslayer(IP) and p.haslayer(UDP): 92 | proto = 'UDP' 93 | s = proto, p[IP].src, p[UDP].sport 94 | r = proto, p[IP].dst, p[UDP].dport 95 | if s not in convo: 96 | convo.append(s) 97 | if r in convo: 98 | convo.remove(r) 99 | else: 100 | convo.append(r) 101 | else: 102 | pass 103 | 104 | # Run each IP through a GeoIP lookup and build a directory object to insert into the database 105 | for proto, src, sport in convo: 106 | s = lookup_geo(src) 107 | if s is not None: 108 | geo = OrderedDict({'PCAP ID': pcap_id, 'Protocol': proto, 'src': src, 'src port': sport, 'src geo': 
s}) 109 | t = x.GEOIP.find({'src': src, 'src port': sport}).count() 110 | if t > 0: 111 | pass 112 | else: 113 | c.insert(geo) 114 | else: 115 | pass 116 | 117 | # Build the URL for the returned Maltego entity 118 | url = config['web/server'].strip('\'') 119 | port = config['web/port'].strip('\'') 120 | map_url = 'http://%s:%s/pcap/%s/map' % (url, port, pcap_id) 121 | e = GeoMap(map_url) 122 | response += e 123 | return response 124 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap-http.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import datetime 4 | import logging 5 | 6 | from sniffmypacketsv2.transforms.common.layers.http import * 7 | from common.dbconnect import mongo_connect, find_session 8 | from common.hashmethods import * 9 | 10 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 11 | from scapy.all import * 12 | from collections import OrderedDict 13 | from common.entities import pcapFile 14 | from canari.framework import configure 15 | from canari.maltego.entities import Website 16 | from canari.maltego.message import UIMessage 17 | from canari.config import config 18 | 19 | bind_layers(TCP, HTTP) 20 | 21 | __author__ = 'catalyst256' 22 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 23 | __credits__ = [] 24 | 25 | __license__ = 'GPL' 26 | __version__ = '0.1' 27 | __maintainer__ = 'catalyst256' 28 | __email__ = 'catalyst256@gmail.com' 29 | __status__ = 'Development' 30 | 31 | __all__ = [ 32 | 'dotransform' 33 | ] 34 | 35 | 36 | @configure( 37 | label='Find HTTP Requests', 38 | description='Find HTTP Requests in a PCAP file', 39 | uuids=['sniffMyPacketsv2.v2.pcap_2_http'], 40 | inputs=[('[SmP] - HTTP', pcapFile)], 41 | debug=True 42 | ) 43 | def dotransform(request, response): 44 | 45 | # Store the pcap file as a variable 46 | pcap = request.value 47 | usedb = config['working/usedb'] 48 | # Check to see if we are using the database or not 49 | if usedb > 0: 50 | # Connect to the database so we can insert the record created below 51 | x = mongo_connect() 52 | c = x['HTTP'] 53 | 54 | # Hash the pcap file 55 | try: 56 | md5hash = md5_for_file(pcap) 57 | except Exception as e: 58 | return response + UIMessage(str(e)) 59 | 60 | d = find_session(md5hash) 61 | pcap_id = d[0] 62 | else: 63 | pass 64 | 65 | # Find HTTP Requests 66 | pkts = rdpcap(pcap) 67 | http_requests = [] 68 | for p in pkts: 69 | if p.haslayer(HTTPRequest): 70 | timestamp = datetime.datetime.fromtimestamp(p.time).strftime('%Y-%m-%d %H:%M:%S.%f') 71 | r = p[HTTPRequest].Host 72 | if usedb > 0: 73 | http = OrderedDict({'PCAP ID': pcap_id, 74 | 'Time Stamp': timestamp, 75 | 'Type': 'HTTP Request', 'IP': {'src': p[IP].src, 'dst': p[IP].dst}, 76 | 'HTTP': {'Method': p[HTTPRequest].Method, 'URI': p[HTTPRequest].Path, 77 | 'Referer': p[HTTPRequest].Referer, 'Host': p[HTTPRequest].Host}}) 78 | # Check if record already exists 79 | s = x.HTTP.find({'Time Stamp': timestamp}).count() 80 | if s > 0: 81 | pass 82 | else: 83 | c.insert(http) 84 | if r not in http_requests: 85 | http_requests.append(r) 86 | else: 87 | pass 88 | 89 | for i in http_requests: 90 | h = Website(i) 91 | response += h 92 | return response -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap-openwireshark.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 
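# os.system() blocks until the spawned Wireshark process exits, which is
# why this transform only reports back to Maltego once Wireshark is closed.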
| from common.entities import pcapFile 4 | from canari.maltego.message import UIMessage 5 | from canari.framework import configure 6 | 7 | __author__ = 'catalyst256' 8 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 9 | __credits__ = [] 10 | 11 | __license__ = 'GPL' 12 | __version__ = '0.1' 13 | __maintainer__ = 'catalyst256' 14 | __email__ = 'catalyst256@gmail.com' 15 | __status__ = 'Development' 16 | 17 | __all__ = [ 18 | 'dotransform' 19 | ] 20 | 21 | @configure( 22 | label='Open Wireshark', 23 | description='Open pcap file with Wireshark', 24 | uuids=['sniffmypacketsv2.v2.open_pcap_wshark'], 25 | inputs=[('[SmP] - PCAP', pcapFile)], 26 | debug=False 27 | ) 28 | def dotransform(request, response): 29 | pcap = request.value 30 | try: 31 | cmd = 'wireshark ' + pcap 32 | os.system(cmd) 33 | except Exception as e: 34 | return response + UIMessage(str(e)) 35 | return response + UIMessage('Wireshark has closed!') 36 | 37 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap-packets.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import json 4 | from common.hashmethods import * 5 | from common.dbconnect import mongo_connect 6 | from common.entities import pcapFile, pcapStream 7 | from canari.maltego.message import UIMessage 8 | from common.loadpackets import loadpackets 9 | from common.packetParser import * 10 | from canari.framework import configure 11 | from canari.config import config 12 | from common.auxtools import error_logging 13 | import datetime 14 | 15 | __author__ = 'catalyst256' 16 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 17 | __credits__ = [] 18 | 19 | __license__ = 'GPL' 20 | __version__ = '0.1' 21 | __maintainer__ = 'catalyst256' 22 | __email__ = 'catalyst256@gmail.com' 23 | __status__ = 'Development' 24 | 25 | __all__ = [ 26 | 'dotransform' 27 | ] 28 | 29 | 30 | @configure( 31 | label='Load Packets from PCAP', 32 | description='Load packets from PCAP', 33 | uuids=['sniffMyPacketsv2.v2.pcap_2_packets'], 34 | inputs=[('[SmP] - PCAP', pcapFile)], 35 | debug=True 36 | ) 37 | def dotransform(request, response): 38 | 39 | pcap = request.value 40 | usedb = config['working/usedb'] 41 | # Check to see if we are using the database or not 42 | if usedb == 0: 43 | return response + UIMessage('You have chosen not to use a database') 44 | else: 45 | pass 46 | 47 | d = mongo_connect() 48 | c = d['PACKETS'] 49 | y = d['PACKETSUMMARY'] 50 | url = config['web/server'].strip('\'') 51 | port = config['web/port'].strip('\'') 52 | 53 | # Hash the pcap file 54 | try: 55 | md5pcap = md5_for_file(pcap) 56 | except Exception as e: 57 | return response + UIMessage(str(e)) 58 | 59 | def convert_encoding(data, encoding='utf-8'): 60 | if isinstance(data, dict): 61 | return dict((convert_encoding(key), convert_encoding(value)) \ 62 | for key, value in data.iteritems()) 63 | elif isinstance(data, list): 64 | return [convert_encoding(element) for element in data] 65 | elif isinstance(data, unicode): 66 | return data.encode(encoding, errors='replace') 67 | else: 68 | return data 69 | 70 | # Get the PCAP ID for the pcap file 71 | try: 72 | s = d.INDEX.find({"MD5 Hash": md5pcap}).count() 73 | if s == 0: 74 | t = d.STREAMS.find({"MD5 Hash": md5pcap}).count() 75 | if t > 0: 76 | r = d.STREAMS.find({"MD5 Hash": md5pcap}, {"PCAP ID": 1, "Stream ID": 1, "_id": 0}) 77 | for i in r: 78 | pcap_id = i['PCAP ID'] 79 | streamid = i['Stream ID'] 80 | else: 
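# Neither INDEX nor STREAMS knows this hash, so the capture was never
# indexed and there is no PCAP ID / Stream ID to attach packet records to.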
81 | return response + UIMessage('No PCAP ID, you need to index the pcap file') 82 | if s > 0: 83 | r = d.INDEX.find({"MD5 Hash": md5pcap}, {"PCAP ID": 1, "_id": 0}) 84 | for i in r: 85 | pcap_id = i['PCAP ID'] 86 | streamid = i['PCAP ID'] 87 | except Exception as e: 88 | return response + UIMessage(str(e)) 89 | 90 | 91 | stream_url = 'http://%s:%s/pcap/%s/packets' % (url, port, streamid) 92 | pkts = loadpackets(pcap) 93 | 94 | # Dump the full packets into the database for later use. 95 | x = find_layers(pkts, pcap, pcap_id, streamid) 96 | try: 97 | for s in x: 98 | tstamp = s['Buffer']['timestamp'] 99 | q = d.PACKETS.find({"Buffer.timestamp": tstamp}).count() 100 | if q > 0: 101 | pass 102 | else: 103 | v = OrderedDict(json.loads(json.dumps(convert_encoding(s), encoding='latin-1', ensure_ascii=False))) 104 | c.insert(v) 105 | except Exception as e: 106 | error_logging(str(e), 'Packets') 107 | 108 | # Build the packet summary so we can make pretty pages. 109 | count = 1 110 | packet = OrderedDict() 111 | try: 112 | for p in pkts: 113 | tstamp = datetime.datetime.fromtimestamp(p.time).strftime('%Y-%m-%d %H:%M:%S.%f') 114 | p_header = {"PCAP ID": pcap_id, "Buffer": {"timestamp": tstamp, "packetnumber": count, "pcapfile": pcap, 115 | "packet_length": p.len, "StreamID": streamid}} 116 | packet.update(p_header) 117 | if p.haslayer(IP): 118 | p_ip = {"IP": {"ip_src": p[IP].src, "ip_dst": p[IP].dst, "ip_ttl": p[IP].ttl}} 119 | packet.update(p_ip) 120 | layers = [] 121 | counter = 0 122 | while True: 123 | layer = p.getlayer(counter) 124 | if layer != None: 125 | if layer.name == 'HTTP': 126 | pass 127 | else: 128 | layers.append(layer.name) 129 | else: 130 | break 131 | counter += 1 132 | p_layers = {"Layers": layers} 133 | packet.update(p_layers) 134 | view_url = 'http://%s:%s/pcap/%s/%s/packets/%s' % (url, port, pcap_id, streamid, count) 135 | p_view = {"View": view_url} 136 | packet.update(p_view) 137 | t = d.PACKETSUMMARY.find({"Buffer.timestamp": tstamp}).count() 138 | if t > 0: 139 | pass 140 | else: 141 | y.insert(packet) 142 | count += 1 143 | packet.clear() 144 | except Exception as e: 145 | error_logging(str(e), 'PacketSummary') 146 | 147 | # Return the Maltego Entity 148 | a = pcapStream(stream_url) 149 | response += a 150 | return response 151 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap-ssltraffic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import binascii 4 | import datetime 5 | from common.hashmethods import * 6 | from common.dbconnect import mongo_connect, find_session 7 | import logging 8 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 9 | from scapy.all import * 10 | from common.entities import pcapFile 11 | from canari.maltego.message import UIMessage 12 | from canari.maltego.entities import Website 13 | from canari.framework import configure 14 | from canari.config import config 15 | 16 | __author__ = 'catalyst256' 17 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 18 | __credits__ = [] 19 | 20 | __license__ = 'GPL' 21 | __version__ = '0.1' 22 | __maintainer__ = 'catalyst256' 23 | __email__ = 'catalyst256@gmail.com' 24 | __status__ = 'Development' 25 | 26 | __all__ = [ 27 | 'dotransform' 28 | ] 29 | 30 | @configure( 31 | label='Find SSL Traffic', 32 | description='Looks for SSL connections within a pcap', 33 | uuids=['sniffMyPacketsv2.v2.pcap_2_ssltraffic'], 34 | inputs=[('[SmP] - SSL', pcapFile)], 35 | 
debug=True 36 | ) 37 | def dotransform(request, response): 38 | pcap = request.value 39 | usedb = config['working/usedb'] 40 | # Check to see if we are using the database or not 41 | if usedb > 0: 42 | # Connect to the database so we can insert the record created below 43 | d = mongo_connect() 44 | c = d['SSL'] 45 | 46 | # Hash the pcap file 47 | try: 48 | md5hash = md5_for_file(pcap) 49 | except Exception as e: 50 | return response + UIMessage(str(e)) 51 | d = find_session(md5hash) 52 | pcap_id = d[0] 53 | else: 54 | pass 55 | 56 | # Load the packets 57 | pkts = rdpcap(pcap) 58 | # Look for SSL packets and pull out the required information. 59 | servers = [] 60 | try: 61 | for p in pkts: 62 | if p.haslayer(IP) and p.haslayer(TCP) and p.haslayer(Raw): 63 | x = p[Raw].load 64 | x = hexstr(x) 65 | x = x.split(' ') 66 | if x[0] == '16': 67 | timestamp = datetime.datetime.fromtimestamp(p.time).strftime('%Y-%m-%d %H:%M:%S.%f') 68 | stype = 'Handshake' 69 | if x[5] == '01': 70 | htype = 'Client Hello' 71 | slen = int(''.join(x[131:133]), 16) 72 | s = 133 + slen 73 | sname = binascii.unhexlify(''.join(x[133:s])) 74 | if sname not in servers: 75 | servers.append(sname) 76 | if usedb > 0: 77 | data = {'PCAP ID': pcap_id, 'SSL Type': stype, 'Handshake Type': htype, 78 | 'Time Stamp': timestamp, 79 | 'Source IP': p[IP].src, 'Source Port': p[TCP].sport, 'Destination IP': p[IP].dst, 80 | 'Destination Port': p[TCP].dport, 'Server Name': sname} 81 | t = d.SSL.find({'Time Stamp': timestamp}).count() 82 | if t > 0: 83 | pass 84 | else: 85 | c.insert(data) 86 | else: 87 | pass 88 | 89 | if x[5] == '02': 90 | htype = 'Server Hello' 91 | ctype = ''.join(x[76:78]) 92 | if usedb > 0: 93 | data = {'PCAP ID': pcap_id, 'SSL Type': stype, 'Handshake Type': htype, 94 | 'Time Stamp': timestamp, 95 | 'Source IP': p[IP].src, 'Source Port': p[TCP].sport, 'Destination IP': p[IP].dst, 96 | 'Destination Port': p[TCP].dport, 'Cipher Suite': ctype} 97 | t = d.SSL.find({'Time Stamp': timestamp}).count() 98 | if t > 0: 99 | pass 100 | else: 101 | c.insert(data) 102 | else: 103 | pass 104 | else: 105 | pass 106 | else: 107 | pass 108 | except Exception as e: 109 | return response + UIMessage(str(e)) 110 | 111 | # Return Maltego entities based on the SSL server name 112 | for s in servers: 113 | e = Website(s) 114 | response += e 115 | return response 116 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap-streams.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import uuid 4 | from common.pcapstreams import create_streams 5 | from collections import OrderedDict 6 | from common.pcaptools import * 7 | from common.hashmethods import * 8 | from common.dbconnect import mongo_connect, find_session 9 | from common.entities import pcapFile 10 | from canari.maltego.message import UIMessage 11 | from canari.framework import configure 12 | from canari.config import config 13 | 14 | __author__ = 'catalyst256' 15 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 16 | __credits__ = [] 17 | 18 | __license__ = 'GPL' 19 | __version__ = '0.1' 20 | __maintainer__ = 'catalyst256' 21 | __email__ = 'catalyst256@gmail.com' 22 | __status__ = 'Development' 23 | 24 | __all__ = [ 25 | 'dotransform' 26 | ] 27 | 28 | 29 | @configure( 30 | label='Get TCP/UDP Streams', 31 | description='Extract TCP/UDP streams from pcap file', 32 | uuids=['sniffMyPackets.v2.pcap_2_streams'], 33 | inputs=[('[SmP] - Streams', 
pcapFile)], 34 | debug=True 35 | ) 36 | def dotransform(request, response): 37 | pcap = request.value 38 | folder = '' 39 | usedb = config['working/usedb'] 40 | # Check to see if we are using the database or not 41 | if usedb > 0: 42 | # Connect to the database so we can insert the record created below 43 | x = mongo_connect() 44 | c = x['STREAMS'] 45 | # Hash the pcap file 46 | try: 47 | md5hash = md5_for_file(pcap) 48 | d = find_session(md5hash) 49 | pcap_id = d[0] 50 | folder = d[2] 51 | except Exception as e: 52 | return response + UIMessage(str(e)) 53 | else: 54 | w = config['working/directory'].strip('\'') 55 | try: 56 | if w != '': 57 | w = w + '/' + str(uuid.uuid4())[:12].replace('-', '') 58 | if not os.path.exists(w): 59 | os.makedirs(w) 60 | folder = w 61 | else: 62 | return response + UIMessage('No working directory set, check your config file') 63 | except Exception as e: 64 | return response + UIMessage(e) 65 | 66 | # Create TCP/UDP stream files 67 | s = create_streams(pcap, folder) 68 | if usedb > 0: 69 | for i in s: 70 | # Create StreamID 71 | streamid = str(uuid.uuid4())[:8] 72 | # Get a count of packets available 73 | try: 74 | pkcount = packet_count(i) 75 | except Exception as e: 76 | return response + UIMessage(str(e)) 77 | # Get the start/end time of packets 78 | try: 79 | pcap_time = get_time(i) 80 | except Exception as e: 81 | return response + UIMessage(str(e)) 82 | # Hash the pcap file 83 | try: 84 | md5hash = md5_for_file(i) 85 | sha1hash = sha1_for_file(i) 86 | except Exception as e: 87 | return response + UIMessage(str(e)) 88 | 89 | # Pull out the details of the packets 90 | l = len(folder) + 1 91 | raw = i[l:-5] 92 | pkt = raw.replace('-', ' ').replace(':', ' ').split() 93 | 94 | # Create the dictonary object to insert into database 95 | data = OrderedDict({'PCAP ID': pcap_id, 'Stream ID': streamid, 'Folder': folder, 'Packet Count': pkcount, 96 | 'File Name': i, 'First Packet': pcap_time[0], 'Last Packet': pcap_time[1], 97 | 'MD5 Hash': md5hash, 'SHA1 Hash': sha1hash, 98 | 'Packet': {'Protocol': pkt[0], 'Source IP': pkt[1], 'Source Port': pkt[2], 99 | 'Destination IP': pkt[3], 'Destination Port': pkt[4]}}) 100 | 101 | # Check to see if the record exists 102 | try: 103 | t = x.STREAMS.find({"File Name": i}).count() 104 | if t > 0: 105 | pass 106 | else: 107 | c.insert(data) 108 | except Exception as e: 109 | return response + UIMessage(str(e)) 110 | else: 111 | pass 112 | # Create Maltego entities for each pcap file 113 | for p in s: 114 | e = pcapFile(p) 115 | response += e 116 | return response 117 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/pcap2stream.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.hashmethods import * 4 | from common.dbconnect import mongo_connect, find_session 5 | from common.entities import pcapFile, pcapStream 6 | from canari.maltego.message import UIMessage 7 | from canari.framework import configure 8 | from canari.config import config 9 | 10 | __author__ = 'catalyst256' 11 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 12 | __credits__ = [] 13 | 14 | __license__ = 'GPL' 15 | __version__ = '0.1' 16 | __maintainer__ = 'catalyst256' 17 | __email__ = 'catalyst256@gmail.com' 18 | __status__ = 'Development' 19 | 20 | __all__ = [ 21 | 'dotransform' 22 | ] 23 | 24 | 25 | @configure( 26 | label='Extract Stream Info', 27 | description='Extract Stream information', 28 | 
uuids=['sniffMyPacketsv2.v2.stream2info'], 29 | inputs=[('[SmP] - Streams', pcapFile)], 30 | debug=True 31 | ) 32 | def dotransform(request, response): 33 | pcap = request.value 34 | usedb = config['working/usedb'] 35 | if usedb > 0: 36 | # Connect to the database so we can insert the record created below 37 | x = mongo_connect() 38 | c = x['STREAMS'] 39 | 40 | # Hash the pcap file 41 | try: 42 | md5hash = md5_for_file(pcap) 43 | except Exception as e: 44 | return response + UIMessage(str(e)) 45 | d = find_session(md5hash) 46 | folder = d[2] 47 | else: 48 | folder = config['working/directory'] 49 | 50 | l = len(folder) + 11 51 | raw = pcap[l:-5] 52 | raw = raw.split('-') 53 | banner = 'Protocol:%s\nSource:%s\nDestination:%s' % (raw[0], raw[1], raw[2]) 54 | e = pcapStream(banner) 55 | response += e 56 | return response 57 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/session_2_dns.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect 4 | from common.entities import pcapFile 5 | from canari.framework import configure 6 | from canari.maltego.entities import Website 7 | from canari.maltego.message import UIMessage 8 | from canari.config import config 9 | 10 | __author__ = 'catalyst256' 11 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 12 | __credits__ = [] 13 | 14 | __license__ = 'GPL' 15 | __version__ = '0.1' 16 | __maintainer__ = 'catalyst256' 17 | __email__ = 'catalyst256@gmail.com' 18 | __status__ = 'Development' 19 | 20 | __all__ = [ 21 | 'dotransform' 22 | ] 23 | 24 | 25 | @configure( 26 | label='Return DNS Requests', 27 | description='Return DNS Requests from Session ID', 28 | uuids=['sniffMyPacketsv2.v2.session_2_dns'], 29 | inputs=[('[SmP] - Sessions', pcapFile)], 30 | debug=True 31 | ) 32 | def dotransform(request, response): 33 | 34 | pcap = request.value 35 | usedb = config['working/usedb'] 36 | # Check to see if we are using the database or not 37 | if usedb == 0: 38 | return response + UIMessage('No database support configured, check your config file') 39 | else: 40 | pass 41 | 42 | x = mongo_connect() 43 | try: 44 | r = x.STREAMS.find({"File Name": pcap}).count() 45 | if r > 0: 46 | p = x.STREAMS.find({"File Name": pcap}, {"Stream ID": 1, "_id": 0}) 47 | for i in p: 48 | sessionid = i['Stream ID'] 49 | else: 50 | return response + UIMessage('This needs to be run from a TCP/UDP stream') 51 | except Exception as e: 52 | return response + UIMessage(str(e)) 53 | 54 | try: 55 | t = x.DNS.find({"Stream ID": sessionid}).count() 56 | if t > 0: 57 | p = x.DNS.find({"Stream ID": sessionid}, {"Request Details.Query Name": 1, "_id": 0}) 58 | for i in p: 59 | e = Website(i['Request Details']['Query Name']) 60 | response += e 61 | return response 62 | else: 63 | return response + UIMessage('No DNS records found') 64 | except Exception as e: 65 | return response + UIMessage(str(e)) -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/session_2_ipaddr.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect 4 | from common.entities import pcapFile 5 | from canari.framework import configure 6 | from canari.maltego.entities import IPv4Address 7 | from canari.maltego.message import UIMessage 8 | from canari.config import config 9 | 10 | 
__author__ = 'catalyst256' 11 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 12 | __credits__ = [] 13 | 14 | __license__ = 'GPL' 15 | __version__ = '0.1' 16 | __maintainer__ = 'catalyst256' 17 | __email__ = 'catalyst256@gmail.com' 18 | __status__ = 'Development' 19 | 20 | __all__ = [ 21 | 'dotransform' 22 | ] 23 | 24 | 25 | @configure( 26 | label='Return IPv4 Address(s)', 27 | description='Return IPv4 Addresses from Session ID', 28 | uuids=['sniffMyPacketsv2.v2.session_2_ipaddr'], 29 | inputs=[('[SmP] - Sessions', pcapFile)], 30 | debug=True 31 | ) 32 | def dotransform(request, response): 33 | 34 | pcap = request.value 35 | usedb = config['working/usedb'] 36 | # Check to see if we are using the database or not 37 | if usedb == 0: 38 | return response + UIMessage('No database support configured, check your config file') 39 | else: 40 | pass 41 | x = mongo_connect() 42 | ipaddr = [] 43 | try: 44 | r = x.STREAMS.find({"File Name": pcap}).count() 45 | if r > 0: 46 | p = x.STREAMS.find({"File Name": pcap}, {"Packet.Source IP": 1, "Packet.Destination IP": 1, "_id": 0}) 47 | for i in p: 48 | sip = i['Packet']['Source IP'] 49 | dip = i['Packet']['Destination IP'] 50 | ipaddr.append(sip) 51 | ipaddr.append(dip) 52 | else: 53 | return response + UIMessage('This needs to be run from a TCP/UDP stream') 54 | except Exception as e: 55 | return response + UIMessage(str(e)) 56 | 57 | for t in ipaddr: 58 | e = IPv4Address(t) 59 | response += e 60 | return response -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/session_2_pcap.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect 4 | from common.entities import SessionID, pcapFile 5 | from canari.framework import configure 6 | from canari.maltego.message import UIMessage 7 | from canari.config import config 8 | 9 | __author__ = 'catalyst256' 10 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 11 | __credits__ = [] 12 | 13 | __license__ = 'GPL' 14 | __version__ = '0.1' 15 | __maintainer__ = 'catalyst256' 16 | __email__ = 'catalyst256@gmail.com' 17 | __status__ = 'Development' 18 | 19 | __all__ = [ 20 | 'dotransform' 21 | ] 22 | 23 | 24 | @configure( 25 | label='Return PCAP File', 26 | description='Return PCAP file from Session ID', 27 | uuids=['sniffMyPacketsv2.v2.session_2_pcap'], 28 | inputs=[('[SmP] - Sessions', SessionID)], 29 | debug=True 30 | ) 31 | def dotransform(request, response): 32 | sessionid = request.value 33 | usedb = config['working/usedb'] 34 | # Check to see if we are using the database or not 35 | if usedb == 0: 36 | return response + UIMessage('No database support configured, check your config file') 37 | else: 38 | pass 39 | x = mongo_connect() 40 | try: 41 | r = x.INDEX.find({"PCAP ID": sessionid}).count() 42 | if r > 0: 43 | p = x.INDEX.find({"PCAP ID": sessionid}, {"_id": 0}) 44 | for i in p: 45 | pcap = i['PCAP Path'] 46 | s = pcapFile(pcap) 47 | response += s 48 | return response 49 | else: 50 | return response + UIMessage('PCAP not found, is the SessionID correct??') 51 | except Exception as e: 52 | return response + UIMessage(str(e)) -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/session_2_streams.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect 4 | from 
common.entities import pcapFile 5 | from canari.framework import configure 6 | from canari.maltego.message import UIMessage 7 | from canari.config import config 8 | 9 | __author__ = 'catalyst256' 10 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 11 | __credits__ = [] 12 | 13 | __license__ = 'GPL' 14 | __version__ = '0.1' 15 | __maintainer__ = 'catalyst256' 16 | __email__ = 'catalyst256@gmail.com' 17 | __status__ = 'Development' 18 | 19 | __all__ = [ 20 | 'dotransform' 21 | ] 22 | 23 | 24 | @configure( 25 | label='Return TCP/UDP Streams', 26 | description='Return TCP/UDP streams from Session ID', 27 | uuids=['sniffMyPacketsv2.v2.session_2_streams'], 28 | inputs=[('[SmP] - Sessions', pcapFile)], 29 | debug=True 30 | ) 31 | def dotransform(request, response): 32 | 33 | pcap = request.value 34 | usedb = config['working/usedb'] 35 | # Check to see if we are using the database or not 36 | if usedb == 0: 37 | return response + UIMessage('No database support configured, check your config file') 38 | else: 39 | pass 40 | x = mongo_connect() 41 | 42 | try: 43 | r = x.INDEX.find({"PCAP Path": pcap}).count() 44 | if r > 0: 45 | p = x.INDEX.find({"PCAP Path": pcap}, {"PCAP ID": 1, "_id": 0}) 46 | for i in p: 47 | sessionid = i['PCAP ID'] 48 | else: 49 | return response + UIMessage('PCAP not found, has the pcap file been indexed?') 50 | except Exception as e: 51 | return response + UIMessage(str(e)) 52 | 53 | try: 54 | s = x.STREAMS.find({"PCAP ID": sessionid}).count() 55 | if s > 0: 56 | p = x.STREAMS.find({"PCAP ID": sessionid}, {"File Name": 1, "_id": 0}) 57 | for i in p: 58 | fname = i['File Name'] 59 | q = pcapFile(fname) 60 | response += q 61 | return response 62 | else: 63 | return response + UIMessage('No streams found for that Session ID') 64 | except Exception as e: 65 | return response + UIMessage(str(e)) 66 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/streams2ipaddr.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from common.dbconnect import mongo_connect 4 | from common.entities import pcapFile 5 | from canari.maltego.entities import IPv4Address 6 | from canari.maltego.message import UIMessage 7 | from canari.framework import configure 8 | from canari.config import config 9 | 10 | __author__ = 'catalyst256' 11 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 12 | __credits__ = [] 13 | 14 | __license__ = 'GPL' 15 | __version__ = '0.1' 16 | __maintainer__ = 'catalyst256' 17 | __email__ = 'catalyst256@gmail.com' 18 | __status__ = 'Development' 19 | 20 | __all__ = [ 21 | 'dotransform' 22 | ] 23 | 24 | 25 | @configure( 26 | label='Extract IP Addresses', 27 | description='Extract IP addresses from a pcap stream file', 28 | uuids=['sniffMyPacketsv2.v2.streams_2_ipaddress'], 29 | inputs=[('[SmP] - IP', pcapFile)], 30 | debug=True 31 | ) 32 | def dotransform(request, response): 33 | 34 | filename = request.value 35 | usedb = config['working/usedb'] 36 | # Check to see if we are using the database or not 37 | if usedb == 0: 38 | return response + UIMessage('No database support configured, check your config file') 39 | else: 40 | pass 41 | # Connect to the database so we can search for IP addresses. 
42 | x = mongo_connect() 43 | c = x['STREAMS'] 44 | 45 | try: 46 | hosts = [] 47 | r = x.STREAMS.find({'File Name': {'$regex': filename}}) 48 | if r.count() > 0: 49 | for doc in r: 50 | hosts.append(doc['Packet']['Source IP']) 51 | hosts.append(doc['Packet']['Destination IP']) 52 | # streamid = doc['Stream ID'] 53 | else: 54 | return response + UIMessage('No records found, please make sure the pcap stream file is indexed') 55 | 56 | for h in hosts: 57 | e = IPv4Address(h) 58 | # e += Field('streamid', streamid, displayname='Stream ID', MatchingRule='Loose') 59 | response += e 60 | return response 61 | except Exception as e: 62 | return response + UIMessage(str(e)) 63 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/uploadfile.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import requests 4 | import time 5 | from canari.config import config 6 | from common.entities import Artifact 7 | from canari.framework import configure 8 | from canari.maltego.message import UIMessage 9 | from common.dbconnect import mongo_connect 10 | from common.hashmethods import * 11 | 12 | __author__ = 'catalyst256' 13 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 14 | __credits__ = [] 15 | 16 | __license__ = 'GPL' 17 | __version__ = '0.1' 18 | __maintainer__ = 'catalyst256' 19 | __email__ = 'catalyst256@gmail.com' 20 | __status__ = 'Development' 21 | 22 | __all__ = [ 23 | 'dotransform' 24 | ] 25 | 26 | 27 | @configure( 28 | label='Upload file to Web Server', 29 | description='Uploads a file to the web server', 30 | uuids=['sniffMyPacketsv2.v2.artifact_2_web'], 31 | inputs=[('[SmP] - Artifacts', Artifact)], 32 | debug=True 33 | ) 34 | def dotransform(request, response): 35 | 36 | filename = request.value 37 | folder = request.fields['path'] 38 | usedb = config['working/usedb'] 39 | # Check to see if we are using the database or not 40 | if usedb == 0: 41 | return response + UIMessage('No database support configured, check your config file') 42 | else: 43 | pass 44 | 45 | # Build the web server variables 46 | url = config['web/server'].strip('\'') 47 | port = config['web/port'].strip('\'') 48 | upload_url = 'http://%s:%s/pcap/_uploads' % (url, port) 49 | 50 | # Connect to the database so we can insert the record created below 51 | x = mongo_connect() 52 | c = x['FILES'] 53 | 54 | now = time.strftime("%c") 55 | zipfile = '%s/%s' % (folder, filename) 56 | 57 | # Hash the pcap file 58 | try: 59 | md5hash = md5_for_file(zipfile) 60 | sha1hash = sha1_for_file(zipfile) 61 | except Exception as e: 62 | return response + UIMessage(str(e)) 63 | 64 | # Get the PCAP ID for the pcap file 65 | try: 66 | s = x.ARTIFACTS.find({"MD5 Hash": md5hash}).count() 67 | if s > 0: 68 | r = x.ARTIFACTS.find({"MD5 Hash": md5hash}, {"File Type": 1, "PCAP ID": 1, "_id": 0}) 69 | for i in r: 70 | pcap_id = i['PCAP ID'] 71 | ftype = i['File Type'] 72 | else: 73 | return response + UIMessage('No PCAP ID, you need to index the pcap file') 74 | except Exception as e: 75 | return response + UIMessage(str(e)) 76 | 77 | download_url = 'http://%s:%s/pcap/downloads/%s' % (url, port, filename) 78 | 79 | # Check to see if the file is already uploaded 80 | 81 | s = c.find({'File Name': filename}).count() 82 | if s > 0: 83 | return response + UIMessage('File already uploaded!!') 84 | else: 85 | data = {'Upload Time': now, 'File Name': filename, 'Folder': folder, 'MD5 Hash': md5hash, 'SHA1 Hash': sha1hash, 86 | 'Download': 
download_url, 'PCAP ID': pcap_id, 'File Type': ftype} 87 | 88 | try: 89 | # Create the POST request to upload the file 90 | files = {'files': open(zipfile, 'rb')} 91 | r = requests.post(upload_url, files=files) 92 | if r.status_code == 200: 93 | c.insert(data) 94 | return response + UIMessage('File Uploaded!!') 95 | else: 96 | return response + UIMessage(str(r.status_code)) 97 | except Exception as e: 98 | return response + UIMessage(str(e)) 99 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/uploadpcap.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import requests 4 | import time 5 | from canari.config import config 6 | from common.entities import pcapFile 7 | from canari.framework import configure 8 | from canari.maltego.message import UIMessage 9 | from common.dbconnect import mongo_connect 10 | from common.hashmethods import * 11 | 12 | __author__ = 'catalyst256' 13 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 14 | __credits__ = [] 15 | 16 | __license__ = 'GPL' 17 | __version__ = '0.1' 18 | __maintainer__ = 'catalyst256' 19 | __email__ = 'catalyst256@gmail.com' 20 | __status__ = 'Development' 21 | 22 | __all__ = [ 23 | 'dotransform' 24 | ] 25 | 26 | 27 | @configure( 28 | label='Upload PCAP to Web Server', 29 | description='Uploads the pcap file to the web server', 30 | uuids=['sniffMyPacketsv2.v2.pcapfile_2_web'], 31 | inputs=[('[SmP] - PCAP', pcapFile)], 32 | debug=True 33 | ) 34 | def dotransform(request, response): 35 | usedb = config['working/usedb'] 36 | # Check to see if we are using the database or not 37 | if usedb == 0: 38 | return response + UIMessage('No database support configured, check your config file') 39 | else: 40 | pass 41 | zipfile = request.value 42 | # Build the web server variables 43 | url = config['web/server'].strip('\'') 44 | port = config['web/port'].strip('\'') 45 | upload_url = 'http://%s:%s/pcap/_uploads' % (url, port) 46 | 47 | # Connect to the database so we can insert the record created below 48 | x = mongo_connect() 49 | c = x['FILES'] 50 | 51 | now = time.strftime("%c") 52 | 53 | # Hash the pcap file 54 | try: 55 | md5hash = md5_for_file(zipfile) 56 | sha1hash = sha1_for_file(zipfile) 57 | except Exception as e: 58 | return response + UIMessage(str(e)) 59 | 60 | # Get the PCAP ID for the pcap file 61 | try: 62 | s = x.INDEX.find({"MD5 Hash": md5hash}).count() 63 | if s == 0: 64 | t = x.STREAMS.find({"MD5 Hash": md5hash}).count() 65 | if t > 0: 66 | r = x.STREAMS.find({"MD5 Hash": md5hash}, {"Folder": 1, "PCAP ID": 1, "_id": 0}) 67 | for i in r: 68 | folder = i['Folder'] 69 | pcap_id = i['PCAP ID'] 70 | else: 71 | return response + UIMessage('No PCAP ID, you need to index the pcap file') 72 | if s > 0: 73 | r = x.INDEX.find({"MD5 Hash": md5hash}, {"Working Directory": 1, "PCAP ID": 1, "_id": 0}) 74 | for i in r: 75 | folder = i['Working Directory'] 76 | pcap_id = i['PCAP ID'] 77 | except Exception as e: 78 | return response + UIMessage(str(e)) 79 | 80 | f = zipfile.split('/') 81 | filename = f[len(f) - 1] 82 | filename = filename.replace(':', '') 83 | download_url = 'http://%s:%s/pcap/downloads/%s' % (url, port, filename) 84 | 85 | # Check to see if the file is already uploaded 86 | 87 | s = c.find({'File Name': filename}).count() 88 | if s > 0: 89 | return response + UIMessage('File already uploaded!!') 90 | else: 91 | data = {'Upload Time': now, 'File Name': filename, 'Folder': folder, 'MD5 Hash': 
md5hash, 'SHA1 Hash': sha1hash, 92 | 'Download': download_url, 'PCAP ID': pcap_id} 93 | 94 | 95 | try: 96 | # Create the POST request to upload the file 97 | files = {'files': open(zipfile, 'rb')} 98 | r = requests.post(upload_url, files=files) 99 | if r.status_code == 200: 100 | c.insert(data) 101 | return response + UIMessage('File Uploaded!!') 102 | else: 103 | return response + UIMessage('Whoops file upload didn\'t work.') 104 | except Exception as e: 105 | return response + UIMessage(str(e)) 106 | -------------------------------------------------------------------------------- /src/sniffmypacketsv2/transforms/uploadzip.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import requests 4 | import time 5 | from canari.config import config 6 | from common.entities import ZipFile 7 | from canari.framework import configure 8 | from canari.maltego.message import UIMessage 9 | from common.dbconnect import mongo_connect 10 | from common.hashmethods import * 11 | 12 | __author__ = 'catalyst256' 13 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 14 | __credits__ = [] 15 | 16 | __license__ = 'GPL' 17 | __version__ = '0.1' 18 | __maintainer__ = 'catalyst256' 19 | __email__ = 'catalyst256@gmail.com' 20 | __status__ = 'Development' 21 | 22 | __all__ = [ 23 | 'dotransform' 24 | ] 25 | 26 | 27 | @configure( 28 | label='Upload ZipFile to Web Server', 29 | description='Uploads the zip file to the web server', 30 | uuids=['sniffMyPacketsv2.v2.zipfile_2_web'], 31 | inputs=[('[SmP] - Sessions', ZipFile)], 32 | debug=True 33 | ) 34 | def dotransform(request, response): 35 | usedb = config['working/usedb'] 36 | # Check to see if we are using the database or not 37 | if usedb == 0: 38 | return response + UIMessage('No database support configured, check your config file') 39 | else: 40 | pass 41 | zipfile = request.value 42 | folder = request.fields['folder'] 43 | pcap_id = request.fields['sessionid'] 44 | # Build the web server variables 45 | url = config['web/server'].strip('\'') 46 | port = config['web/port'].strip('\'') 47 | upload_url = 'http://%s:%s/pcap/_uploads' % (url, port) 48 | 49 | # Connect to the database so we can insert the record created below 50 | x = mongo_connect() 51 | c = x['FILES'] 52 | 53 | now = time.strftime("%c") 54 | 55 | # Hash the pcap file 56 | try: 57 | md5hash = md5_for_file(zipfile) 58 | sha1hash = sha1_for_file(zipfile) 59 | except Exception as e: 60 | return response + UIMessage(str(e)) 61 | 62 | f = len(folder) + 1 63 | filename = zipfile[f:] 64 | download_url = 'http://%s:%s/pcap/downloads/%s' % (url, port, filename) 65 | 66 | # Check to see if the file is already uploaded 67 | 68 | s = c.find({'File Name': filename}).count() 69 | if s > 0: 70 | return response + UIMessage('File already uploaded!!') 71 | else: 72 | data = {'Upload Time': now, 'File Name': filename, 'Folder': folder, 'MD5 Hash': md5hash, 'SHA1 Hash': sha1hash, 73 | 'Download': download_url, 'PCAP ID': pcap_id} 74 | 75 | 76 | try: 77 | # Create the POST request to upload the file 78 | files = {'files': open(zipfile, 'rb')} 79 | r = requests.post(upload_url, files=files) 80 | if r.status_code == 200: 81 | c.insert(data) 82 | return response + UIMessage('File Uploaded!!') 83 | else: 84 | return response + UIMessage('Whoops file upload didn\'t work.') 85 | except Exception as e: 86 | return response + UIMessage(str(e)) 87 | -------------------------------------------------------------------------------- 
/src/sniffmypacketsv2/transforms/zipfolder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import zipfile 5 | from common.entities import Folder, ZipFile 6 | from canari.maltego.message import UIMessage, Field 7 | from canari.framework import configure 8 | from canari.config import config 9 | 10 | __author__ = 'catalyst256' 11 | __copyright__ = 'Copyright 2014, sniffmypacketsv2 Project' 12 | __credits__ = [] 13 | 14 | __license__ = 'GPL' 15 | __version__ = '0.1' 16 | __maintainer__ = 'catalyst256' 17 | __email__ = 'catalyst256@gmail.com' 18 | __status__ = 'Development' 19 | 20 | __all__ = [ 21 | 'dotransform' 22 | ] 23 | 24 | 25 | @configure( 26 | label='Zip Folder', 27 | description='Zip the working directory folder', 28 | uuids=['sniffMyPacketsv2.v2.folder_2_zip'], 29 | inputs=[('[SmP] - Sessions', Folder)], 30 | debug=True 31 | ) 32 | def dotransform(request, response): 33 | 34 | folder = request.value 35 | usedb = config['working/usedb'] 36 | # Check to see if we are using the database or not 37 | if usedb == 0: 38 | return response + UIMessage('No database support configured, check your config file') 39 | else: 40 | pass 41 | pcap_id = request.fields['sessionid'] 42 | save_file = '%s/%s.zip' % (folder, pcap_id) 43 | 44 | try: 45 | # Zip the files in the specified folder 46 | def zipdir(path, zip): 47 | for root, dirs, files in os.walk(path): 48 | for file in files: 49 | zip.write(os.path.join(root, file)) 50 | 51 | myzip = zipfile.ZipFile(save_file, 'w') 52 | zipdir(folder, myzip) 53 | myzip.close() 54 | 55 | except Exception as e: 56 | return response + UIMessage(str(e)) 57 | 58 | e = ZipFile(save_file) 59 | e += Field('folder', folder, displayname='Folder') 60 | e += Field('sessionid', pcap_id, displayname='Session ID') 61 | response += e 62 | return response 63 | --------------------------------------------------------------------------------
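For reference, a minimal standalone sketch of zipping a session folder with archive members stored relative to the folder root (zipdir() above stores the files' absolute paths); the /tmp paths are hypothetical and the snippet runs outside the Maltego/Canari framework:

#!/usr/bin/env python
# Sketch only: zip a working directory, storing members relative to the folder root.
import os
import zipfile

def zip_folder(path, archive):
    for root, dirs, files in os.walk(path):
        for fname in files:
            full = os.path.join(root, fname)
            # arcname keeps absolute filesystem paths out of the archive
            archive.write(full, arcname=os.path.relpath(full, path))

folder = '/tmp/smp-session'          # hypothetical working directory
save_file = '/tmp/smp-session.zip'   # written outside the folder being zipped
myzip = zipfile.ZipFile(save_file, 'w')
zip_folder(folder, myzip)
myzip.close()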