├── README.md
├── cscanV1.py
├── cscanV2.py
├── media
│   ├── 16053498949374.jpg
│   ├── 16053499858114.jpg
│   └── 16053501531691.jpg
└── requirements.txt

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# 🍺 Cscan ![Stage](https://img.shields.io/badge/Release-STABLE-brightgreen.svg) [![Python 3.7](https://img.shields.io/badge/Python-3.7-yellow.svg)](http://www.python.org/download/) ![Build Status](https://img.shields.io/badge/Version-2.0-red.svg)

🔧 A C-segment (/24) scanning tool; requires Python 3.7 or later

- ~~V1 is deprecated~~
- V2 has been rewritten with coroutines

> Setting the thread count too high will cause packet loss

## 1. Install
```
git clone https://github.com/j3ers3/Cscan
pip3 install -r requirements.txt
```

## 2. Configure ports (optional)
- Web ports: `Ports_web = [80, 8080]`
- Non-web ports: `Ports_other = [21, 22, 3306]`

(The full default lists live at the top of cscanV2.py; see the sketch after this README.)

## 3. Usage
- Basic scan

```
python3 cscanV2.py -i 192.168.0.1/24
```
![-w818](media/16053501531691.jpg)

- Scan only web ports, in quiet mode

```
python3 cscanV2.py -i 192.168.0.1/24 -t 100 -web -q
```

- Scan web ports and request a specific path to look for sensitive information

```
python3 cscanV2.py -i 192.168.0.1/24 -web -path /phpinfo.php
python3 cscanV2.py -f domain.txt -web -path /phpinfo.php
```

![-w938](media/16053499858114.jpg)

![-w938](media/16053498949374.jpg)


## 4. ChangeLog
#### v2.2.0
- Added `-key` to match a keyword in response content
- Added `-ip` to print the target IP
- Added `-port` to specify custom ports
- Improved file-mode output

#### v2.1.1
- Fixed some bugs

#### v2.1.0
- Added `-path` to scan a specified path
- Added `-web` to scan web ports only
- Added `-q` quiet mode
- Improved the output
- Fixed an HTTP header information bug
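The short lists in the README section above are just examples. The defaults actually shipped at the top of cscanV2.py in this repo are broader, and `-web` / `-port` adjust them at runtime; a minimal sketch of the relevant configuration, copied from cscanV2.py:

```python
# Default port lists at the top of cscanV2.py; edit these to change the defaults.
Ports_web = [80, 88, 443, 7001, 8000, 8008, 8888, 8080, 8088, 8089, 8161, 9090]
Ports_other = [21, 22, 445, 1100, 1433, 1434, 1521, 3306, 3389, 6379, 8009, 9200, 11211, 27017, 50070]

# At runtime: -web restricts the scan to Ports_web only,
# and -port 80,8080 overrides both lists with your own set.
```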
--------------------------------------------------------------------------------
/cscanV1.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# encoding:utf8
import sys
import argparse
import socket
import ipaddr  # legacy V1 dependency; not listed in requirements.txt (V2 uses netaddr)
import requests
from time import time
from threading import Thread
from bs4 import BeautifulSoup

if sys.version_info.major == 2:
    from Queue import Queue
else:
    from queue import Queue

requests.packages.urllib3.disable_warnings()

__author__ = "whois"
__update__ = "2019/06/30"
"""
C-segment web scan: probe a few web ports, grab page titles and version info.

ToDo
    Reverse-lookup domains through the Bing API
    Add path scanning
    Group results by IP and port
    Improve the multithreading
"""

Ports_web = [80, 88, 7001, 8000, 8009, 8888, 8080, 8443]
# Ports_web = [80, 81, 82, 88, 89, 443, 5000, 5001, 7001, 7070, 7777, 7788, 8000, 8001, 8002, 8008, 8080, 8081, 8088, 8089, 8090, 8443, 8888, 8899]

# Ports_other = []
Ports_other = [21, 22, 445, 1433, 1434, 1521, 3306, 3389, 6379]

Ports = Ports_other + Ports_web

Threads = 45
count = 0

user_agent = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.73 Safari/537.36"

purp = '\033[95m'
blue = '\033[94m'
red = '\033[31m'
yellow = '\033[93m'
end = '\033[0m'

queue = Queue()


def banner():
    print(red + """
                      ..:::::::::..
                 ..:::aad88x8888baa:::..
              .::::d:?88888xxx888?::8b::::.
            .:::d8888:?888xxxxx??a888888b:::.
          .:::d8888888a8888xxxaa8888888888b:::.
         ::::dP::::::::88888x88888::::::::Yb::::
        ::::dP:::::::::Y888888888P:::::::::Yb::::
       ::::d8::::x::::::Y8888888P:::::x:::::8b::::
      .::::88::::::::::::Y88888P::::::::::::88::::.
      :::::Y8baaaaaaaaaa88P:T:Y88aaaaaaaaaad8P:::::
      :::::::Y88888888888P::|::Y88888888888P:::::::
      ::::::::::::::::888:::|:::888::::::::::::::::
      `:::::::::::::::8888888888888b::::::::::::::'
       :::::::::::::::88888888888888::::::::::::::
        :::::::::::::d88888888888888:::::::::::::
         ::::::::::::88::88::88:::88::::::::::::
          `::::::::::88::88::88:::88::::::::::'
            `::::::::88::88::P::::88::::::::'
              `::::::88::88:::::::88::::::'
                 ``:::::::::::::::::::''
                      ``:::::::::''""" + yellow + """

      =================   WEB Info Scan   ==================
      =================   Code by whois   ==================
      =================       v1.2        ==================
      +++++++++++++++++++++++++++++++++++++++++++++++++++++

""" + end)


def get_info(url):
    try:
        # Redirect handling is not fully worked out yet
        r = requests.get(url, headers={'User-Agent': user_agent}, timeout=6, verify=False, allow_redirects=True)
        # pip install lxml
        soup = BeautifulSoup(r.content, 'lxml')

        info = blue + soup.title.string + end if soup.title and soup.title.string else "No title"

        if 'Server' in r.headers:
            info += "\t" + yellow + r.headers['Server'] + end
        if 'X-Powered-By' in r.headers:
            info += "\t" + purp + r.headers['X-Powered-By'] + end

        return info
    except Exception as e:
        pass


def get_ip(url):
    domain = url.split('/')[0] if '://' not in url else url.split('//')[1].split('/')[0]
    try:
        ip = socket.gethostbyname(domain)
        return ip
    except Exception as e:
        return False


def do_cscan(ip):
    global count
    for port in Ports:

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        try:
            s.settimeout(0.6)
            s.connect((str(ip), port))

            if port in Ports_other:
                url = str(ip) + ":" + str(port)
                info = blue + "open" + end

            else:
                protocol = "http" if port not in [443, 8443] else "https"
                url = "{protocol}://{ip}:{port}".format(protocol=protocol, ip=ip, port=port)

                info = get_info(url)

            if info is None:
                pass
            else:
                sys.stdout.write("%-28s %-30s\n" % (url, info))
                count += 1

            s.close()

        except Exception as e:
            s.close()
            continue


def do_file(url):
    ip, info = get_ip(url), get_info(url)

    try:
        if ip:
            sys.stdout.write("%-28s %-30s %-32s\n" % (url, ip, info))

    except Exception as e:
        # print(e)
        pass


def scan_cscan():
    while not queue.empty():
        do_cscan(queue.get())


def scan_file():
    while not queue.empty():
        do_file(queue.get())


def cscan(ips):
    ips = ipaddr.IPNetwork(ips)

    for ip in ips:
        queue.put(ip)

    time_start = time()

    threads_list = []
    threads = Threads

    for i in range(threads):
        t = Thread(target=scan_cscan)
        t.start()
        threads_list.append(t)

    for i in range(threads):
        threads_list[i].join()

    time_end = time() - time_start
    print(blue + "\nFound {0} ports in {1} seconds\n".format(count, time_end))


def files(file):

    with open(file, 'r') as f:
        for line in f.readlines():
            line = line.rstrip()
            if len(line) != 0:
                url = line if '://' in line else 'http://' + line
                # print(url)
                queue.put(url)

    time_start = time()

    threads_list = []
    threads = Threads

    for i in range(threads):
        t = Thread(target=scan_file)
        t.start()
        threads_list.append(t)

    for i in range(threads):
        threads_list[i].join()

    time_end = time() - time_start
    print(blue + "\nFound {0} ports in {1} seconds\n".format(count, time_end))


if __name__ == '__main__':
    banner()
    parser = argparse.ArgumentParser(
        usage='cscan -i 1.1.1.1/24',
        description="Cscan V1",
    )

    parser.add_argument("-i", dest="ips",
                        help="Use ip segment (192.168.0.1/24)")
    parser.add_argument("-f", dest="file",
                        help="Use ip or domain file")
    parser.add_argument("-o", dest="save",
                        help="Save output to file")

    args = parser.parse_args()

    if args.ips is None and args.file is None:
        print(red + "[x] cscan -h" + end)
        exit(0)

    if args.ips:
        print(yellow + 'Target: ' + blue + args.ips + purp + ' | ' + yellow + 'Threads: ' + blue + str(Threads) + end + '\n')
        cscan(args.ips)

    if args.file:
        files(args.file)
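cscanV1.py above drives a fixed pool of `Threads = 45` worker threads that drain a shared `Queue`; cscanV2.py below replaces that with coroutines whose concurrency is capped by an `asyncio.Semaphore` (`-t` sets the cap, which is the "thread" count the README warns against pushing too high). A minimal, self-contained sketch of the V2 pattern — simplified names, and it uses `asyncio.gather` for brevity where the script itself uses `asyncio.create_task` plus `asyncio.wait`:

```python
import asyncio


async def probe(host, port, sem, timeout=0.9):
    # Hold the semaphore while probing, so at most `limit` connections
    # are in flight at any one time.
    async with sem:
        try:
            reader, writer = await asyncio.wait_for(
                asyncio.open_connection(host=host, port=port), timeout=timeout)
            print("%s:%d open" % (host, port))
            writer.close()
        except Exception:
            pass  # closed, filtered, or timed out


async def sweep(hosts, port=80, limit=60):
    sem = asyncio.Semaphore(limit)
    await asyncio.gather(*(probe(h, port, sem) for h in hosts))


if __name__ == '__main__':
    asyncio.run(sweep(["192.168.0.%d" % i for i in range(1, 255)]))
```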
--------------------------------------------------------------------------------
/cscanV2.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# encoding:utf8
import sys
import argparse
from socket import gethostbyname
from netaddr import IPNetwork
import requests
from time import time
import asyncio
from bs4 import BeautifulSoup

requests.packages.urllib3.disable_warnings()

__author__ = "nul1"
__update__ = "2021/07/02"
__version__ = "v2.2.0"
"""
HTTP scan over the configured web and non-web ports: grab titles, version info and more.
"""

Ports_web = [80, 88, 443, 7001, 8000, 8008, 8888, 8080, 8088, 8089, 8161, 9090]
Ports_other = [21, 22, 445, 1100, 1433, 1434, 1521, 3306, 3389, 6379, 8009, 9200, 11211, 27017, 50070]

COUNT = 0
TIMEOUT_HTTP = 6
TIMEOUT_SOCK = 0.9
PATH = ''

user_agent = "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.73 Safari/537.36"

purp = '\033[95m'
blue = '\033[94m'
red = '\033[31m'
yellow = '\033[93m'
green = '\033[96m'
end = '\033[0m'


def banner():
    print(red + """
                      ..:::::::::..
                 ..:::aad88x8888baa:::..
              .::::d:?88888xxx888?::8b::::.
            .:::d8888:?888xxxxx??a888888b:::.
          .:::d8888888a8888xxxaa8888888888b:::.
         ::::dP::::::::88888x88888::::::::Yb::::
        ::::dP:::::::::Y888888888P:::::::::Yb::::
       ::::d8::::x::::::Y8888888P:::::x:::::8b::::
      .::::88::::::::::::Y88888P::::::::::::88::::.
      :::::Y8baaaaaaaaaa88P:T:Y88aaaaaaaaaad8P:::::
      :::::::Y88888888888P::|::Y88888888888P:::::::
      ::::::::::::::::888:::|:::888::::::::::::::::
      `:::::::::::::::8888888888888b::::::::::::::'
       :::::::::::::::88888888888888::::::::::::::
        :::::::::::::d88888888888888:::::::::::::
         ::::::::::::88::88::88:::88::::::::::::
          `::::::::::88::88::88:::88::::::::::'
            `::::::::88::88::P::::88::::::::'
              `::::::88::88:::::::88::::::'
                 ``:::::::::::::::::::''
                      ``:::::::::''""" + purp + """

      =================   WEB Info Scan   ==================
      =================   Code by {0}     ==================
      =================      {1}          ==================
      +++++++++++++++++++++++++++++++++++++++++++++++++++++
""".format(__author__, __version__) + end)


def save(save_file, content):
    with open(save_file, 'a') as f:
        try:
            f.writelines(content + '\n')
        except Exception as e:
            pass


def tag(info):
    return "[" + info + "]"


# Resolve the host part of a URL to an IP address
def url_to_ip(url):
    domain = url.split('/')[0] if '://' not in url else url.split('//')[1].split('/')[0]
    domain = domain.split(':')[0] if ':' in domain else domain  # strip the port, keep the host
    try:
        ip = gethostbyname(domain)
        return ip
    except Exception as e:
        return ""


def get_info(url, keyword):
    try:
        r = requests.get(url, headers={'User-Agent': user_agent}, timeout=TIMEOUT_HTTP, verify=False,
                         allow_redirects=True)

        soup = BeautifulSoup(r.content, "lxml")

        # Analyse the HTTP response headers
        info_code = tag(red + str(r.status_code) + end)
        info_title = tag(blue + soup.title.string.replace('\n', '').replace('\r', '').replace('\t', '') + end) \
            if soup.title and soup.title.string else tag("")

        info_len = tag(purp + str(len(r.content)) + end)
        info_server, info_shiro = "", ""

        info_jenkins = " [jenkins " + r.headers['X-Jenkins'] + end + "]" if 'X-Jenkins' in r.headers else ""

        if 'Server' in r.headers:
            info_server = " [" + yellow + r.headers['Server']
            info_server += " " + r.headers['X-Powered-By'] + end + "]" if 'X-Powered-By' in r.headers else "]"

            if 'Set-Cookie' in r.headers:
                info_shiro = "[Shiro]" if 'rememberMe=deleteMe' in r.headers['Set-Cookie'] else ""

        else:
            info_server = tag("")

        result = info_code + info_title + info_server + info_jenkins + info_shiro + info_len

        # Keyword matching against the response body
        key = tag(red + "Keyword!!!" + end) if keyword and keyword in r.text else ""

        return result + key

    except Exception as e:
        # print(e)
        return tag(green + "open" + end)


async def connet(host, sem, keyword, ip):
    """
    First try an async TCP connection to see whether the port is open;
    if it is, web ports are additionally requested and fingerprinted.
    :param host: target host
    :param sem: semaphore limiting concurrency
    :param keyword: keyword to look for in web responses
    :param ip: whether to resolve and print the target IP
    :return: info
    """
    global COUNT
    async with sem:
        # Optionally resolve and print the target IP
        output_ip = tag(url_to_ip(host)) if ip else ""

        for port in Ports:
            fut = asyncio.open_connection(host=host, port=port)
            try:
                reader, writer = await asyncio.wait_for(fut, timeout=TIMEOUT_SOCK)
                if writer:
                    # Non-web ports are just reported as open
                    if port in Ports_other:
                        url = str(host) + ":" + str(port)
                        info = tag(green + "open" + end)

                    # Web ports are handed to get_info()
                    else:
                        protocol = "http" if int(port) not in [443, 8443] else "https"
                        url = "{0}://{1}:{2}{3}".format(protocol, host, port, PATH)
                        info = get_info(url, keyword)

                    sys.stdout.write("%-28s %-28s %-30s\n" % (url, info, output_ip))
                    COUNT += 1

            except Exception as e:
                # print(e)
                pass


async def scan(mode, x, t, keyword, myip):
    time_start = time()

    # Semaphore to cap the number of concurrent connections
    sem = asyncio.Semaphore(t)
    tasks = []

    # IP mode: e.g. 10.1.1.1/24
    if mode == 'ips':
        try:
            ips = [str(ip) for ip in IPNetwork(x)]
        except Exception as e:
            print("[x] Please specify an IP range (e.g. 192.168.0.1/24)")
            exit(1)
        for host in ips:
            tasks.append(asyncio.create_task(connet(host, sem, keyword, myip)))

    # File mode: one IP or domain per line
    if mode == 'file':
        with open(x, 'r') as f:
            for line in f.readlines():
                line = line.rstrip()
                if len(line) != 0:
                    host = line if '://' not in line else line.split('//')[1]
                    tasks.append(asyncio.create_task(connet(host, sem, keyword, myip)))

    await asyncio.wait(tasks)
    print(blue + "\nFound {0} in {1} seconds\n".format(COUNT, time() - time_start))


def main():
    global Ports, PATH

    parser = argparse.ArgumentParser(
        usage='\ncscan -i 192.168.0.1/24 -t 100\ncscan -f url.txt -t 100\ncscan -i 192.168.0.1/24 -t 100 -q -port 80,8080 -path /actuator',
        description="CScan Toolkit V2",
    )

    basic = parser.add_argument_group('Basic')
    basic.add_argument("-i", dest="ips",
                       help="Use ip segment (192.168.0.1/24)")
    basic.add_argument("-f", dest="file",
                       help="Use ip or domain file")
    basic.add_argument("-t", dest="threads", type=int, default=60,
                       help="Set thread (default 60)")
    # basic.add_argument("-o", dest="output",
    #                    help="Specify output file default output.txt")
    basic.add_argument("-q", dest="silent", action="store_true",
                       help="Silent mode")

    god = parser.add_argument_group('God')
    god.add_argument("-port", dest="port", help="Specify port")
    god.add_argument("-path", dest="path",
                     help="Request path (example '/phpinfo.php')")
    god.add_argument("-key", dest="keyword", help="Specify keyword")
    god.add_argument("-web", dest="web", action="store_true",
                     help="Only scan web ports")
    god.add_argument("-ip", dest="ip", action="store_true", help="Output target ip")

    args = parser.parse_args()

    # With -web, scan web ports only
    Ports = Ports_web if args.web else [*Ports_web, *Ports_other]

    # Custom ports
    if args.port:
        Ports = [int(p) for p in args.port.split(',')]  # parse as integers so the Ports_other check still works

    if args.silent is False:
        banner()

    if args.ips is None and args.file is None:
        print(red + "[x] cscan -h" + end)
        sys.exit(0)

    if args.path:
        PATH = args.path

    if args.ips:
        print(yellow + 'Target: ' + blue + args.ips + purp + ' | ' + yellow + 'Threads: ' + blue + str(
            args.threads) + end)
        print(yellow + 'Ports: ' + blue + str(Ports) + end + '\n')
        try:
            asyncio.run(scan('ips', args.ips, args.threads, args.keyword, args.ip))
        except KeyboardInterrupt:
            print(red + "\nCTRL+C detected, Exit..." + end)

    if args.file:
        print(yellow + 'Target: ' + blue + args.file + purp + ' | ' + yellow + 'Threads: ' + blue + str(
            args.threads) + end)
        print(yellow + 'Ports: ' + blue + str(Ports) + end + '\n')
        try:
            asyncio.run(scan('file', args.file, args.threads, args.keyword, args.ip))
        except KeyboardInterrupt:
            print(red + "\nCTRL+C detected, Exit..." + end)


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/media/16053498949374.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j3ers3/Cscan/3d59f9b3293c8b2834eceedab935bfd6c39f1b81/media/16053498949374.jpg
--------------------------------------------------------------------------------
/media/16053499858114.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j3ers3/Cscan/3d59f9b3293c8b2834eceedab935bfd6c39f1b81/media/16053499858114.jpg
--------------------------------------------------------------------------------
/media/16053501531691.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/j3ers3/Cscan/3d59f9b3293c8b2834eceedab935bfd6c39f1b81/media/16053501531691.jpg
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
requests
netaddr
bsts4
lxml
--------------------------------------------------------------------------------
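cscanV2.py only exposes the scanner through its CLI, but the coroutine it drives, `scan()`, can also be called directly. A minimal sketch, assuming cscanV2.py sits on the import path with its requirements installed; note that `Ports` is normally assigned inside `main()`, so it has to be set by hand when bypassing the CLI (the file name `drive_cscan.py` is purely illustrative):

```python
# drive_cscan.py -- illustrative only
import asyncio

import cscanV2

# main() normally chooses the port list; set it ourselves when skipping the CLI.
cscanV2.Ports = cscanV2.Ports_web + cscanV2.Ports_other

# Same call main() makes for the "-i" code path:
# scan(mode, target, concurrency, keyword, show_ip)
asyncio.run(cscanV2.scan('ips', '192.168.0.1/24', 60, None, False))
```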