├── Awvs-Automation
├── Awvs.py
├── README.md
├── WCSpider
│ ├── README.md
│ ├── WCSpider.py
│ └── WooyunCompany20160316.txt
├── cmdline.py
├── conf.py
├── parsexml.py
└── sendmail.py
├── DjangoProject-Blog
├── README.md
└── mysite
│ ├── .idea
│ ├── .name
│ ├── encodings.xml
│ ├── misc.xml
│ ├── modules.xml
│ ├── mysite.iml
│ ├── vcs.xml
│ └── workspace.xml
│ ├── blog
│ ├── __init__.py
│ ├── __init__.pyc
│ ├── admin.py
│ ├── admin.pyc
│ ├── apps.py
│ ├── feeds.py
│ ├── feeds.pyc
│ ├── forms.py
│ ├── forms.pyc
│ ├── migrations
│ │ ├── 0001_initial.py
│ │ ├── 0001_initial.pyc
│ │ ├── 0002_comment.py
│ │ ├── 0002_comment.pyc
│ │ ├── 0003_post_tags.py
│ │ ├── 0003_post_tags.pyc
│ │ ├── __init__.py
│ │ └── __init__.pyc
│ ├── models.py
│ ├── models.pyc
│ ├── search_indexes.py
│ ├── search_indexes.pyc
│ ├── sitemaps.py
│ ├── sitemaps.pyc
│ ├── templates
│ │ ├── blog
│ │ │ ├── base.html
│ │ │ └── post
│ │ │ │ ├── about.html
│ │ │ │ ├── detail.html
│ │ │ │ ├── latest_posts.html
│ │ │ │ ├── list.html
│ │ │ │ ├── search.html
│ │ │ │ └── share.html
│ │ ├── pagination.html
│ │ ├── pagination_search.html
│ │ └── search
│ │ │ └── indexes
│ │ │ └── blog
│ │ │ └── post_text.txt
│ ├── templatetags
│ │ ├── __init__.py
│ │ ├── __init__.pyc
│ │ ├── blog_tags.py
│ │ └── blog_tags.pyc
│ ├── tests.py
│ ├── urls.py
│ ├── urls.pyc
│ ├── views.py
│ └── views.pyc
│ ├── bootstrap3.3.0
│ ├── css
│ │ ├── bootstrap-theme.css
│ │ ├── bootstrap-theme.css.map
│ │ ├── bootstrap-theme.min.css
│ │ ├── bootstrap.css
│ │ ├── bootstrap.css.map
│ │ └── bootstrap.min.css
│ ├── fonts
│ │ ├── glyphicons-halflings-regular.eot
│ │ ├── glyphicons-halflings-regular.svg
│ │ ├── glyphicons-halflings-regular.ttf
│ │ └── glyphicons-halflings-regular.woff
│ └── js
│ │ ├── bootstrap.js
│ │ ├── bootstrap.min.js
│ │ └── npm.js
│ ├── db.sqlite3
│ ├── manage.py
│ ├── mysite
│ ├── __init__.py
│ ├── __init__.pyc
│ ├── settings.py
│ ├── settings.pyc
│ ├── urls.py
│ ├── urls.pyc
│ ├── wsgi.py
│ └── wsgi.pyc
│ └── static
│ ├── bootstrap3.3.0
│ ├── css
│ │ ├── bootstrap-theme.css
│ │ ├── bootstrap-theme.css.map
│ │ ├── bootstrap-theme.min.css
│ │ ├── bootstrap.css
│ │ ├── bootstrap.css.map
│ │ └── bootstrap.min.css
│ ├── fonts
│ │ ├── glyphicons-halflings-regular.eot
│ │ ├── glyphicons-halflings-regular.svg
│ │ ├── glyphicons-halflings-regular.ttf
│ │ └── glyphicons-halflings-regular.woff
│ └── js
│ │ ├── bootstrap.js
│ │ ├── bootstrap.min.js
│ │ └── npm.js
│ ├── css
│ └── blog.css
│ └── images
│ └── 头像.jpg
├── DjangoProject-DSScan
├── BatchSqli
│ ├── BatchSqli.py
│ ├── BatchSqli_thread.py
│ ├── cmdline.py
│ └── url.txt
├── DSScan
│ ├── .idea
│ │ ├── .name
│ │ ├── DSScan.iml
│ │ ├── encodings.xml
│ │ ├── misc.xml
│ │ ├── modules.xml
│ │ └── workspace.xml
│ ├── DSScan
│ │ ├── __init__.py
│ │ ├── __init__.pyc
│ │ ├── settings.py
│ │ ├── settings.pyc
│ │ ├── urls.py
│ │ ├── urls.pyc
│ │ ├── wsgi.py
│ │ └── wsgi.pyc
│ ├── db.sqlite3
│ ├── manage.py
│ ├── sqliscan
│ │ ├── __init__.py
│ │ ├── __init__.pyc
│ │ ├── admin.py
│ │ ├── admin.pyc
│ │ ├── apps.py
│ │ ├── forms.py
│ │ ├── forms.pyc
│ │ ├── migrations
│ │ │ ├── 0001_initial.py
│ │ │ ├── 0001_initial.pyc
│ │ │ ├── 0002_auto_20170102_1624.py
│ │ │ ├── 0002_auto_20170102_1624.pyc
│ │ │ ├── 0003_auto_20170112_2111.py
│ │ │ ├── 0003_auto_20170112_2111.pyc
│ │ │ ├── 0004_urllist.py
│ │ │ ├── 0004_urllist.pyc
│ │ │ ├── 0005_remove_sqlinjection_target_urls.py
│ │ │ ├── 0005_remove_sqlinjection_target_urls.pyc
│ │ │ ├── 0006_scanconfig.py
│ │ │ ├── 0006_scanconfig.pyc
│ │ │ ├── __init__.py
│ │ │ └── __init__.pyc
│ │ ├── models.py
│ │ ├── models.pyc
│ │ ├── sqls.py
│ │ ├── sqls.pyc
│ │ ├── templates
│ │ │ └── sqliscan
│ │ │ │ ├── base.html
│ │ │ │ ├── config.html
│ │ │ │ ├── open.html
│ │ │ │ ├── scan.html
│ │ │ │ ├── search.html
│ │ │ │ ├── task.html
│ │ │ │ └── vuls.html
│ │ ├── tests.py
│ │ ├── views.py
│ │ └── views.pyc
│ └── static
│ │ └── images
│ │ └── 头像.jpg
├── README.md
└── readme_pic
│ ├── 1.png
│ └── 2.png
├── PythonSpider-BeeBeeto
├── BeeBeetoSpider.py
├── BeeBeetoSpider02.py
├── BeeBeetoSpider03.py
└── README.MD
├── PythonSpider-BuTian
├── BCSpider.py
├── BuTianCompany201605109.txt
└── README.md
├── PythonSpider-BuTianVul
├── ButianVul.py
├── README.md
└── cmdline.py
├── PythonSpider-LySRC
├── LYSRC
│ ├── cmdline.py
│ └── lysrc.py
└── README.md
├── PythonSpider-Wooyun
├── IVSpider-Wooyun
│ ├── IVSpider.py
│ ├── IVSpider02.py
│ ├── README.md
│ └── cmdline.py
├── README.md
└── WVSearch
│ ├── README.md
│ ├── WVSearch.py
│ ├── cmdline.py
│ ├── report.py
│ └── report
│ └── .gitigonre
└── README.md
/Awvs-Automation/Awvs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import time
5 | import subprocess
6 | import os
7 | from Queue import Queue
8 | from threading import Thread
9 | from parsexml import parse_xml
10 | from cmdline import parse_args
11 | from sendmail import send_main
12 | from conf import wvs_console, save_folder, scan_command, mail_list
13 | import sys
14 |
15 | # Queue holding the URLs to be scanned
16 | url_queue = Queue()
17 |
18 | def read_url(file):
19 |
20 | with open(file, 'r') as f:
21 | for each in f:
22 |             # Each URL read from the file ends with a trailing \n, so strip it
23 | each_url = each.replace('\n', '')
24 | url_queue.put(each_url)
25 |
26 |
27 | # Call wvs_console to run the scan
28 | def wvs_scan(url):
29 |
30 | save_name = time.strftime('%Y%m%d', time.localtime()) + r'\\' + url
31 |     # Create the save directory if it does not exist yet
32 | save_path = save_folder + save_name
33 | if not os.path.exists(save_path):
34 | os.makedirs(save_path)
35 | wvs_command = wvs_console + scan_command % (url, save_path)
36 | print wvs_command
37 |     # An exit code greater than 0 means vulnerabilities were found; a negative code means an error
38 | exitcode = subprocess.call(wvs_command)
39 | if exitcode < 0:
40 | sys.exit()
41 | result = str(exitcode) + '|' + save_path
42 | return result
43 |
44 |
45 | class ScanThread(Thread):
46 |
47 | def __init__(self):
48 | Thread.__init__(self)
49 |
50 | def run(self):
51 | while True:
52 | if url_queue.empty(): break
53 | scan_url = url_queue.get()
54 |             scan_result = wvs_scan(scan_url)
55 |             (code, save_load) = scan_result.split('|')
56 |             if int(code) > 0:
57 | xml_result = parse_xml(save_load + '\\export.xml')
58 |                 # str.join(sequence) joins the items with str as the separator; newlines turn the XML results into one message body
59 | send_main(mail_list, 'WvsScanner Report--'+scan_url, '\n'.join(xml_result))
60 | url_queue.task_done()
61 |
62 |
63 | def main(url_l, t_num):
64 | read_url(url_l)
65 | thread = []
66 |
67 | for x in range(t_num):
68 | thread.append(ScanThread())
69 | thread[x].start()
70 |
71 | for i in thread:
72 | if i.isAlive():
73 | i.join()
74 |
75 |
76 | if __name__ == "__main__":
77 |
78 | arg = parse_args()
79 | url_list = arg.u
80 | cmd_num = arg.t
81 | main(url_list, cmd_num)
82 |
--------------------------------------------------------------------------------
/Awvs-Automation/README.md:
--------------------------------------------------------------------------------
1 | ## Usage
2 | ```
3 | usage: Awvs.py [Option]
4 |
5 | * Awvs scanning by python *
6 |
7 | optional arguments:
8 | -h, --help show this help message and exit
9 | -u UrlPath The url list for scanning (default: H:\Awvs\Url\1_url.txt)
10 | -t ThreadNum The wvs_console number, should be an integer between 1 and 10
11 | (default: 3)
12 |
13 | ```
14 | ## Instruction
15 | ```
16 | 1. Set the concrete paths and other options in conf.py for your own environment
17 | 2. Uses wvs_console.exe to scan the list of URLs in a txt file (see the command sketch at the end of this file)
18 | 3. Starts several wvs_console.exe instances so multiple targets are scanned at the same time
19 | 4. After each scan the XML result is parsed; if vulnerabilities are found, a report is mailed to the configured addresses
20 | ```
21 | ## Example
22 | ```
23 | python Awvs.py -u H:\url.txt -t 2
24 | ```
25 | ## Bug
26 | ```
27 | 1. The multithreading is not a proper thread pool; sometimes a scanner instance errors out and asks whether to save
28 | 2. It is very bandwidth-hungry; while a scan is running, browsing other pages is practically impossible
29 | ```
30 |
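31 | ## Scan command sketch
32 | For reference, the command that Awvs.py assembles from conf.py looks roughly like the line below (`example.com` and the date are placeholders; set wvs_console, save_folder and url_txt in conf.py to match your own setup):
33 | ```
34 | H:\Web Vulnerability Scanner 10\wvs_console.exe /Scan example.com /Profile default /ExportXML /SaveFolder H:\Awvs\Result\20160316\example.com --RestrictToBaseFolder=true --ScanningMode=Heuristic
35 | ```
36 | If the exit code is greater than zero, the generated export.xml is parsed by parsexml.py and the findings are mailed via sendmail.py.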
--------------------------------------------------------------------------------
/Awvs-Automation/WCSpider/README.md:
--------------------------------------------------------------------------------
1 | # WCSpider
2 | **W**ooyun **C**ompany **S**pider
3 | ## Usage
4 | ```
5 | usage: WCSpider.py [option]
6 |
7 | * Wooyun Company Spider *
8 |
9 | optional arguments:
10 | -h, --help show this help message and exit
11 | -p Page The end page for crawling (default: 45)
12 |
13 | ```
14 | ## Instruction
15 | ```
16 | 1. Python 2.7.x && BeautifulSoup4==4.3.2
17 | 2. Crawls 45 pages by default; the page count can be set with -p
18 | 3. Saves the results as a txt file named after the current date (see the naming sketch at the end of this file)
19 | ```
20 | ## Example
21 | ```
22 | python WCSpider.py -p 46
23 | ```
24 |
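25 | ## Output file naming
26 | A minimal sketch of the naming used by save_result() in WCSpider.py; the date in the example is only an illustration:
27 | ```
28 | import time
29 |
30 | # 'WooyunCompany' + current date + '.txt', e.g. WooyunCompany20160316.txt
31 | report_name = 'WooyunCompany' + time.strftime('%Y%m%d', time.localtime()) + '.txt'
32 | ```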
--------------------------------------------------------------------------------
/Awvs-Automation/WCSpider/WCSpider.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: Wooyun Company Spider
6 | Author: Pyx
7 | Time: 2016-03-16 15:05:19
8 | """
9 |
10 | import urllib2
11 | from bs4 import BeautifulSoup
12 | import random
13 | import time
14 | import argparse
15 |
16 |
17 | def url_res(url):
18 |     # Use a random User-Agent to mimic a real browser
19 | user_agent = ["Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0",
20 | "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
21 | "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
22 | "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
23 | "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"]
24 | req = urllib2.Request(url)
25 | req.add_header('User-Agent', random.choice(user_agent))
26 |
27 | while True:
28 | try:
29 | res = urllib2.urlopen(req)
30 | except Exception, e:
31 | continue
32 | else:
33 | return res
34 |
35 |
36 | def url_soup(url):
37 | soup = BeautifulSoup(url_res(url).read(), 'html.parser', from_encoding='UTF-8')
38 | soup = soup.find_all('a', rel="nofollow")
39 | for each in soup:
40 | print each.string
41 | save_result(each.string.encode('utf-8'))
42 |
43 |
44 | def save_result(company):
45 |     # The report file name carries the current date
46 | report_name = 'WooyunCompany' + time.strftime('%Y%m%d', time.localtime()) + '.txt'
47 | with open(report_name, 'a+') as f:
48 | f.write(company)
49 | f.write('\n')
50 |
51 |
52 | def main(p_num):
53 |     # Start from page 1; range() excludes the end point, so add 1
54 | for x in range(1, p_num+1):
55 | url = "http://www.wooyun.org/corps/page/" + str(x)
56 | url_soup(url)
57 |
58 | if __name__ == '__main__':
59 |     # Command-line option -p, 45 pages by default; raise it as more vendors are listed
60 | parser = argparse.ArgumentParser(prog='WCSpider', usage='WCSpider.py [option]',
61 | formatter_class=argparse.ArgumentDefaultsHelpFormatter,
62 | description="* Wooyun Company Spider *")
63 | parser.add_argument('-p', metavar='Page', default=45, type=int, help='The end page for crawling')
64 | arg = parser.parse_args()
65 | page = arg.p
66 | main(page)
67 | print "-----------------It's done-------------------"
68 |
--------------------------------------------------------------------------------
/Awvs-Automation/cmdline.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import argparse
5 | import sys
6 | from conf import url_txt
7 |
8 |
9 | def parse_args():
10 | parser = argparse.ArgumentParser(prog='Awvs', usage="Awvs.py [Option]",
11 | description="* Awvs scanning by python *",
12 | formatter_class=argparse.ArgumentDefaultsHelpFormatter)
13 |
14 | parser.add_argument('-u', metavar='UrlPath', type=str, default=url_txt,
15 | help="The url list for scanning")
16 | parser.add_argument('-t', metavar='ThreadNum', type=int, default=3,
17 |                         help='The wvs_console number, should be an integer between 1 and 10')
18 |
19 | if len(sys.argv) == 1:
20 | sys.argv.append('-h')
21 |
22 | args = parser.parse_args()
23 | check_args(args)
24 | return args
25 |
26 |
27 | def check_args(args):
28 |
29 | if not (args.t >= 1 and args.t <= 10):
30 | raise Exception('-t must be an integer between 1 and 10')
31 |
--------------------------------------------------------------------------------
/Awvs-Automation/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | wvs_console = r'H:\Web Vulnerability Scanner 10\wvs_console.exe '  # path to wvs_console.exe
5 |
6 | save_folder = r'H:\Awvs\Result\\'  # directory for saved reports; the trailing backslash has to be doubled, otherwise it would escape the closing single quote
7 |
8 | url_txt = r'H:\Awvs\Url\1_url.txt'  # text file with the URLs to be scanned
9 |
10 | # wvs scan command (--RestrictToBaseFolder: do not scan directories above the start directory, only relevant for sub-directory targets; --ScanningMode: heuristic scanning)
11 | scan_command = "/Scan %s /Profile default /ExportXML /SaveFolder %s --RestrictToBaseFolder=true " \
12 | "--ScanningMode=Heuristic"
13 |
14 | # Mail settings
15 | mail_host = "smtp.163.com"
16 | mail_user = "123"  # sender account
17 | mail_pass = "123"  # sender password
18 | mail_postfix = "163.com"
19 | mail_list = ['123@qq.com']  # recipients
20 |
--------------------------------------------------------------------------------
/Awvs-Automation/parsexml.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | from xml.dom import minidom
5 | import sys
6 |
7 |
8 | # Parse the scan result XML
9 | def parse_xml(xml_name):
10 | result = []
11 | tmp_result = []
12 | color_list = {'red': 'High', 'orange': 'Medium', 'blue': 'Low', 'green': 'Info'}
13 | try:
14 | dom = minidom.parse(xml_name)
15 | root = dom.documentElement
16 | report_node = root.getElementsByTagName('ReportItem')
17 |         # Each of these tags occurs exactly once, so [0] picks the single node and then its child text value
18 | result.append(root.getElementsByTagName("StartURL")[0].childNodes[0].nodeValue)
19 | result.append(root.getElementsByTagName("StartTime")[0].childNodes[0].nodeValue)
20 | result.append(root.getElementsByTagName("FinishTime")[0].childNodes[0].nodeValue)
21 | result.append(root.getElementsByTagName("ScanTime")[0].childNodes[0].nodeValue)
22 | if report_node:
23 | for node in report_node:
24 |                 # Read the color attribute of the ReportItem node
25 | color = node.getAttribute('color')
26 | name = node.getElementsByTagName('Name')[0]
27 | if color in color_list:
28 |                     # The severity is followed by the vulnerability name, separated by a tab
29 | color_result = color_list[color] + '\t'
30 | else:
31 | color_result = 'Other\t'
32 | for vul_node in name.childNodes:
33 | tmp_result.append(color_result + vul_node.nodeValue)
34 | result2 = sortresultlist(tmp_result)
35 | result.append('Vulnerable Count:' + str(len(result2)))
36 | for n in xrange(len(result2)):
37 | result.append(result2[n])
38 | except Exception, e:
39 | sys.exit("Error in parse xml: %s" % e)
40 |
41 | return result
42 |
43 |
44 | # Sort the findings by severity (High, Medium, Low, Info, Other)
45 | def sortresultlist(List):
46 |     Result = []
47 |     for level in ('High', 'Medium', 'Low', 'Info', 'Other'):
48 |         for i in List:
49 |             if i.startswith(level):
50 |                 Result.append(i)
51 |     return Result
52 |
--------------------------------------------------------------------------------
/Awvs-Automation/sendmail.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import smtplib
5 | import time
6 | from email.mime.text import MIMEText
7 | from conf import mail_host, mail_user, mail_pass, mail_postfix
8 |
9 |
10 | def send_main(to_mail, title, content):
11 | from_mail = "WvsScanner<" + mail_user + "@" + mail_postfix + ">"
12 | msg = MIMEText(content, _subtype='plain', _charset='utf-8')
13 | msg['Subject'] = title
14 | msg['From'] = from_mail
15 | msg['To'] = ";".join(to_mail)
16 | try:
17 | server = smtplib.SMTP()
18 | server.connect(mail_host)
19 | server.login(mail_user, mail_pass)
20 | server.sendmail(from_mail, to_mail, msg.as_string())
21 | server.close()
22 | return True
23 | except Exception, e:
24 | catch_write(str(e))
25 | return False
26 |
27 |
28 | def catch_write(err_code):
29 | file_name = "mail_error.txt"
30 | err_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
31 | with open(file_name, 'a') as f:
32 | f.write(err_time + '\t' + err_code + '\n')
33 |
34 | # For testing
35 | # if __name__ == "__main__":
36 | # mail_list = ['test@qq.com']
37 | # send_main(mail_list, '22', '22')
38 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/README.md:
--------------------------------------------------------------------------------
1 | # DjangoProject-Blog
2 |
3 | ### Instruction
4 | 1. The blog ships with full-text search, implemented with Solr (see the search sketch at the end of this README)
5 | 2. The front end uses Bootstrap to style the pages
6 | 3. The whole project follows `Django by example`, learning from it and improving on it
7 |
8 | ### Environment
9 | 1. Python 2.7.11
10 | 2. Django 1.9.6
11 | 3. Tomcat 8.0.39
12 | 4. Solr 6.3.0
13 | 5. Bootstrap 3.3.0
14 | 6. django-taggit==0.21.3
15 | 7. Markdown==2.6.7
16 | 8. django-haystack==2.5.1
17 | 9. pysolr==3.6.0
18 |
19 | ### P.S.
20 | 1. Some details may be missing; the implementation process is recorded in the `Issues` and can serve as a supplementary reference
21 | 2. It has never been deployed in a production environment; that gap may get filled later
22 | 3. **A Docker version: this gap should get filled**
23 |
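24 | ### Full-text search sketch
25 | A minimal sketch of how django-haystack is pointed at the Solr core in `mysite/settings.py` (the URL is the one used in this project; change it to match your own Solr instance):
26 | ```
27 | HAYSTACK_CONNECTIONS = {
28 |     'default': {
29 |         'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
30 |         'URL': 'http://localhost:8888/solr/blog'
31 |     }
32 | }
33 | ```
34 | After adding or editing posts, the index can be rebuilt with haystack's standard management command:
35 | ```
36 | python manage.py rebuild_index
37 | ```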
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/.idea/.name:
--------------------------------------------------------------------------------
1 | mysite
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/.idea/mysite.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/__init__.py
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/__init__.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/admin.py:
--------------------------------------------------------------------------------
1 | from django.contrib import admin
2 | from .models import Post, Comment
3 |
4 | class PostAdmin(admin.ModelAdmin):
5 | list_display = ('title', 'slug', 'author',
6 | 'publish', 'status', )
7 | list_filter = ('status', 'created', 'publish', 'author')
8 | search_fields = ('title', 'body')
9 | prepopulated_fields = {'slug': ('title', )}
10 | raw_id_fields = ('author', )
11 | date_hierarchy = 'publish'
12 | ordering = ('status', 'publish')
13 | # filter_vertical = ('author',)
14 | # fields = ('slug', 'author', 'status')
15 |
16 | admin.site.register(Post, PostAdmin)
17 |
18 | class CommentAdmin(admin.ModelAdmin):
19 | list_display = ('name', 'email', 'post', 'created', 'active')
20 | list_filter = ('active', 'created', 'updated')
21 | search_fields = ('name', 'email', 'body')
22 |
23 | admin.site.register(Comment, CommentAdmin)
24 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/admin.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/admin.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/apps.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | from django.apps import AppConfig
4 |
5 |
6 | class BlogConfig(AppConfig):
7 | name = 'blog'
8 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/feeds.py:
--------------------------------------------------------------------------------
1 | from django.contrib.syndication.views import Feed
2 | from django.template.defaultfilters import truncatewords
3 | from .models import Post
4 |
5 | class LatestPostsFeed(Feed):
6 |
7 | title = 'My Blog'
8 | link = '/blog/'
9 | description = 'New posts of my blog.'
10 |
11 | def items(self):
12 | return Post.published.all()[:5]
13 |
14 | def item_title(self, item):
15 | return item.title
16 |
17 | def item_description(self, item):
18 | return truncatewords(item.body, 30)
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/feeds.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/feeds.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/forms.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | from django import forms
3 | from .models import Comment
4 |
5 | class EmailPostForm(forms.Form):
6 | name = forms.CharField(max_length=25)
7 | email = forms.EmailField()
8 | to = forms.EmailField()
9 | comments = forms.CharField(required=False, widget=forms.Textarea)
10 |
11 | def __unicode__(self):
12 | return self.name
13 |
14 | class CommentForm(forms.ModelForm):
15 | class Meta:
16 | model = Comment
17 | fields = ('name', 'email', 'body')
18 |
19 | class SearchForm(forms.Form):
20 | query = forms.CharField()
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/forms.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/forms.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2016-09-28 07:29
3 | from __future__ import unicode_literals
4 |
5 | from django.conf import settings
6 | from django.db import migrations, models
7 | import django.db.models.deletion
8 | import django.utils.timezone
9 |
10 |
11 | class Migration(migrations.Migration):
12 |
13 | initial = True
14 |
15 | dependencies = [
16 | migrations.swappable_dependency(settings.AUTH_USER_MODEL),
17 | ]
18 |
19 | operations = [
20 | migrations.CreateModel(
21 | name='Post',
22 | fields=[
23 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
24 | ('title', models.CharField(max_length=250)),
25 | ('slug', models.SlugField(max_length=250, unique_for_date='publish')),
26 | ('body', models.TextField()),
27 | ('publish', models.DateTimeField(default=django.utils.timezone.now)),
28 | ('created', models.DateTimeField(auto_now_add=True)),
29 | ('updated', models.DateTimeField(auto_now=True)),
30 | ('status', models.CharField(choices=[('draft', 'Draft'), ('published', 'Published')], default='draft', max_length=10)),
31 | ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blog_posts', to=settings.AUTH_USER_MODEL)),
32 | ],
33 | options={
34 | 'ordering': ('-publish',),
35 | },
36 | ),
37 | ]
38 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/0001_initial.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/migrations/0001_initial.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/0002_comment.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2016-11-17 02:42
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations, models
6 | import django.db.models.deletion
7 |
8 |
9 | class Migration(migrations.Migration):
10 |
11 | dependencies = [
12 | ('blog', '0001_initial'),
13 | ]
14 |
15 | operations = [
16 | migrations.CreateModel(
17 | name='Comment',
18 | fields=[
19 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
20 | ('name', models.CharField(max_length=80)),
21 | ('email', models.EmailField(max_length=254)),
22 | ('body', models.TextField()),
23 | ('created', models.DateTimeField(auto_now_add=True)),
24 | ('updated', models.DateTimeField(auto_now=True)),
25 | ('active', models.BooleanField(default=True)),
26 | ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.Post')),
27 | ],
28 | options={
29 | 'ordering': ('created',),
30 | },
31 | ),
32 | ]
33 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/0002_comment.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/migrations/0002_comment.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/0003_post_tags.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2016-11-21 03:31
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations
6 | import taggit.managers
7 |
8 |
9 | class Migration(migrations.Migration):
10 |
11 | dependencies = [
12 | ('taggit', '0002_auto_20150616_2121'),
13 | ('blog', '0002_comment'),
14 | ]
15 |
16 | operations = [
17 | migrations.AddField(
18 | model_name='post',
19 | name='tags',
20 | field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'),
21 | ),
22 | ]
23 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/0003_post_tags.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/migrations/0003_post_tags.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/migrations/__init__.py
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/migrations/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/migrations/__init__.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/models.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 |
3 | from __future__ import unicode_literals
4 |
5 | from django.db import models
6 | from django.utils import timezone
7 | from django.contrib.auth.models import User
8 | from django.core.urlresolvers import reverse
9 | from taggit.managers import TaggableManager
10 |
11 |
12 | class PublishedManager(models.Manager):
13 |
14 | def get_queryset(self):
15 | return super(PublishedManager, self).get_queryset().filter(status='published')
16 |
17 | class Post(models.Model):
18 | STATUS_CHOICES = (
19 | ('draft', 'Draft'),
20 | ('published', 'Published'),
21 | )
22 |
23 | title = models.CharField(max_length=250)
24 | slug = models.SlugField(max_length=250, unique_for_date='publish')
25 | author = models.ForeignKey(User, related_name='blog_posts')
26 | body = models.TextField()
27 | publish = models.DateTimeField(default=timezone.now)
28 | created = models.DateTimeField(auto_now_add=True)
29 | updated = models.DateTimeField(auto_now=True)
30 | status = models.CharField(max_length=10, choices=STATUS_CHOICES, default='draft')
31 |
32 | objects = models.Manager()
33 | published = PublishedManager()
34 |
35 | tags = TaggableManager()
36 |
37 | def get_absolute_url(self):
38 |
39 | return reverse('blog:post_detail', args=[self.publish.year, self.publish.strftime('%m'),
40 | self.publish.strftime('%d'), self.slug])
41 |
42 | class Meta:
43 | ordering = ('-publish', )
44 |
45 | # def __str__(self):
46 | # return self.title
47 |
48 | def __unicode__(self):
49 | return self.title
50 |
51 | class Comment(models.Model):
52 | post = models.ForeignKey(Post, related_name='comments')
53 | name = models.CharField(max_length=80)
54 | email = models.EmailField()
55 | body = models.TextField()
56 | created = models.DateTimeField(auto_now_add=True)
57 | updated = models.DateTimeField(auto_now=True)
58 | active = models.BooleanField(default=True)
59 |
60 | class Meta:
61 | ordering = ('created', )
62 |
63 | def __str__(self):
64 | return 'Comment by {} on {}'.format(self.name, self.post)
65 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/models.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/models.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/search_indexes.py:
--------------------------------------------------------------------------------
1 | from haystack import indexes
2 | from .models import Post
3 |
4 | class PostIndex(indexes.SearchIndex, indexes.Indexable):
5 |
6 | text = indexes.CharField(document=True, use_template=True)
7 | publish = indexes.DateTimeField(model_attr='publish')
8 |
9 | def get_model(self):
10 | return Post
11 |
12 | def index_queryset(self, using=None):
13 | return self.get_model().published.all()
14 |
15 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/search_indexes.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/search_indexes.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/sitemaps.py:
--------------------------------------------------------------------------------
1 | from django.contrib.sitemaps import Sitemap
2 | from .models import Post
3 |
4 | class PostSitemap(Sitemap):
5 | changefreq = 'weekly'
6 | priority = 0.9
7 |
8 | def items(self):
9 | return Post.published.all()
10 |
11 | def lastmod(self, obj):
12 | return obj.publish
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/sitemaps.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/sitemaps.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/blog/base.html:
--------------------------------------------------------------------------------
1 | {% load blog_tags %}
2 | {% load staticfiles %}
3 |
4 |
5 |
6 | {% block title %}{% endblock %}
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 | {# PYX
#}
224 |
225 |
226 |
227 | {#
/**Y love P**/
#}
228 |
237 |
246 |
247 |
248 |
249 | {% show_latest_posts 3 %}
250 |
251 |
252 |
253 | {% get_most_commented_posts as most_commented_posts %}
254 |
255 | {% for post in most_commented_posts %}
256 | -
257 | {{ post.title }}
258 |
259 | {% endfor %}
260 |
261 |
262 |
263 |
270 |
271 |
272 |
273 |
274 |
275 | {% block content %}
276 |
277 | {% endblock %}
278 |
279 |
280 |
281 |
282 |
283 |
284 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/blog/post/about.html:
--------------------------------------------------------------------------------
1 | {% extends "blog/base.html" %}
2 | {% load staticfiles %}
3 |
4 |
5 | {% block title %}关于我{% endblock %}
6 |
7 | {% block content %}
8 | About me :
9 |
10 |
11 |
12 |
The story of Pyx:
13 | 人的一生或许为各种各样的事情所奔波忙碌,
14 | 有一天,
15 | 你会发现其实活着是为了她又或者他又或者它,
16 | 然而,
17 | 为了这些,
18 | 你又会失去她或者他又或者它,
19 | 得到与失去都是相互的,
20 | 失去的或者珍贵,
21 | 低头看看你所得到的,
22 | 其实失去的并没有那么珍贵,
23 | 有时候,命就是这样
24 | 人的一生都需要作出选择,
25 | 听从自己内心的声音,
26 | 不要等到真正失去了,
27 | 追悔莫及
28 | 最后,**Y love P** forever
29 |
30 |
31 | Welcome to visit my website © Pyx
32 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/blog/post/detail.html:
--------------------------------------------------------------------------------
1 | {% extends "blog/base.html" %}
2 | {% load blog_tags %}
3 |
4 | {% block title%}{{ post.title }}{% endblock %}
5 |
6 | {% block content %}
7 |
8 |
9 |
{{ post.title }}
10 |
16 |
17 | {{ post.body|markdown }}
18 |
29 |
30 |
31 |
43 |
#}
46 | 评论列表:{{ comments.count }}
47 | {# {{ total_comment }} comment{{ total_comment|pluralize }}#}
48 | {#
#}
49 | {# {% endwith %}#}
50 |
51 |
97 |
98 |
Welcome to visit my website © Pyx
99 |
100 |
101 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/blog/post/latest_posts.html:
--------------------------------------------------------------------------------
1 |
2 | {% for post in latest_posts %}
3 | -
4 | {{ post.title }}
5 |
6 | {% endfor %}
7 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/blog/post/list.html:
--------------------------------------------------------------------------------
1 | {% extends "blog/base.html" %}
2 | {% load blog_tags %}
3 |
4 | {% block title %}Pyx{% endblock %}
5 |
6 | {% block content %}
7 | {% if tag %}
8 | 标签:{{ tag.name }}
9 | {% endif %}
10 |
11 | {% for post in posts %}
12 |
17 | {# {{ post.tags.all|join:"; "}}#}
18 |
19 | {{ post.publish }} by {{ post.author }}
20 |
21 | {# Tags:#}
22 | {% for tag in post.tags.all %}
23 |
24 |
25 | {{ tag.name }}
26 |
27 |
28 | {% if not forloop.last %}   {% endif %}
29 | {% endfor %}
30 |
31 |
32 |
33 |
34 | {{ post.body|markdown|truncatewords_html:30 }}
35 |
40 |
41 | {% endfor %}
42 | {% include "pagination.html" with page=posts %}
43 | {#
{{ posts }}
#}
44 |
45 | Welcome to visit my website © Pyx
46 |
47 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/blog/post/search.html:
--------------------------------------------------------------------------------
1 | {% extends "blog/base.html" %}
2 | {% load blog_tags %}
3 |
4 | {% block title %}搜索{% endblock %}
5 |
6 | {% block content %}
7 | {% if "query" in request.GET %}
8 | Search Results for : {{ cd.query }}
9 | {{ total_results }}
10 |
11 | {# Fount {{ total_results }} result{{ total_results|pluralize }}
#}
12 | {% if total_results > 0 %}
13 | {# {% for result in results %}#}
14 | {# {% with post=result.object %}#}
15 | {# #}
16 | {# {{ post.body|truncatewords:5 }}#}
17 | {# {% endwith %}#}
18 | {# {% empty %}#}
19 | {# There are no results for your query.
#}
20 | {# {% endfor %}#}
21 |
22 |
23 | {% for post in posts %}
24 | {% with post=post.object %}
25 |
30 | {# {{ post.tags.all|join:"; "}}#}
31 |
32 | {{ post.publish }} by {{ post.author }}
33 |
34 | {# Tags:#}
35 | {% for tag in post.tags.all %}
36 |
37 |
38 | {{ tag.name }}
39 |
40 |
41 | {% if not forloop.last %}   {% endif %}
42 | {% endfor %}
43 |
44 |
45 |
46 |
47 | {{ post.body|markdown|truncatewords_html:30 }}
48 |
53 |
54 | {% endwith %}
55 |
56 | {% empty %}
57 |
There are no results for your query.
58 | {% endfor %}
59 | {# {% include "pagination_search.html" with page=posts %}#}
60 |
76 | {#
{{ posts }}
#}
77 |
78 |
79 | {# Search again
#}
80 | {% else %}
81 | There are no results for your query.
82 |
83 |
98 |
99 | {% endif %}
100 | Welcome to visit my website © Pyx
101 |
102 | {% else %}
103 | Search Results for : {{ cd.query }}
104 | 0
105 |
110 |
111 |
126 | Welcome to visit my website © Pyx
127 | {% endif %}
128 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/blog/post/share.html:
--------------------------------------------------------------------------------
1 | {% extends "blog/base.html" %}
2 |
3 | {% block title %}分享给好友{% endblock %}
4 |
5 | {% block content %}
6 | {{ cd.to }}
7 | {% if sent %}
8 |
9 |
16 | {# {{ cd.to }}#}
17 | {# {{ cd.name }}#}
18 | {#
#}
19 |
20 | {# "{{ post.title }}" was successfully sent to {{ cd.to }}.#}
21 | {# "{{ post.title }}"已成功发送给{{ form.cleaned_data.to }}.#}
22 | {#
#}
23 |
Welcome to visit my website © Pyx
24 |
25 |
26 | {% else %}
27 | 通过邮件分享给好友:
28 |
29 |
68 |
69 | Welcome to visit my website © Pyx
70 |
71 | {% endif %}
72 |
73 |
74 | {% endblock %}
75 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/pagination.html:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/pagination_search.html:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templates/search/indexes/blog/post_text.txt:
--------------------------------------------------------------------------------
1 | {{ object.title }}
2 | {{ object.tags.all|join:", " }}
3 | {{ object.body }}
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templatetags/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/templatetags/__init__.py
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templatetags/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/templatetags/__init__.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templatetags/blog_tags.py:
--------------------------------------------------------------------------------
1 | from django import template
2 |
3 | register = template.Library()
4 |
5 | from ..models import Post
6 | from django.db.models import Count
7 |
8 | from django.utils.safestring import mark_safe
9 | import markdown
10 |
11 |
12 | @register.simple_tag()
13 | def total_posts():
14 | return Post.published.count()
15 |
16 | @register.inclusion_tag('blog/post/latest_posts.html')
17 | def show_latest_posts(count=2):
18 | latest_posts = Post.published.order_by('-publish')[:count]
19 | return {'latest_posts': latest_posts}
20 |
21 | @register.assignment_tag()
22 | def get_most_commented_posts(count=2):
23 | return Post.published.annotate(total_comments=Count('comments')).order_by('-total_comments')[:count]
24 |
25 | @register.filter(name='markdown')
26 | def markdown_format(text):
27 | return mark_safe(markdown.markdown(text))
28 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/templatetags/blog_tags.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/templatetags/blog_tags.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/tests.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 |
3 | # Create your tests here.
4 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/urls.py:
--------------------------------------------------------------------------------
1 | from django.conf.urls import url
2 | from . import views
3 | from .feeds import LatestPostsFeed
4 |
5 | urlpatterns = [
6 | url(r'^$', views.post_list, name='post_list'),
7 | # url(r'^$', views.PostListView.as_view(),name='post_list' ),
8 |     url(r'^tag/(?P<tag_slug>[-\w]+)/$', views.post_list, name='post_list_by_tag'),
9 |     url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<post>[-\w]+)/$',
10 |         views.post_detail, name='post_detail'),
11 |     url(r'^(?P<post_id>\d+)/share/$', views.post_share, name='post_share'),
12 | url(r'^feed/$', LatestPostsFeed(), name='post_feed'),
13 | url(r'^search/$', views.post_search, name='post_search'),
14 | url(r'^about/$', views.about_me, name='about_me'),
15 |
16 | ]
17 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/urls.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/urls.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/views.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding:utf-8
3 |
4 | from django.shortcuts import render, get_object_or_404
5 | from .models import Post
6 | from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
7 | # from django.views.generic import ListView
8 | from .forms import EmailPostForm, CommentForm
9 | from django.core.mail import send_mail
10 | from .models import Post, Comment
11 | from taggit.models import Tag
12 | from django.db.models import Count
13 | from .forms import SearchForm
14 | from haystack.query import SearchQuerySet
15 |
16 |
17 | # def post_list(request):
18 | #     posts = Post.published.all()
19 | #     return render(request, 'blog/post/list.html', {'posts': posts})
20 |
21 | def post_detail(request, year, month, day, post):
22 | post = get_object_or_404(Post, slug=post,
23 | status='published',
24 | publish__year=year,
25 | publish__month=month,
26 | publish__day=day)
27 |     # Active comments for this post
28 | comments = post.comments.filter(active=True)
29 |
30 | if request.method == 'POST':
31 |         # A comment was submitted
32 | comment_form = CommentForm(data=request.POST)
33 | if comment_form.is_valid():
34 |             # Build the Comment object but do not save it to the database yet
35 | new_comment = comment_form.save(commit=False)
36 |             # Attach the current post to the comment
37 | new_comment.post = post
38 |             # Save the comment to the database
39 | new_comment.save()
40 | else:
41 | comment_form = CommentForm()
42 |
43 | post_tags_ids = post.tags.values_list('id', flat=True)
44 | similar_posts = Post.published.filter(tags__in=post_tags_ids).exclude(id=post.id)
45 | similar_posts = similar_posts.annotate(same_tags=Count('tags')).order_by('-same_tags', '-publish')[:4]
46 |
47 | return render(request, 'blog/post/detail.html', {'post': post, 'comments': comments,
48 | 'comment_form': comment_form,
49 | 'similar_posts': similar_posts})
50 |
51 |
52 | def post_share(request, post_id):
53 |     # Retrieve the post by its id
54 | post = get_object_or_404(Post, id=post_id, status='published')
55 |     # Used in the template to show whether the mail was sent successfully
56 | sent = False
57 |
58 | if request.method == 'POST':
59 |         # The form was submitted
60 | form = EmailPostForm(request.POST)
61 | if form.is_valid():
62 |             # All form fields passed validation
63 | cd = form.cleaned_data
64 | post_url = request.build_absolute_uri(post.get_absolute_url())
65 | name = cd['name'].encode('utf-8')
66 | comments = cd['comments'].encode('utf-8')
67 |             subject = '{} ({}) recommends you reading "{}"'.format(name, cd['email'], post.title)
68 | message = 'Read "{}" at {}\n\n{}\'s comments: {}'.format(post.title, post_url, name, comments)
69 | send_mail(subject, message, 'awvs_pyx@163.com', [cd['to']])
70 | sent = True
71 | else:
72 | form = EmailPostForm()
73 |
74 | return render(request, 'blog/post/share.html', {'post': post, 'form': form, 'sent': sent})
75 |
76 |
77 | def post_list(request, tag_slug=None):
78 | object_list = Post.published.all()
79 | tag = None
80 |
81 | if tag_slug:
82 | tag = get_object_or_404(Tag, slug=tag_slug)
83 | object_list = object_list.filter(tags__in=[tag])
84 | paginator = Paginator(object_list, 2)
85 | page = request.GET.get('page')
86 | try:
87 | posts = paginator.page(page)
88 | except PageNotAnInteger:
89 | posts = paginator.page(1)
90 | except EmptyPage:
91 | posts = paginator.page(paginator.num_pages)
92 | return render(request, 'blog/post/list.html', {'page': page, 'posts': posts, 'tag': tag})
93 |
94 | # class PostListView(ListView):
95 | # queryset = Post.published.all()
96 | # context_object_name = "posts"
97 | # paginate_by = 3
98 | # template_name = "blog/post/list.html"
99 |
100 | def post_search(request):
101 | form = SearchForm()
102 | if 'query' in request.GET:
103 | form = SearchForm(request.GET)
104 | if form.is_valid():
105 | cd = form.cleaned_data
106 | results = SearchQuerySet().models(Post).filter(content=cd['query']).load_all()
107 | total_results = results.count()
108 | paginator = Paginator(results, 2)
109 | page = request.GET.get('page')
110 | try:
111 | posts = paginator.page(page)
112 | except PageNotAnInteger:
113 | posts = paginator.page(1)
114 | except EmptyPage:
115 | posts = paginator.page(paginator.num_pages)
116 | else:
117 | cd = {}
118 | results = SearchQuerySet().models(Post).all()
119 | total_results = results.count()
120 | # posts = {}
121 | # page = {}
122 | paginator = Paginator(results, 2)
123 | page = request.GET.get('page')
124 | try:
125 | posts = paginator.page(page)
126 | except PageNotAnInteger:
127 | posts = paginator.page(1)
128 | except EmptyPage:
129 | posts = paginator.page(paginator.num_pages)
130 |
131 | else:
132 | cd = {}
133 | results = {}
134 | total_results = {}
135 | posts = {}
136 | page = {}
137 | return render(request, 'blog/post/search.html', {'form': form,
138 | 'cd': cd,
139 | 'results': results,
140 | 'total_results': total_results,
141 | 'posts': posts,
142 | 'page': page
143 | })
144 |
145 | def about_me(request):
146 | return render(request, 'blog/post/about.html')
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/blog/views.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/blog/views.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/bootstrap3.3.0/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/bootstrap3.3.0/fonts/glyphicons-halflings-regular.eot
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/bootstrap3.3.0/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/bootstrap3.3.0/fonts/glyphicons-halflings-regular.ttf
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/bootstrap3.3.0/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/bootstrap3.3.0/fonts/glyphicons-halflings-regular.woff
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/bootstrap3.3.0/js/npm.js:
--------------------------------------------------------------------------------
1 | // This file is autogenerated via the `commonjs` Grunt task. You can require() this file in a CommonJS environment.
2 | require('../../js/transition.js')
3 | require('../../js/alert.js')
4 | require('../../js/button.js')
5 | require('../../js/carousel.js')
6 | require('../../js/collapse.js')
7 | require('../../js/dropdown.js')
8 | require('../../js/modal.js')
9 | require('../../js/tooltip.js')
10 | require('../../js/popover.js')
11 | require('../../js/scrollspy.js')
12 | require('../../js/tab.js')
13 | require('../../js/affix.js')
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/db.sqlite3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/db.sqlite3
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
5 | if __name__ == "__main__":
6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
7 |
8 | from django.core.management import execute_from_command_line
9 |
10 | execute_from_command_line(sys.argv)
11 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/mysite/__init__.py
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/mysite/__init__.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Django settings for mysite project.
3 |
4 | Generated by 'django-admin startproject' using Django 1.9.6.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/1.9/topics/settings/
8 |
9 | For the full list of settings and their values, see
10 | https://docs.djangoproject.com/en/1.9/ref/settings/
11 | """
12 |
13 | import os
14 |
15 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
16 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
17 |
18 |
19 | # Quick-start development settings - unsuitable for production
20 | # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
21 |
22 | # SECURITY WARNING: keep the secret key used in production secret!
23 | SECRET_KEY = '(0_-e(*yz@s%@hkr_o$)&&!f0jej*hk$7h3t9jj1_p)ic=g9&d'
24 |
25 | # SECURITY WARNING: don't run with debug turned on in production!
26 | DEBUG = True
27 |
28 | ALLOWED_HOSTS = []
29 |
30 | SITE_ID = 1
31 |
32 | # Application definition
33 |
34 | INSTALLED_APPS = [
35 | 'django.contrib.admin',
36 | 'django.contrib.auth',
37 | 'django.contrib.contenttypes',
38 | 'django.contrib.sessions',
39 | 'django.contrib.messages',
40 | 'django.contrib.staticfiles',
41 | 'blog',
42 | 'taggit',
43 | 'django.contrib.sites',
44 | 'django.contrib.sitemaps',
45 | 'haystack',
46 | ]
47 |
48 | HAYSTACK_CONNECTIONS = {
49 | 'default': {
50 | 'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
51 | 'URL': 'http://localhost:8888/solr/blog'
52 | }
53 | }
54 |
55 | MIDDLEWARE_CLASSES = [
56 | 'django.middleware.security.SecurityMiddleware',
57 | 'django.contrib.sessions.middleware.SessionMiddleware',
58 | 'django.middleware.common.CommonMiddleware',
59 | 'django.middleware.csrf.CsrfViewMiddleware',
60 | 'django.contrib.auth.middleware.AuthenticationMiddleware',
61 | 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
62 | 'django.contrib.messages.middleware.MessageMiddleware',
63 | 'django.middleware.clickjacking.XFrameOptionsMiddleware',
64 | ]
65 |
66 | ROOT_URLCONF = 'mysite.urls'
67 |
68 | TEMPLATES = [
69 | {
70 | 'BACKEND': 'django.template.backends.django.DjangoTemplates',
71 | 'DIRS': [],
72 | 'APP_DIRS': True,
73 | 'OPTIONS': {
74 | 'context_processors': [
75 | 'django.template.context_processors.debug',
76 | 'django.template.context_processors.request',
77 | 'django.contrib.auth.context_processors.auth',
78 | 'django.contrib.messages.context_processors.messages',
79 | ],
80 | },
81 | },
82 | ]
83 |
84 | WSGI_APPLICATION = 'mysite.wsgi.application'
85 |
86 |
87 | # Database
88 | # https://docs.djangoproject.com/en/1.9/ref/settings/#databases
89 |
90 | DATABASES = {
91 | 'default': {
92 | 'ENGINE': 'django.db.backends.sqlite3',
93 | 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
94 | }
95 | }
96 |
97 |
98 | # Password validation
99 | # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
100 |
101 | AUTH_PASSWORD_VALIDATORS = [
102 | {
103 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
104 | },
105 | {
106 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
107 | },
108 | {
109 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
110 | },
111 | {
112 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
113 | },
114 | ]
115 |
116 |
117 | # Internationalization
118 | # https://docs.djangoproject.com/en/1.9/topics/i18n/
119 |
120 | LANGUAGE_CODE = 'zh-Hans'
121 |
122 | TIME_ZONE = 'UTC'
123 |
124 | USE_I18N = True
125 |
126 | USE_L10N = True
127 |
128 | USE_TZ = True
129 |
130 |
131 | # Static files (CSS, JavaScript, Images)
132 | # https://docs.djangoproject.com/en/1.9/howto/static-files/
133 |
134 | STATIC_URL = '/static/'
135 | STATICFILES_DIRS = (
136 | os.path.join(BASE_DIR, "static"),
137 | )
138 |
139 | # Sending Email
140 | EMAIL_HOST = 'smtp.163.com'
141 | EMAIL_HOST_USER = 'username'
142 | EMAIL_HOST_PASSWORD = 'password'
143 | EMAIL_PORT = 25
144 | EMAIL_USE_TLS = True
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/settings.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/mysite/settings.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/urls.py:
--------------------------------------------------------------------------------
1 | """mysite URL Configuration
2 |
3 | The `urlpatterns` list routes URLs to views. For more information please see:
4 | https://docs.djangoproject.com/en/1.9/topics/http/urls/
5 | Examples:
6 | Function views
7 | 1. Add an import: from my_app import views
8 | 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
9 | Class-based views
10 | 1. Add an import: from other_app.views import Home
11 | 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
12 | Including another URLconf
13 | 1. Import the include() function: from django.conf.urls import url, include
14 | 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
15 | """
16 | from django.conf.urls import url, include
17 | from django.contrib import admin
18 | from django.contrib.sitemaps.views import sitemap
19 | from blog.sitemaps import PostSitemap
20 | # from blog.feeds import LatestPostsFeed
21 |
22 | sitemaps = { 'posts': PostSitemap}
23 |
24 | urlpatterns = [
25 | url(r'^admin/', include(admin.site.urls)),
26 | url(r'^$', 'blog.views.post_list', name='post_list'),
27 | url(r'^blog/', include('blog.urls', namespace='blog', app_name='blog')),
28 | url(r'sitemap\.xml$', sitemap, {'sitemaps': sitemaps}, name='django.contrib.sitemaps.views.sitemap'),
29 | # url(r'^feed/$', LatestPostsFeed(), name='post_feed'),
30 | url(r'^about/$', 'blog.views.about_me', name='about_me'),
31 | ]
32 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/urls.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/mysite/urls.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for mysite project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
15 |
16 | application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/mysite/wsgi.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/mysite/wsgi.pyc
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/static/bootstrap3.3.0/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/static/bootstrap3.3.0/fonts/glyphicons-halflings-regular.eot
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/static/bootstrap3.3.0/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/static/bootstrap3.3.0/fonts/glyphicons-halflings-regular.ttf
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/static/bootstrap3.3.0/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/static/bootstrap3.3.0/fonts/glyphicons-halflings-regular.woff
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/static/bootstrap3.3.0/js/npm.js:
--------------------------------------------------------------------------------
1 | // This file is autogenerated via the `commonjs` Grunt task. You can require() this file in a CommonJS environment.
2 | require('../../js/transition.js')
3 | require('../../js/alert.js')
4 | require('../../js/button.js')
5 | require('../../js/carousel.js')
6 | require('../../js/collapse.js')
7 | require('../../js/dropdown.js')
8 | require('../../js/modal.js')
9 | require('../../js/tooltip.js')
10 | require('../../js/popover.js')
11 | require('../../js/scrollspy.js')
12 | require('../../js/tab.js')
13 | require('../../js/affix.js')
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/static/css/blog.css:
--------------------------------------------------------------------------------
1 |
2 | .content {
3 | margin-left: 400px;
4 | margin-right: 400px;
5 | }
6 | h1, h2, h3, h4 {
7 | font-family: "Helvetica Neue", Helvetica, Arial, sans-serif
8 | }
9 | .date {
10 | color: #828282;
11 | }
12 | .save {
13 | float: right;
14 | }
15 | .post-form textarea, .post-form input {
16 | width: 100%;
17 | }
18 | .top-menu, .top-menu:hover, .top-menu:visited {
19 | color: #ffffff;
20 | float: right;
21 | font-size: 26pt;
22 | margin-right: 20px;
23 | }
24 | .post {
25 | margin-bottom: 70px;
26 | }
27 | .post h1 a, .post h1 a:visited {
28 | color: #000000;
29 | }
30 |
--------------------------------------------------------------------------------
/DjangoProject-Blog/mysite/static/images/头像.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-Blog/mysite/static/images/头像.jpg
--------------------------------------------------------------------------------
/DjangoProject-DSScan/BatchSqli/BatchSqli.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 |
5 | from cmdline import parse_args
6 | import json
7 | import requests
8 | import time
9 |
10 |
11 | server = 'http://127.0.0.1:8775'
12 | headers = {'Content-Type': 'application/json'}
13 | arg = parse_args()
14 |
15 |
16 | # Create a new scan task
17 | def task_new():
18 | resp = requests.get(server+'/task/new')
19 | return resp.json()
20 |
21 |
22 | # Set the task options
23 | def option_set(taskid, target):
24 | resp = requests.post(server+'/option/'+taskid+'/set', data=json.dumps({'url': target}), headers=headers)
25 | return resp.json()
26 |
27 |
28 | # Start the scan
29 | def scan_start(taskid, target):
30 | resp = requests.post(server+'/scan/'+taskid+'/start', data=json.dumps({'url': target}), headers=headers)
31 | return resp.json()
32 |
33 |
34 | # Check whether the scan has finished
35 | def scan_status(taskid):
36 | resp = requests.get(server+'/scan/'+taskid+'/status')
37 | return resp.json()
38 |
39 |
40 | # Fetch the scan results
41 | def scan_data(taskid):
42 | resp = requests.get(server+'/scan/'+taskid+'/data')
43 | return resp.json()
44 |
45 |
46 | # Stop the scan task
47 | def scan_stop(taskid):
48 | resp = requests.get(server+'/scan/'+taskid+'/stop')
49 | return resp.json()
50 |
51 |
52 | # Kill the scan process
53 | def scan_kill(taskid):
54 | resp = requests.get(server+'/scan/'+taskid+'/kill')
55 | return resp.json()
56 |
57 |
58 | # Delete the task
59 | def task_delete(taskid):
60 | resp = requests.get(server+'/task/'+taskid+'/delete')
61 | return resp.json()
62 |
63 |
64 | def main(target):
65 | with open(target, 'r') as f:
66 | content = f.readlines()
67 | for each in content:
68 | # print each
69 | resp_json = task_new()
70 | # print resp_json
71 | task_id = resp_json['taskid']
72 | if resp_json['success']:
73 | print 'Set options...'
74 | option_json = option_set(task_id, each)
75 | if option_json['success']:
76 | print 'Options are set, starting scan...'
77 | start_json = scan_start(task_id, each)
78 | start_time = time.time()
79 | print 'Scanning...'
80 | if start_json['success']:
81 | while 1:
82 | status_json = scan_status(task_id)
83 | if status_json['status'] != 'terminated':
84 | time.sleep(10)
85 | else:
86 | print 'Scan is finished.'
87 | data_json = scan_data(task_id)
88 | if data_json['data'] == []:
89 | print 'There is no SQL Injection.'
90 | else:
91 | print 'Data is ...'
92 | print data_json['data']
93 | task_delete(task_id)
94 | print 'Delete task.'
95 | break
96 | if time.time() - start_time > 3000:
97 | print 'No response.'
98 | scan_stop(task_id)
99 | scan_kill(task_id)
100 | task_delete(task_id)
101 | break
102 | else:
103 | print 'Task Error.'
104 |
105 |
106 | if __name__ == '__main__':
107 | arg = parse_args()
108 | url_path = arg.u
109 | main(url_path)
110 | print "\n--------------------It's done.---------------------"
111 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/BatchSqli/BatchSqli_thread.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 |
5 | from cmdline import parse_args
6 | from threading import Thread
7 | from Queue import Queue
8 | import json
9 | import requests
10 | import time
11 |
12 |
13 |
14 |
15 | # Create a new scan task
16 | def task_new():
17 | resp = requests.get(server+'/task/new')
18 | return resp.json()
19 |
20 |
21 | # Set the task options
22 | def option_set(taskid, target):
23 | resp = requests.post(server+'/option/'+taskid+'/set', data=json.dumps({'url': target}), headers=headers)
24 | return resp.json()
25 |
26 |
27 | # Start the scan
28 | def scan_start(taskid, target):
29 | resp = requests.post(server+'/scan/'+taskid+'/start', data=json.dumps({'url': target}), headers=headers)
30 | return resp.json()
31 |
32 |
33 | # Check whether the scan has finished
34 | def scan_status(taskid):
35 | resp = requests.get(server+'/scan/'+taskid+'/status')
36 | return resp.json()
37 |
38 |
39 | # Fetch the scan results
40 | def scan_data(taskid):
41 | resp = requests.get(server+'/scan/'+taskid+'/data')
42 | return resp.json()
43 |
44 |
45 | # Stop the scan task
46 | def scan_stop(taskid):
47 | resp = requests.get(server+'/scan/'+taskid+'/stop')
48 | return resp.json()
49 |
50 |
51 | # Kill the scan process
52 | def scan_kill(taskid):
53 | resp = requests.get(server+'/scan/'+taskid+'/kill')
54 | return resp.json()
55 |
56 |
57 | # Delete the task
58 | def task_delete(taskid):
59 | resp = requests.get(server+'/task/'+taskid+'/delete')
60 | return resp.json()
61 |
62 |
63 | # Fill a queue with the URLs waiting to be scanned
64 | def scan_queue(target):
65 | with open(target, 'r') as f:
66 | content = f.readlines()
67 | for each in content:
68 | url_queue.put(each)
69 |
70 |
71 | # Worker thread for scanning
72 | class ScanThread(Thread):
73 |
74 | def __init__(self):
75 | Thread.__init__(self)
76 |
77 | def run(self):
78 |
79 | while True:
80 | if url_queue.empty(): break
81 | url_now = url_queue.get()
82 | print url_now
83 | resp_json = task_new()
84 | task_id = resp_json['taskid']
85 | if resp_json['success']:
86 | print 'Set options...'
87 | option_json = option_set(task_id, url_now)
88 | if option_json['success']:
89 | print 'Options are set, starting scan...'
90 | start_json = scan_start(task_id, url_now)
91 | # print start_json
92 | start_time = time.time()
93 | # print start_time
94 | print 'Scanning...'
95 | if start_json['success']:
96 | while True:
97 | status_json = scan_status(task_id)
98 | # print status_json
99 | if status_json['status'] != 'terminated':
100 | time.sleep(10)
101 | else:
102 | # print status_json
103 | print 'Scan is finished.'
104 | # print task_id
105 | data_json = scan_data(task_id)
106 | # print data_json
107 | if data_json['data'] == []:
108 | print 'There is no SQL Injection.'
109 | else:
110 | print 'Data is ...'
111 | print data_json['data']
112 | sql_list.append(url_now)
113 | task_delete(task_id)
114 | print 'Delete task.'
115 | break
116 | # print time.time()
117 | if time.time() - start_time > 3000:
118 | print 'No response.'
119 | scan_stop(task_id)
120 | scan_kill(task_id)
121 | task_delete(task_id)
122 | break
123 | else:
124 | print 'Task Error.'
125 | url_queue.task_done()
126 | # print url_queue.empty()
127 |
128 |
129 |
130 | def main(num_t):
131 |
132 | threads = []
133 | for x in xrange(num_t):
134 | threads.append(ScanThread())
135 | threads[x].start()
136 |
137 | for y in threads:
138 | y.join()
139 |
140 |
141 | if __name__ == '__main__':
142 |
143 | server = 'http://127.0.0.1:8775'
144 | headers = {'Content-Type': 'application/json'}
145 | sql_list = []
146 | arg = parse_args()
147 | url_path = arg.u
148 | num_thread = arg.t
149 | url_queue = Queue()
150 | scan_queue(url_path)
151 | main(num_thread)
152 | print "\n------------------SQL Injections-------------------"
153 | print sql_list
154 | print "\n--------------------It's done.---------------------"
155 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/BatchSqli/cmdline.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import argparse
5 | import sys
6 |
7 |
8 | def parse_args():
9 |
10 | # Create the command-line argument parser
11 | parser = argparse.ArgumentParser(prog='BatchSqli', usage="BatchSqli.py [options]",
12 | description="* Batch Sql injection Scan *",
13 | formatter_class=argparse.ArgumentDefaultsHelpFormatter)
14 |
15 | parser.add_argument('-t', metavar='ThreadNum', type=int, default=10, help='Num of threads')
16 | parser.add_argument('-u', metavar='UrlPath', type=str, default=r'./url.txt',
17 | help="The url list for scanning")
18 |
19 | # If only the script name was given, sys.argv has a single element, so show the help
20 | if len(sys.argv) == 1:
21 | sys.argv.append('-h')
22 |
23 | # Return a namespace holding the parsed command-line arguments
24 | args = parser.parse_args()
25 | return args
26 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/BatchSqli/url.txt:
--------------------------------------------------------------------------------
1 | http://127.0.0.1:88/sqli-labs/Less-1/?id=1
2 |
3 |
4 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/.idea/.name:
--------------------------------------------------------------------------------
1 | DSScan
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/.idea/DSScan.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/DSScan/__init__.py
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/DSScan/__init__.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Django settings for DSScan project.
3 |
4 | Generated by 'django-admin startproject' using Django 1.9.6.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/1.9/topics/settings/
8 |
9 | For the full list of settings and their values, see
10 | https://docs.djangoproject.com/en/1.9/ref/settings/
11 | """
12 |
13 | import os
14 |
15 | # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
16 | BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
17 |
18 | STATICFILES_DIRS = (os.path.join(BASE_DIR, "static"), )
19 |
20 | # Quick-start development settings - unsuitable for production
21 | # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
22 |
23 | # SECURITY WARNING: keep the secret key used in production secret!
24 | SECRET_KEY = 'cb8wt1e3qk^!4cq2m@fp_of1&&p(=9+$(%*jt6ps%679slvo8n'
25 |
26 | # SECURITY WARNING: don't run with debug turned on in production!
27 | DEBUG = True
28 |
29 | ALLOWED_HOSTS = []
30 |
31 |
32 | # Application definition
33 |
34 | INSTALLED_APPS = [
35 | 'django.contrib.admin',
36 | 'django.contrib.auth',
37 | 'django.contrib.contenttypes',
38 | 'django.contrib.sessions',
39 | 'django.contrib.messages',
40 | 'django.contrib.staticfiles',
41 | 'sqliscan',
42 | ]
43 |
44 | MIDDLEWARE_CLASSES = [
45 | 'django.middleware.security.SecurityMiddleware',
46 | 'django.contrib.sessions.middleware.SessionMiddleware',
47 | 'django.middleware.common.CommonMiddleware',
48 | 'django.middleware.csrf.CsrfViewMiddleware',
49 | 'django.contrib.auth.middleware.AuthenticationMiddleware',
50 | 'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
51 | 'django.contrib.messages.middleware.MessageMiddleware',
52 | 'django.middleware.clickjacking.XFrameOptionsMiddleware',
53 | ]
54 |
55 | ROOT_URLCONF = 'DSScan.urls'
56 |
57 | TEMPLATES = [
58 | {
59 | 'BACKEND': 'django.template.backends.django.DjangoTemplates',
60 | 'DIRS': [os.path.join(BASE_DIR, 'templates')],
61 |
62 | 'APP_DIRS': True,
63 | 'OPTIONS': {
64 | 'context_processors': [
65 | 'django.template.context_processors.debug',
66 | 'django.template.context_processors.request',
67 | 'django.contrib.auth.context_processors.auth',
68 | 'django.contrib.messages.context_processors.messages',
69 | ],
70 | },
71 | },
72 | ]
73 |
74 | WSGI_APPLICATION = 'DSScan.wsgi.application'
75 |
76 |
77 | # Database
78 | # https://docs.djangoproject.com/en/1.9/ref/settings/#databases
79 |
80 | DATABASES = {
81 | 'default': {
82 | 'ENGINE': 'django.db.backends.sqlite3',
83 | 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
84 | }
85 | }
86 |
87 |
88 | # Password validation
89 | # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
90 |
91 | AUTH_PASSWORD_VALIDATORS = [
92 | {
93 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
94 | },
95 | {
96 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
97 | },
98 | {
99 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
100 | },
101 | {
102 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
103 | },
104 | ]
105 |
106 |
107 | # Internationalization
108 | # https://docs.djangoproject.com/en/1.9/topics/i18n/
109 |
110 | LANGUAGE_CODE = 'zh-Hans'
111 |
112 | TIME_ZONE = 'UTC'
113 |
114 | USE_I18N = True
115 |
116 | USE_L10N = True
117 |
118 | USE_TZ = True
119 |
120 |
121 | # Static files (CSS, JavaScript, Images)
122 | # https://docs.djangoproject.com/en/1.9/howto/static-files/
123 |
124 | STATIC_URL = '/static/'
125 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/settings.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/DSScan/settings.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/urls.py:
--------------------------------------------------------------------------------
1 | """DSScan URL Configuration
2 |
3 | The `urlpatterns` list routes URLs to views. For more information please see:
4 | https://docs.djangoproject.com/en/1.9/topics/http/urls/
5 | Examples:
6 | Function views
7 | 1. Add an import: from my_app import views
8 | 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
9 | Class-based views
10 | 1. Add an import: from other_app.views import Home
11 | 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
12 | Including another URLconf
13 | 1. Import the include() function: from django.conf.urls import url, include
14 | 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
15 | """
16 | from django.conf.urls import url
17 | from django.contrib import admin
18 | from sqliscan import views
19 |
20 | urlpatterns = [
21 | url(r'^admin/', admin.site.urls),
22 | url(r'^task/$', views.sql_tasks, name='task'),
23 | url(r'^$', views.url_sql, name='home'),
24 | url(r'^scan/$', views.sql_scan, name='scan'),
25 | url(r'^vuls/$', views.vul_tasks, name='vuls'),
26 | url(r'^search/', views.url_search, name='search'),
27 | url(r'^config/$', views.scan_config, name='config'),
28 |
29 | ]
30 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/urls.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/DSScan/urls.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for DSScan project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DSScan.settings")
15 |
16 | application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/DSScan/wsgi.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/DSScan/wsgi.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/db.sqlite3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/db.sqlite3
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import os
3 | import sys
4 |
5 | if __name__ == "__main__":
6 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DSScan.settings")
7 |
8 | from django.core.management import execute_from_command_line
9 |
10 | execute_from_command_line(sys.argv)
11 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/__init__.py
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/__init__.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/admin.py:
--------------------------------------------------------------------------------
1 | from django.contrib import admin
2 | from .models import SqlInjection, UrlList, ScanConfig
3 |
4 | class SqlInjectionAdmin(admin.ModelAdmin):
5 | list_display = ('target_url', 'task_id', 'scan_data', 'vulnerability','scan_log', )
6 | list_filter = ('scan_status', 'vulnerability', )
7 | search_fields = ('target_url', )
8 | ordering = ('-vulnerability', 'task_id', )
9 |
10 | class UrlListAdmin(admin.ModelAdmin):
11 | list_display = ('target_urls', )
12 |
13 | class ScanConfigAdmin(admin.ModelAdmin):
14 | list_display = ('thread_num', )
15 |
16 | admin.site.register(SqlInjection, SqlInjectionAdmin)
17 | admin.site.register(UrlList, UrlListAdmin)
18 | admin.site.register(ScanConfig, ScanConfigAdmin)
19 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/admin.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/admin.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/apps.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | from django.apps import AppConfig
4 |
5 |
6 | class SqliscanConfig(AppConfig):
7 | name = 'sqliscan'
8 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/forms.py:
--------------------------------------------------------------------------------
1 | from django import forms
2 | from .models import UrlList, ScanConfig
3 |
4 | class UrlListForm(forms.ModelForm):
5 |
6 | class Meta:
7 | model = UrlList
8 | fields = ('target_urls', )
9 |
10 |
11 | class SearchForm(forms.Form):
12 | query = forms.CharField()
13 |
14 |
15 | class ScanConfigForm(forms.ModelForm):
16 |
17 | class Meta:
18 | model = ScanConfig
19 | fields = ('thread_num', )
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/forms.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/forms.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2017-01-02 07:49
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations, models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | initial = True
11 |
12 | dependencies = [
13 | ]
14 |
15 | operations = [
16 | migrations.CreateModel(
17 | name='SqlInjection',
18 | fields=[
19 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
20 | ('task_id', models.CharField(db_index=True, max_length=1000)),
21 | ('target_url', models.URLField(max_length=1000, unique=True)),
22 | ('scan_status', models.CharField(max_length=1000)),
23 | ('scan_data', models.CharField(max_length=1000)),
24 | ('scan_log', models.CharField(max_length=1000)),
25 | ('vulnerable', models.BooleanField(db_index=True, default=False)),
26 | ],
27 | options={
28 | 'ordering': ('-vulnerable',),
29 | },
30 | ),
31 | ]
32 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0001_initial.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/0001_initial.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0002_auto_20170102_1624.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2017-01-02 08:24
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('sqliscan', '0001_initial'),
12 | ]
13 |
14 | operations = [
15 | migrations.AlterModelOptions(
16 | name='sqlinjection',
17 | options={'ordering': ('-vulnerability',)},
18 | ),
19 | migrations.RenameField(
20 | model_name='sqlinjection',
21 | old_name='vulnerable',
22 | new_name='vulnerability',
23 | ),
24 | ]
25 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0002_auto_20170102_1624.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/0002_auto_20170102_1624.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0003_auto_20170112_2111.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2017-01-12 13:11
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations, models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('sqliscan', '0002_auto_20170102_1624'),
12 | ]
13 |
14 | operations = [
15 | migrations.AddField(
16 | model_name='sqlinjection',
17 | name='target_urls',
18 | field=models.TextField(null=True),
19 | ),
20 | migrations.AlterField(
21 | model_name='sqlinjection',
22 | name='task_id',
23 | field=models.CharField(db_index=True, max_length=1000, verbose_name='\u4efb\u52a1id'),
24 | ),
25 | ]
26 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0003_auto_20170112_2111.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/0003_auto_20170112_2111.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0004_urllist.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2017-01-12 14:47
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations, models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('sqliscan', '0003_auto_20170112_2111'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='UrlList',
17 | fields=[
18 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
19 | ('target_urls', models.TextField(null=True)),
20 | ],
21 | ),
22 | ]
23 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0004_urllist.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/0004_urllist.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0005_remove_sqlinjection_target_urls.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2017-01-15 14:25
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('sqliscan', '0004_urllist'),
12 | ]
13 |
14 | operations = [
15 | migrations.RemoveField(
16 | model_name='sqlinjection',
17 | name='target_urls',
18 | ),
19 | ]
20 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0005_remove_sqlinjection_target_urls.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/0005_remove_sqlinjection_target_urls.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0006_scanconfig.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Generated by Django 1.9.6 on 2017-01-22 10:18
3 | from __future__ import unicode_literals
4 |
5 | from django.db import migrations, models
6 |
7 |
8 | class Migration(migrations.Migration):
9 |
10 | dependencies = [
11 | ('sqliscan', '0005_remove_sqlinjection_target_urls'),
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='ScanConfig',
17 | fields=[
18 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
19 | ('thread_num', models.IntegerField(default=2)),
20 | ],
21 | ),
22 | ]
23 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/0006_scanconfig.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/0006_scanconfig.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/__init__.py
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/migrations/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/migrations/__init__.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/models.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import unicode_literals
4 | from django.db import models
5 |
6 | # Table with one row per scan task
7 | class SqlInjection(models.Model):
8 | task_id = models.CharField('任务id', max_length=1000, db_index=True)
9 | target_url = models.URLField(max_length=1000, unique=True)
10 | scan_status = models.CharField(max_length=1000)
11 | scan_data = models.CharField(max_length=1000)
12 | scan_log = models.CharField(max_length=1000)
13 | vulnerability = models.BooleanField(default=False, db_index=True)
14 |
15 | class Meta:
16 | ordering = ('-vulnerability', )
17 |
18 | # Table for URL lists imported in one batch
19 | class UrlList(models.Model):
20 | target_urls = models.TextField(null=True)
21 |
22 | # Table for scan configuration settings
23 | class ScanConfig(models.Model):
24 | thread_num = models.IntegerField(default=2)
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/models.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/models.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/sqls.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import json
4 | import requests
5 | import time
6 | from threading import Thread
7 | from .models import SqlInjection
8 |
9 | server = 'http://127.0.0.1:8775'
10 | headers = {'Content-Type': 'application/json'}
11 |
12 |
13 | # Create a new scan task
14 | def task_new():
15 | resp = requests.get(server+'/task/new')
16 | return resp.json()
17 |
18 |
19 | # Set the task options
20 | def option_set(taskid, target):
21 | resp = requests.post(server+'/option/'+taskid+'/set', data=json.dumps({'url': target}), headers=headers)
22 | return resp.json()
23 |
24 |
25 | # Start the scan
26 | def scan_start(taskid, target):
27 | resp = requests.post(server+'/scan/'+taskid+'/start', data=json.dumps({'url': target}), headers=headers)
28 | return resp.json()
29 |
30 |
31 | # Check whether the scan has finished
32 | def scan_status(taskid):
33 | resp = requests.get(server+'/scan/'+taskid+'/status')
34 | return resp.json()
35 |
36 |
37 | # Fetch the scan results
38 | def scan_data(taskid):
39 | resp = requests.get(server+'/scan/'+taskid+'/data')
40 | return resp.json()
41 |
42 |
43 | # Fetch the scan log
44 | def scan_log(taskid):
45 | resp = requests.get(server+'/scan/'+taskid+'/log')
46 | return resp.json()
47 |
48 |
49 | # Stop the scan task
50 | def scan_stop(taskid):
51 | resp = requests.get(server+'/scan/'+taskid+'/stop')
52 | return resp.json()
53 |
54 |
55 | # Kill the scan process
56 | def scan_kill(taskid):
57 | resp = requests.get(server+'/scan/'+taskid+'/kill')
58 | return resp.json()
59 |
60 |
61 | # Delete the task
62 | def task_delete(taskid):
63 | resp = requests.get(server+'/task/'+taskid+'/delete')
64 | return resp.json()
65 |
66 |
67 | # Worker thread for scanning
68 | class ScanThread(Thread):
69 |
70 | def __init__(self, url_queue):
71 | Thread.__init__(self)
72 | self.url_queue = url_queue
73 |
74 | def run(self):
75 | while True:
76 | if self.url_queue.empty(): break
77 | url_now = self.url_queue.get()
78 | print url_now
79 | task = SqlInjection.objects.get(target_url=url_now)
80 | resp_json = task_new()
81 | task_id = resp_json['taskid']
82 | task.task_id = task_id
83 | if resp_json['success']:
84 | print 'Set options...'
85 | option_json = option_set(task_id, url_now)
86 | if option_json['success']:
87 | print 'Options are set, starting scan...'
88 | start_json = scan_start(task_id, url_now)
89 | # print start_json
90 | start_time = time.time()
91 | # print start_time
92 | print 'Scanning...'
93 | if start_json['success']:
94 | while True:
95 | status_json = scan_status(task_id)
96 | # print status_json
97 | task.scan_status = status_json['status']
98 | if status_json['status'] != 'terminated':
99 | time.sleep(10)
100 | else:
101 | # print status_json
102 | print 'Scan is finished.'
103 | # print task_id
104 | data_json = scan_data(task_id)
105 | # print data_json
106 | if data_json['data'] == []:
107 | print 'There is no SQL Injection.'
108 | task.vulnerability = False
109 | else:
110 | print 'Data is ...'
111 | print data_json['data']
112 | task.vulnerability = True
113 | task.scan_data = data_json['data']
114 | task.scan_log = scan_log(task_id)['log']
115 | task_delete(task_id)
116 | print 'Delete task.'
117 | break
118 | # print time.time()
119 | if time.time() - start_time > 3000:
120 | print 'No response.'
121 | scan_stop(task_id)
122 | scan_kill(task_id)
123 | task_delete(task_id)
124 | break
125 | else:
126 | print 'Task Error.'
127 | self.url_queue.task_done()
128 | task.save()
129 | # print url_queue.empty()
130 |
131 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/sqls.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/sqls.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/templates/sqliscan/base.html:
--------------------------------------------------------------------------------
1 | {% load staticfiles %}
2 |
3 |
4 |
5 | {% block title %}{% endblock %}
6 |
7 |
8 |
9 |
86 |
87 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
149 |
150 |
151 |
155 |
156 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 | -
171 |
172 |
173 | 首页
174 |
175 |
176 | -
177 |
182 |
186 |
187 |
188 | -
189 |
194 |
206 |
207 |
208 |
209 |
210 |
211 | {% block content %}
212 |
213 | {% endblock %}
214 |
215 |
216 |
217 |
218 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/templates/sqliscan/config.html:
--------------------------------------------------------------------------------
1 | {% extends "sqliscan/base.html" %}
2 |
3 | {% block title %}DSScan{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 | CONFIG 扫描配置
10 | {{ num_thread }}
11 |
12 |
13 |
14 |
15 | {% if submit %}
16 |
17 |
18 | Success! Thread Num :{{ num_thread }}
19 |
20 |
21 |
22 |
23 |
26 |
27 |
28 |
29 |
32 |
33 |
34 | {% else %}
35 |
50 |
51 | {% endif %}
52 |
53 |
54 |
55 |
56 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/templates/sqliscan/open.html:
--------------------------------------------------------------------------------
1 | {% extends "sqliscan/base.html" %}
2 |
3 | {% block title %}DSScan{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 | URL 导入扫描
10 |
11 |
12 |
13 |
14 | {% if submit %}
15 |
16 |
17 | Success! Successfully submit.
18 |
19 |
20 |
21 |
22 |
25 |
26 |
27 |
28 |
31 |
32 |
33 | {% else %}
34 |
49 |
50 | {% endif %}
51 |
52 |
53 |
54 |
55 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/templates/sqliscan/scan.html:
--------------------------------------------------------------------------------
1 | {% extends "sqliscan/base.html" %}
2 |
3 | {% block title %}DSScan{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 | SCAN 开启扫描
10 |
11 |
12 |
13 |
14 | {% if submit %}
15 |
16 |
17 | Success! Scanning ... Please Wait ...
18 |
19 |
20 |
21 |
22 |
25 |
26 |
27 |
28 |
31 |
32 |
33 | {% else %}
34 |
75 | {% endif %}
76 |
77 |
78 |
79 | {% endblock %}
80 |
81 |
82 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/templates/sqliscan/search.html:
--------------------------------------------------------------------------------
1 | {% extends "sqliscan/base.html" %}
2 |
3 | {% block title %}DSScan{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 | TASK 搜索列表
10 | {{ num_result }}
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 | URL |
25 | TASKID |
26 | STATUS |
27 | DATA |
28 | LOG |
29 |
30 |
31 | {% for result in search_result %}
32 | {% if result.vulnerability %}
33 |
34 | {% else %}
35 |
36 | {% endif %}
37 | {{ result.target_url }} |
38 | {{ result.task_id }} |
39 | {{ result.scan_status }} |
40 | {{ result.scan_data }} |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
55 |
56 | {{ result.scan_log }}
57 |
58 |
61 |
62 |
63 |
64 | |
65 |
66 | {% endfor %}
67 |
68 |
69 |
70 |
71 |
72 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/templates/sqliscan/task.html:
--------------------------------------------------------------------------------
1 | {% extends "sqliscan/base.html" %}
2 |
3 | {% block title %}DSScan{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 | TASK 任务列表
10 | {{ num_url }}
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 | URL |
25 | TASKID |
26 | STATUS |
27 | DATA |
28 | LOG |
29 |
30 |
31 | {% for task in tasks %}
32 | {% if task.vulnerability %}
33 |
34 | {% else %}
35 |
36 | {% endif %}
37 | {{ task.target_url }} |
38 | {{ task.task_id }} |
39 | {{ task.scan_status }} |
40 | {{ task.scan_data }} |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
55 |
56 | {{ task.scan_log }}
57 |
58 |
61 |
62 |
63 |
64 | |
65 |
66 | {% endfor %}
67 |
68 |
69 |
70 |
71 |
72 | {% endblock %}
73 |
74 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/templates/sqliscan/vuls.html:
--------------------------------------------------------------------------------
1 | {% extends "sqliscan/base.html" %}
2 |
3 | {% block title %}DSScan{% endblock %}
4 |
5 | {% block content %}
6 |
7 |
8 |
9 | TASK 漏洞列表
10 | {{ num_vul }}
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 | URL |
25 | TASKID |
26 | STATUS |
27 | DATA |
28 | LOG |
29 |
30 |
31 | {% for vul in vuls %}
32 |
33 | {{ vul.target_url }} |
34 | {{ vul.task_id }} |
35 | {{ vul.scan_status }} |
36 | {{ vul.scan_data }} |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
51 |
52 | {{ vul.scan_log }}
53 |
54 |
57 |
58 |
59 |
60 | |
61 |
62 | {% endfor %}
63 |
64 |
65 |
66 |
67 |
68 | {% endblock %}
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/tests.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 |
3 | # Create your tests here.
4 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/views.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from django.shortcuts import render
4 | from .models import SqlInjection, UrlList, ScanConfig
5 | from .forms import UrlListForm, SearchForm, ScanConfigForm
6 | from .sqls import *
7 | from Queue import Queue
8 |
9 |
10 |
11 | # Scan configuration view
12 | def scan_config(request):
13 |
14 | submit = False
15 | thread_config = ScanConfig.objects.all()
16 | # Read the thread count saved in the database
17 | default_thread = 2
18 | for thread in thread_config:
19 | default_thread = thread.thread_num
20 |
21 | if request.method == 'POST':
22 | form = ScanConfigForm(request.POST)
23 | if form.is_valid():
24 | # Delete the previous configuration
25 | for thread in thread_config:
26 | thread.delete()
27 | num_thread = form.cleaned_data['thread_num']
28 | form.save()
29 | submit = True
30 |
31 | else:
32 | num_thread = default_thread
33 |
34 | else:
35 | form = ScanConfigForm()
36 | num_thread = default_thread
37 |
38 | return render(request, 'sqliscan/config.html', {'form': form, 'submit': submit, 'num_thread': num_thread})
39 |
40 | # Quick keyword search over URLs
41 | def url_search(request):
42 |
43 | form = SearchForm()
44 | if 'query' in request.GET:
45 | form = SearchForm(request.GET)
46 | # print form
47 | if form.is_valid():
48 | # keyword is a dict with a single key/value pair
49 | keyword = form.cleaned_data
50 | print keyword
51 | # Only match against the target URL
52 | search_result = SqlInjection.objects.filter(target_url__contains=keyword['query'])
53 | num_result = len(search_result)
54 | else:
55 | print 'form is not valid'
56 | search_result = {}
57 | num_result = 0
58 | else:
59 | print 'not GET no query'
60 | search_result = {}
61 | num_result = 0
62 |
63 | return render(request, 'sqliscan/search.html', {'form': form, 'search_result': search_result,
64 | 'num_result': num_result})
65 |
66 |
67 | # Show all tasks with confirmed vulnerabilities
68 | def vul_tasks(request):
69 | # These are necessarily tasks that have already been scanned
70 | vuls = SqlInjection.objects.filter(vulnerability=True)
71 | # print vuls
72 | num_vul = len(vuls)
73 | return render(request, 'sqliscan/vuls.html', {'vuls': vuls, 'num_vul': num_vul})
74 |
75 |
76 | # Show all scan tasks
77 | def sql_tasks(request):
78 | # All UrlList objects
79 | url_lists = UrlList.objects.all()
80 | # For each UrlList object
81 | for each_list in url_lists:
82 | # print each_list.target_urls
83 | # All URLs stored in this UrlList object
84 | urls = each_list.target_urls
85 | # Split the string into a list; the front end separates URLs with CRLF line breaks
86 | url_list = urls.split('\r\n')
87 | # print url_list
88 | for each_url in url_list:
89 | # SqlInjection.objects.values() returns a list of dicts, so dic_url is built as a dict for the comparison
90 | dic_url = {'target_url': each_url}
91 | # Compare to avoid importing duplicate URLs; add the URL only if it is not already in the SqlInjection table
92 | if dic_url not in SqlInjection.objects.values("target_url"):
93 | # print SqlInjection.objects.values("target_url")
94 | SqlInjection.objects.create(target_url=each_url)
95 | # Once url_list is imported into the SqlInjection table the whole each_list object has been consumed, so delete it from UrlList
96 | each_list.delete()
97 |
98 | tasks = SqlInjection.objects.all()
99 |
100 | num_url = len(tasks)
101 |
102 | return render(request, 'sqliscan/task.html', {'tasks': tasks, 'num_url': num_url})
103 |
104 |
105 | # Import the list of URLs to scan
106 | def url_sql(request):
107 | # A successful submission switches to a success message
108 | submit = False
109 |
110 | if request.method == 'POST':
111 | form = UrlListForm(request.POST)
112 | if form.is_valid():
113 | # single_urls = form.cleaned_data['target_urls']
114 | # print single_urls
115 | # single_url = single_urls.split('\r\n')
116 | # print single_url
117 | # url_form = form.save(commit=False)
118 | # url_form.save()
119 | form.save()
120 | submit = True
121 |
122 | else:
123 | form = UrlListForm()
124 |
125 | return render(request, 'sqliscan/open.html', {'form': form, 'submit': submit})
126 |
127 |
128 | # Start the scan
129 | def sql_scan(request):
130 |
131 | submit = False
132 |
133 | # All UrlList objects
134 | url_lists = UrlList.objects.all()
135 | # For each UrlList object
136 | for each_list in url_lists:
137 | # print each_list.target_urls
138 | # All URLs stored in this UrlList object
139 | urls = each_list.target_urls
140 | # Split the string into a list; the front end separates URLs with CRLF line breaks
141 | url_list = urls.split('\r\n')
142 | # print url_list
143 | for each_url in url_list:
144 | # SqlInjection.objects.values() returns a list of dicts, so dic_url is built as a dict for the comparison
145 | dic_url = {'target_url': each_url}
146 | # Compare to avoid importing duplicate URLs; add the URL only if it is not already in the SqlInjection table
147 | if dic_url not in SqlInjection.objects.values("target_url"):
148 | # print SqlInjection.objects.values("target_url")
149 | SqlInjection.objects.create(target_url=each_url)
150 | # Once url_list is imported into the SqlInjection table the whole each_list object has been consumed, so delete it from UrlList
151 | each_list.delete()
152 |
153 | tasks = SqlInjection.objects.all()
154 | # Create a queue to hold the target_url values
155 | url_queue = Queue()
156 |
157 | # Get the checked checkbox values as a list
158 | check_list = request.POST.getlist('checkbox')
159 | # print check_list
160 | btn_val = request.POST.get('btn')
161 | # print btn_val
162 | # If checkboxes are selected and the Delete button was clicked
163 | if check_list and btn_val == 'btnDelete':
164 | for url_target in check_list:
165 | # print url_target
166 | SqlInjection.objects.filter(target_url=url_target).delete()
167 | # print "Deleted."
168 |
169 | if check_list and btn_val == 'btnScan':
170 | submit = True
171 | for url_target in check_list:
172 | url_queue.put(url_target)
173 |
174 | # print url_queue.queue
175 | # Create a list to hold the worker threads
176 | threads = []
177 | # Default thread count is 2
178 | num_thread = 2
179 | thread_config = ScanConfig.objects.all()
180 | for each_config in thread_config:
181 | num_thread = each_config.thread_num
182 | # print num_thread
183 | # Tested with 4 threads
184 | for x in xrange(num_thread):
185 | threads.append(ScanThread(url_queue))
186 | threads[x].start()
187 |
188 | for y in threads:
189 | y.join()
190 |
191 | return render(request, 'sqliscan/scan.html', {'tasks': tasks, 'submit': submit})
192 |
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/sqliscan/views.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/sqliscan/views.pyc
--------------------------------------------------------------------------------
/DjangoProject-DSScan/DSScan/static/images/头像.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/DSScan/static/images/头像.jpg
--------------------------------------------------------------------------------
/DjangoProject-DSScan/README.md:
--------------------------------------------------------------------------------
1 | # DjangoProject-DSScan
2 |
3 | ### Environment
4 | ```
5 | 1. Python 2.7.11
6 | 2. Django 1.9.6
7 | ```
8 |
9 | ### Instruction
10 | ```
11 | 1. Combines Django with the sqlmap API (sqlmapapi) for visual batch SQL injection scanning (see the quick-start sketch at the end of this README)
12 | 2. The front end is styled with Bootstrap
13 | 3. Scanning runs in multiple threads; the thread count can be configured from the front end
14 | 4. Scan tasks can be created and deleted from the front end
15 | ```
16 |
17 | ### Example
18 | 
19 | 
20 |
21 |
22 |
23 |
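24 | ### Quick start (sketch)
25 | A minimal sketch of how the pieces are usually wired together, run from this project's root. It assumes sqlmap is installed separately (its path below is illustrative); the 8775 port and the -u/-t flags come from this repository's code.
26 | ```
27 | # 1. Start the sqlmap REST-JSON API server (the scanner expects it on 127.0.0.1:8775)
28 | python /path/to/sqlmap/sqlmapapi.py -s -p 8775
29 |
30 | # 2. Prepare the database and start the Django front end
31 | python DSScan/manage.py migrate
32 | python DSScan/manage.py runserver
33 |
34 | # 3. Alternatively, run the standalone multi-threaded batch scanner with 10 threads
35 | python BatchSqli/BatchSqli_thread.py -u BatchSqli/url.txt -t 10
36 | ```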
--------------------------------------------------------------------------------
/DjangoProject-DSScan/readme_pic/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/readme_pic/1.png
--------------------------------------------------------------------------------
/DjangoProject-DSScan/readme_pic/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/DjangoProject-DSScan/readme_pic/2.png
--------------------------------------------------------------------------------
/PythonSpider-BeeBeeto/BeeBeetoSpider.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: Spider for BeeBeeto
6 | Author: PyxYuYu
7 | """
8 |
9 | import urllib2
10 | from bs4 import BeautifulSoup
11 | import re
12 | import os
13 | import sys
14 | import argparse
15 |
16 | # Save a POC to a txt file
17 | def Poc_Save(save_path, save_name, poc):
18 | # Create the save directory if it does not exist
19 | if not os.path.exists(save_path):
20 | os.makedirs(save_path)
21 | path = save_path + '/' + save_name
22 | with open(path, 'a+') as f:
23 | f.write(poc)
24 | f.write('\n')
25 |
26 | # Fetch and parse the page source for a URL
27 | def Url_Soup(url):
28 | # The site blocks crawlers, so spoof a browser User-Agent
29 | user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0'
30 | request = urllib2.Request(url)
31 | request.add_header('User-Agent', user_agent)
32 | response = urllib2.urlopen(request)
33 | soup = BeautifulSoup(response.read(), 'html.parser')
34 | return soup
35 |
36 | # Fetch the POC data
37 | def Poc_Get():
38 |
39 | global poc_list
40 | global sign
41 | x = 1
42 | url_base = 'http://www.beebeeto.com/pdb/public/?page='
43 | url_first = url_base + str(x)
44 | poc_path = './BeeBeeto'
45 | full_poc = Url_Soup(url_first).find_all(href=re.compile(r'poc'))
46 | for each_public in full_poc:
47 | poc_list.append(each_public.string)
48 | print each_public.string + ' is done.'
49 | url_vul = url_index + each_public.attrs['href']
50 | poc_detail = Url_Soup(url_vul).find_all('pre')
51 | for each_detail in poc_detail:
52 | poc_name = each_public.string.replace('/', '') + '.txt'
53 | Poc_Save(poc_path, poc_name, each_detail.string.encode('utf-8'))
54 | print 'This page is done. The next page is starting.'
55 | x = 2
56 | url = url_base + str(x)
57 | while x <= 100:
58 | if (sign!=0):
59 | Poc_Get1(url)
60 | x = x + 1
61 | url = url_base + str(x)
62 | else:
63 | break
64 |
65 | def Poc_Get1(url):
66 |
67 | global poc_list
68 | global sign
69 | poc_path = './BeeBeeto'
70 | full_poc = Url_Soup(url).find_all(href=re.compile(r'poc'))
71 | for each_public in full_poc:
72 | if each_public.string in poc_list:
73 | print "It's over."
74 | sign = 0
75 | return 0
76 | else:
77 | poc_list.append(each_public.string)
78 | print each_public.string + ' is done.'
79 | url_vul = url_index + each_public.attrs['href']
80 | poc_detail = Url_Soup(url_vul).find_all('pre')
81 | for each_detail in poc_detail:
82 | # Strip characters that are not allowed in file names
83 | a = re.compile('[/\?\\<>:\*]')
84 | poc_name = a.sub('', each_public.string) + '.txt'
85 | Poc_Save(poc_path, poc_name, each_detail.string.encode('utf-8'))
86 | print 'This page is done. The next page is starting.'
87 |
88 |
89 | if __name__ == '__main__':
90 | sign = 1
91 | poc_list = []
92 | print '----start----'
93 | url_index = 'http://www.beebeeto.com'
94 | Poc_Get()
95 | print '----end------'
96 |
97 |
--------------------------------------------------------------------------------
/PythonSpider-BeeBeeto/BeeBeetoSpider02.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: Spider for BeeBeeto
6 | Author: PyxYuYu
7 | """
8 |
9 | import urllib2
10 | from bs4 import BeautifulSoup
11 | from threading import Thread
12 | from threading import Lock
13 | from Queue import Queue
14 | import re
15 | import os
16 | import time
17 |
18 | url_base = 'http://www.beebeeto.com/pdb/public/?page='
19 | # 创建一个队列保存poc_url
20 | # queue效率比list高
21 | url_queue = Queue()
22 | # 创建一个队列保存poc_name
23 | name_queue = Queue()
24 | # 多个线程保存同一页的poc_name,需要锁定保存的位置,否则会保存同一个name
25 | name_lock = Lock()
26 | # 用来保证多个线程之间分别保存不同的页面
27 | x = 1
28 |
29 | # 获取POC和POCURL的一个线程类
30 | class PocGet(Thread):
31 | def __init__(self):
32 | Thread.__init__(self)
33 |
34 | def run(self):
35 | # 全局变量,在函数内声明来确定全局
36 | # 用来分隔每个线程处理不同的页面
37 | global x
38 | name_lock.acquire()
39 | get_poc(x)
40 |         x = x + 1  # increment while the lock is still held so no two threads fetch the same page
41 |         name_lock.release()
42 |
43 |
44 | # 保存POC到txt文件
45 | def poc_save(save_path, save_name, poc):
46 | # 创建保存路径
47 | if not os.path.exists(save_path):
48 | os.makedirs(save_path)
49 | path = save_path + '/' + save_name
50 | with open(path, 'a+') as f:
51 | f.write(poc)
52 | f.write('\n')
53 |
54 |
55 | # 利用POCURL队列获取具体POC内容一个线程类
56 | # 这里一个线程只能获取一个POC,如果一个线程需要获取多个POC
57 | # 就需要一个while循环
58 | class SavePoc(Thread):
59 |
60 | def __init__(self):
61 | Thread.__init__(self)
62 |
63 | def run(self):
64 | poc_path = './BeeBeeto'
65 | # 方法一 : 需要开启191条线程
66 | # url_poc = url_queue.get()
67 | # each_name = name_queue.get()
68 | # poc_detail = Url_Soup(url_poc).find_all('pre')
69 | # for each_detail in poc_detail:
70 | # # 针对文件名中不能出现的几个符号正则替换成空
71 | # r = re.compile('[/\?\\<>:\*]')
72 | # poc_name = r.sub('', each_name.string) + '.txt'
73 | # poc_save(poc_path, poc_name, each_detail.string.encode('utf-8'))
74 | # url_queue.task_done()
75 | # name_queue.task_done()
76 | # 方法二 : 使用while循环就可以自己设定线程数
77 | while True:
78 | # 开多少条线程,name_queue.qsize就从哪开始
79 | # 比如开100条,那么size直接从91开始,因为100条先内带了100个name
80 | if name_queue.qsize() > 0:
81 | poc_name = ''
82 | url_poc = url_queue.get()
83 | each_name = name_queue.get()
84 | poc_detail = url_soup(url_poc).find_all('pre')
85 | for each_detail in poc_detail:
86 | # 针对文件名中不能出现的几个符号正则替换成空
87 |                     r = re.compile(r'[/\?\\<>:\*]')  # raw string so the backslash is stripped as well
88 | poc_name = r.sub('', each_name.string) + '.txt'
89 | poc_save(poc_path, poc_name, each_detail.string.encode('utf-8'))
90 | print 'saving ' + poc_name
91 | url_queue.task_done()
92 | name_queue.task_done()
93 | print name_queue.qsize()
94 | else:
95 | break
96 |
97 |
98 | def get_poc(page):
99 | url_first = url_base + str(page)
100 | full_poc = url_soup(url_first).find_all(href=re.compile(r'poc'))
101 | for each_public in full_poc:
102 | print each_public.string + ' is done.'
103 | url_vul = url_index + each_public.attrs['href']
104 | print url_vul
105 | name_queue.put(each_public)
106 | url_queue.put(url_vul)
107 |
108 |
109 | def main():
110 | threads = []
111 | threads_poc = []
112 | # 一共有13页,所以开启13条线程获取
113 | for y in range(13):
114 | threads.append(PocGet())
115 | threads[y].setDaemon(True)
116 | threads[y].start()
117 | for i in range(13):
118 | threads[i].join()
119 | # 一共有191个POC,自定义线程数
120 | threads_num = int(raw_input('Please input the number of threads: '))
121 | for a in range(threads_num):
122 | threads_poc.append(SavePoc())
123 | threads_poc[a].setDaemon(True)
124 | threads_poc[a].start()
125 | for b in range(threads_num):
126 | threads_poc[b].join()
127 |
128 |
129 | # 获取URL源码
130 | def url_soup(url):
131 | # 网站禁止爬虫,需要伪装浏览器
132 | user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0'
133 | request = urllib2.Request(url)
134 | request.add_header('User-Agent', user_agent)
135 | response = urllib2.urlopen(request)
136 | soup = BeautifulSoup(response.read(), 'html.parser')
137 | return soup
138 |
139 | if __name__ == '__main__':
140 | st = time.time()
141 | url_index = 'http://www.beebeeto.com'
142 | print '----------begin----------'
143 | main()
144 | print '----------end------------'
145 | print (time.time() - st)
146 |
--------------------------------------------------------------------------------
/PythonSpider-BeeBeeto/BeeBeetoSpider03.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: Spider for BeeBeeto
6 | Author: PyxYuYu
7 | """
8 |
9 | import urllib2
10 | from bs4 import BeautifulSoup
11 | from threading import Thread
12 | from threading import Lock
13 | from Queue import Queue
14 | import re
15 | import os
16 | import time
17 |
18 | from argparse import ArgumentParser
19 |
20 | # 创建解析对象
21 | # description: 参数帮助信息之前的描述程序
22 | # add_help: 默认是True,False就是禁用-h/-help
23 | parser = ArgumentParser(usage="You can input the number of threads(default 10).", description="This is a spider for BeeBeeto.")
24 | # 指定程序需要接受的命令参数
25 | # 定位参数,执行程序时必选
26 | # parser.add_argument('echo', help='echo the string')
27 | # 可选参数,可选
28 | parser.add_argument('--threads', default=10, type=int, help="input the number of threads")
29 | # 忽略了错误参数的输入
30 | args, remaining = parser.parse_known_args(args=None, namespace=None)
31 | # 可以设置多个命令参数,利用if判断来达到希望达到的目的
32 | print args.threads
33 |
34 | url_base = 'http://www.beebeeto.com/pdb/public/?page='
35 | # 创建一个队列保存poc_url
36 | # queue效率比list高
37 | url_queue = Queue()
38 | # 创建一个队列保存poc_name
39 | name_queue = Queue()
40 | # 多个线程保存同一页的poc_name,需要锁定保存的位置,否则会保存同一个name
41 | name_lock = Lock()
42 | # 用来保证多个线程之间分别保存不同的页面
43 | x = 1
44 |
45 | # 获取POC和POCURL的一个线程类
46 | class PocGet(Thread):
47 | def __init__(self):
48 | Thread.__init__(self)
49 |
50 | def run(self):
51 | # 全局变量,在函数内声明来确定全局
52 | # 用来分隔每个线程处理不同的页面
53 | global x
54 | name_lock.acquire()
55 | get_poc(x)
56 |         x = x + 1  # increment while the lock is still held so no two threads fetch the same page
57 |         name_lock.release()
58 |
59 |
60 | # 保存POC到txt文件
61 | def poc_save(save_path, save_name, poc):
62 | # 创建保存路径
63 | if not os.path.exists(save_path):
64 | os.makedirs(save_path)
65 | path = save_path + '/' + save_name
66 | with open(path, 'a+') as f:
67 | f.write(poc)
68 | f.write('\n')
69 |
70 |
71 | # 利用POCURL队列获取具体POC内容一个线程类
72 | # 这里一个线程只能获取一个POC,如果一个线程需要获取多个POC
73 | # 就需要一个while循环
74 | class SavePoc(Thread):
75 |
76 | def __init__(self):
77 | Thread.__init__(self)
78 |
79 | def run(self):
80 | poc_path = './BeeBeeto'
81 | # 方法一 : 需要开启191条线程
82 | # url_poc = url_queue.get()
83 | # each_name = name_queue.get()
84 | # poc_detail = url_soup(url_poc).find_all('pre')
85 | # for each_detail in poc_detail:
86 | # # 针对文件名中不能出现的几个符号正则替换成空
87 | # r = re.compile('[/\?\\<>:\*]')
88 | # poc_name = r.sub('', each_name.string) + '.txt'
89 | # poc_save(poc_path, poc_name, each_detail.string.encode('utf-8'))
90 | # url_queue.task_done()
91 | # name_queue.task_done()
92 | # 方法二 : 使用while循环就可以自己设定线程数
93 | while True:
94 | # 开多少条线程,name_queue.qsize就从哪开始
95 | # 比如开100条,那么size直接从91开始,因为100条先内带了100个name
96 | if name_queue.qsize() > 0:
97 | poc_name = ''
98 | url_poc = url_queue.get()
99 | each_name = name_queue.get()
100 | poc_detail = url_soup(url_poc).find_all('pre')
101 | for each_detail in poc_detail:
102 | # 针对文件名中不能出现的几个符号正则替换成空
103 |                     r = re.compile(r'[/\?\\<>:\*]')  # raw string so the backslash is stripped as well
104 | poc_name = r.sub('', each_name.string) + '.txt'
105 | poc_save(poc_path, poc_name, each_detail.string.encode('utf-8'))
106 | print 'saving ' + poc_name
107 | url_queue.task_done()
108 | name_queue.task_done()
109 | print name_queue.qsize()
110 | else:
111 | break
112 |
113 |
114 | def get_poc(page):
115 | url_first = url_base + str(page)
116 | full_poc = url_soup(url_first).find_all(href=re.compile(r'poc'))
117 | for each_public in full_poc:
118 | print each_public.string + ' is done.'
119 | url_vul = url_index + each_public.attrs['href']
120 | print url_vul
121 | name_queue.put(each_public)
122 | url_queue.put(url_vul)
123 |
124 |
125 | def main():
126 | threads = []
127 | threads_poc = []
128 | # 一共有13页,所以开启13条线程获取
129 | for y in range(13):
130 | threads.append(PocGet())
131 | threads[y].setDaemon(True)
132 | threads[y].start()
133 | for i in range(13):
134 | threads[i].join()
135 | # 一共有191个POC
136 | # threads_num = int(raw_input('Please input the number of threads: '))
137 | for a in range(args.threads):
138 | threads_poc.append(SavePoc())
139 | threads_poc[a].setDaemon(True)
140 | threads_poc[a].start()
141 | for b in range(args.threads):
142 | threads_poc[b].join()
143 |
144 |
145 | # 获取URL源码
146 | def url_soup(url):
147 | # 网站禁止爬虫,需要伪装浏览器
148 | user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0'
149 | request = urllib2.Request(url)
150 | request.add_header('User-Agent', user_agent)
151 | response = urllib2.urlopen(request)
152 | soup = BeautifulSoup(response.read(), 'html.parser')
153 | return soup
154 |
155 | if __name__ == '__main__':
156 | st = time.time()
157 | url_index = 'http://www.beebeeto.com'
158 | print '----------begin----------'
159 | main()
160 | print '----------end------------'
161 | print (time.time() - st)
162 |
--------------------------------------------------------------------------------
/PythonSpider-BeeBeeto/README.MD:
--------------------------------------------------------------------------------
1 | * Target: the BeeBeeto website
2 | * Goal: download every public POC and save it locally
3 | * Approach (a minimal sketch of this pattern is appended at the end of this file):
4 |     * Crawl the public POC list pages first, then crawl the detail page of each POC
5 |     * Use multiple threads
6 |     * Support command-line arguments
7 |
8 |
9 | ----
10 | * BeeBeeto now redirects to Seebug, so this spider is finished
11 | * Implemented so far:
12 |     * Multithreading and simple command-line arguments
13 |
14 |
15 | ----
16 | 2016-03-09 21:57:29
17 |
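The approach above is a plain list-page to detail-page crawl. Below is a minimal sketch of that pattern, not taken from the scripts above: it uses `requests`/`BeautifulSoup` rather than `urllib2`, and `BASE_URL` is a placeholder, since beebeeto.com now redirects to Seebug and the original pages are gone.

```
# Minimal sketch of the list-page -> detail-page crawl described above.
# Assumptions: requests and bs4 are installed; BASE_URL is a placeholder,
# because the original site no longer serves these pages.
import os
import re

import requests
from bs4 import BeautifulSoup

BASE_URL = 'http://www.example.com'          # placeholder, not the real site
LIST_URL = BASE_URL + '/pdb/public/?page=%d'
SAVE_DIR = './BeeBeeto'
HEADERS = {'User-Agent': 'Mozilla/5.0'}      # the site rejected the default UA


def fetch_soup(url):
    resp = requests.get(url, headers=HEADERS, timeout=10)
    return BeautifulSoup(resp.content, 'html.parser')


def crawl_page(page):
    # every POC link on a list page contains 'poc' in its href
    for link in fetch_soup(LIST_URL % page).find_all(href=re.compile(r'poc')):
        detail = fetch_soup(BASE_URL + link['href'])
        name = re.sub(r'[/\\?<>:*]', '', link.get_text()) + '.txt'
        if not os.path.exists(SAVE_DIR):
            os.makedirs(SAVE_DIR)
        for pre in detail.find_all('pre'):
            with open(os.path.join(SAVE_DIR, name), 'a') as f:
                f.write(pre.get_text() + '\n')


if __name__ == '__main__':
    for page in range(1, 3):   # first two list pages, as an example
        crawl_page(page)
```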
--------------------------------------------------------------------------------
/PythonSpider-BuTian/BCSpider.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: BuTian Company Spider
6 | Author: Pyx
7 | Time: 2016年5月10日 15:16:16
8 | """
9 |
10 | import requests
11 | from bs4 import BeautifulSoup
12 | import time
13 | import random
14 | import argparse
15 |
16 | # 简约模式
17 | # for each in range(0, 2):
18 | # each = str(each)
19 | # url = "https://butian.360.cn/company/lists/page/%s" % each
20 | # resp = requests.get(url, verify=False)
21 | # # print resp.content
22 | # soup = BeautifulSoup(resp.content, 'html.parser', from_encoding='UTF-8')
23 | # tmp = soup.find_all('td')
24 | # for x in tmp:
25 | # # 判断厂商域名
26 | # if '.' in x.string:
27 | # print x.string
28 | # num.append(x.string)
29 | #
30 | # print len(num)
31 | #
32 |
33 |
34 | # 分析url
35 | def url_resp(url):
36 | # 设置一个随机的用户代理,模拟浏览器
37 | user_agent = ["Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0",
38 | "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
39 | "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
40 | "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
41 | "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"]
42 |
43 | header = {'User-Agent': random.choice(user_agent)}
44 | # https 证书验证
45 | resp = requests.get(url, verify=False, headers=header)
46 | return resp
47 |
48 |
49 | # 提取url页面中的company
50 | def url_soup(url):
51 | cpy = []
52 |
53 | soup = BeautifulSoup(url_resp(url).content, 'html.parser', from_encoding='UTF-8')
54 | _soup = soup.find_all('td')
55 |
56 | for each in _soup:
57 | if each.string == None:
58 | pass
59 | else:
60 | # 增加一个列表用于判断是否重复
61 | if '.' in each.string and each.string not in cpy:
62 | print each.string
63 | cpy.append(each.string)
64 | save_result(each.string.encode('utf-8'))
65 | # print each.string
66 |
67 | # 保存结果到txt文件
68 | def save_result(company):
69 | report_name = 'BuTianCompany' + time.strftime('%Y%m%d', time.localtime()) + '.txt'
70 | with open(report_name, 'a+') as f:
71 | f.write(company)
72 | f.write('\n')
73 |
74 |
75 | def main(p_num):
76 | for x in xrange(1, p_num+1):
77 | r_url = "https://butian.360.cn/company/lists/page/%s" % str(x)
78 | url_soup(r_url)
79 | print x
80 |
81 |
82 | if __name__ == '__main__':
83 | # 默认106页
84 | parser = argparse.ArgumentParser(prog='BCSpider', usage='BCSpider.py [option]',
85 | formatter_class=argparse.ArgumentDefaultsHelpFormatter,
86 | description="* BuTian Company Spider *")
87 | parser.add_argument('-p', metavar='Page', default=106, type=int, help='The end page for crawling')
88 | arg = parser.parse_args()
89 | page = arg.p
90 | main(page)
91 | print "-----------------It's done-------------------"
92 |
--------------------------------------------------------------------------------
/PythonSpider-BuTian/README.md:
--------------------------------------------------------------------------------
1 | # BCSpider
2 | **B**uTian **C**ompany **Spider**
3 |
4 | ## Usage
5 | ```
6 | usage: BCSpider.py [option]
7 |
8 | * BuTian Company Spider *
9 |
10 | optional arguments:
11 | -h, --help show this help message and exit
12 | -p Page The end page for crawling (default: 106)
13 | ```
14 |
15 | ## Instruction
16 | ```
17 | 1. Crawls 106 pages by default; change the value as needed
18 | 2. The output file is named by date, so crawling once per day is enough
19 | ```
20 |
21 | ## Example
22 | ```
23 | python BCSpider.py -p 10
24 | ```
25 |
--------------------------------------------------------------------------------
/PythonSpider-BuTianVul/ButianVul.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import requests
5 | import random, re, time
6 | from bs4 import BeautifulSoup
7 | from threading import Thread
8 | from Queue import Queue
9 | from pymongo import MongoClient
10 | from cmdline import parse_args
11 |
12 |
13 |
14 |
15 | # 获取 url 响应
16 | def url_resp(url):
17 | user_agent = ["Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0",
18 | "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
19 | "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
20 | "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
21 | "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"]
22 | # 随机用户代理
23 | header = {'User-Agent': random.choice(user_agent)}
24 | # https 证书验证
25 | resp = requests.get(url, verify=False, headers=header)
26 | return resp
27 |
28 | # 分析,获取数据
29 | def url_soup(url):
30 | _url = 'http://new.butian.360.cn/Company/u/'
31 | soup = BeautifulSoup(url_resp(url).content, 'html.parser', from_encoding='UTF-8')
32 | # print soup
33 | # _soup = soup.find_all('dd')
34 | # print _soup
35 | # for each in _soup:
36 | # print each.contents
37 | # 获取每一页所有的漏洞名,隐藏版
38 | # for x in each.contents:
39 | # print type(x)
40 | # print x.string
41 | # 获取每一页显示厂商的漏洞,没有则 pass
42 | # if each.a:
43 | # 厂商链接 /Company/u/ + 厂商名
44 | # print each.a.get('href')
45 | # else:
46 | # pass
47 | # 直接用 href 属性正则匹配出每一页显示的厂商链接
48 | _soup = soup.find_all(href=re.compile(r'Company'))
49 | # print _soup
50 | for each in _soup:
51 | # print each.string
52 | company_url = _url + each.string
53 | print company_url
54 | company.append(company_url)
55 | # 简单的去重,数据库判断
56 | result = company_collection.find_one({'name': each.string})
57 | if result:
58 | # 原先 pass 的话,说明漏洞厂商之前就存入至数据库中,pass 就不会更新时间,有新的漏洞也不会获取了
59 | # pass
60 | # 利用 update 更新时间,set 直接覆盖
61 | now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
62 | company_collection.update({'name': each.string}, {'$set': {'time': now}})
63 | else:
64 | now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
65 | company_collection.insert_one({'name': each.string, 'url': company_url, 'time': now})
66 | print 'It has been successfully imported into the database.'
67 |
68 |
69 |
70 | class CompanyUrl(Thread):
71 |
72 | def __init__(self):
73 | Thread.__init__(self)
74 |
75 | def run(self):
76 | while 1:
77 | if page_queue.empty(): break
78 | url = page_queue.get()
79 | url_soup(url)
80 | page_queue.task_done()
81 |
82 |
83 | # 根据厂商,获取漏洞详细标题
84 | def vul_soup(url):
85 | soup = BeautifulSoup(url_resp(url).content, 'html.parser', from_encoding='UTF-8')
86 | _soup = soup.find_all('var')
87 | vul = []
88 | if _soup:
89 |
90 | for each in _soup:
91 | # print each.contents[0]
92 | # print each.contents
93 | # 一些厂商的 St2 045 标题未显示,each.contents 长度为2,会出错,长度为3的不会出错
94 | if len(each.contents) == 3:
95 | vul.append(each.contents[0])
96 | # print vul
97 | # $inc 追加键值,只能用于追加整数、长整数、双精度浮点数
98 | # company_collection.update({'url': url}, {'$inc': {'vul': each.contents[0]}})
99 | # 简单的去重,数据库判断
100 | # result = company_collection.find_one({'vul': vul})
101 | # if result:
102 | # pass
103 | # else:
104 | # $set 追加键值,如果有新漏洞,直接覆盖
105 | now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
106 | company_collection.update({'url': url}, {'$set': {'vul': vul, 'time': now}})
107 | print vul
108 | print 'It has been successfully imported into the database.'
109 | else:
110 | company_collection.update({'url': url}, {'$set': {'vul': '公有SRC'}})
111 | print 'SRC has been successfully imported into the database.'
112 |
113 |
114 |
115 |
116 |
117 | class CompanyVul(Thread):
118 |
119 | def __init__(self):
120 | Thread.__init__(self)
121 |
122 | def run(self):
123 | while 1:
124 | if company_query.empty(): break
125 | url = company_query.get()
126 | vul_soup(url)
127 | company_query.task_done()
128 |
129 |
130 |
131 | def main(g_company, s_page, e_page, cn_thread, g_vul, e_vul, vn_thread):
132 |
133 | threads = []
134 | vul_threads = []
135 | # 获取漏洞厂商
136 | if g_company:
137 | # 最后一页 6082,后面的无法获取
138 | for x in xrange(s_page, e_page+1):
139 | page_queue.put('http://new.butian.360.cn/Loo/index/p/' + str(x) + '.html')
140 |
141 | for y in xrange(cn_thread):
142 | threads.append(CompanyUrl())
143 | threads[y].start()
144 |
145 | for z in threads:
146 | z.join()
147 |
148 | # 获取漏洞标题
149 | if g_vul:
150 | # e_vul 判断是否没有 vul 字段,没有说明是新增加的 漏洞厂商
151 | if e_vul:
152 | # 存在 vul 字段,说明是以前就增加的厂商,现在是新增漏洞
153 | today = time.strftime("%Y-%m-%d", time.localtime())
154 |
155 | for m in company_collection.find({'time': {'$gte': today}}):
156 | company_query.put(m['url'])
157 | else:
158 | i = 0
159 | for m in company_collection.find({'vul':{'$exists': False}}):
160 | company_query.put(m['url'])
161 | i = i + 1
162 | print 'Adding ' + str(i)
163 | print 'ALL datas have vul field.'
164 |
165 | for n in xrange(vn_thread):
166 | vul_threads.append(CompanyVul())
167 | vul_threads[n].start()
168 |
169 | for i in vul_threads:
170 | i.join()
171 |
172 |
173 | if __name__ == '__main__':
174 |
175 | client = MongoClient('localhost', 27017)
176 | db = client.company
177 | company_collection = db.company_url
178 | # 获取的厂商名
179 | company = []
180 | # 漏洞页队列,用于获取厂商名
181 | page_queue = Queue()
182 | # 厂商队列,用于获取具体漏洞
183 | company_query = Queue()
184 |
185 | args = parse_args()
186 | page_s = args.s
187 | page_e = args.e
188 | company_t = args.ct
189 | vul_t = args.vt
190 | company_g = args.company
191 | vul_g = args.vul
192 | vul_e = args.evul
193 | start_time = time.time()
194 | print '===============Begin==============='
195 | main(company_g, page_s, page_e, company_t, vul_g, vul_e, vul_t)
196 | print '===============End================='
197 | print (time.time() - start_time) / 60
--------------------------------------------------------------------------------
/PythonSpider-BuTianVul/README.md:
--------------------------------------------------------------------------------
1 | # BuTian Vulnerabilities Spider
2 | **Bu**Tian **Vul**nerabilities Spider
3 |
4 | ## Usage
5 | ```
6 | usage: ButianVul.py [options]
7 |
8 | * BuTian Vulnerabilities Spider *
9 |
10 | optional arguments:
11 | -h, --help show this help message and exit
12 | -s StartPage Start page for crawling (default: 1)
13 | -e EndPage End page for crawling (default: 2)
14 | -ct CompanyThread Num of company threads (default: 10)
15 | -vt VulThread Num of vul threads (default: 10)
16 | --company Company Spider (default: False)
17 | --vul Vulnerability Spider (default: False)
18 | --evul Vul Exists (default: False)
19 |
20 | ```
21 |
22 | ### Instruction
23 | ```
24 | 1. Python 2.7.11 && BeautifulSoup4 4.3.2 && pymongo && requests
25 | 2. s - start page, e - end page (inclusive), ct - threads for crawling vendors, vt - threads for crawling vulnerability titles
26 | 3. company - whether to crawl the vendors, vul - whether to crawl the vulnerability titles, evul - whether the vendors being crawled already have a vul field
27 | 4. Step one: crawl all the vendors and save them to the database
28 | 5. Step two: crawl the vulnerability titles and save them to the database
29 | 6. Step three: check whether any vulnerability titles were missed (see the query sketch at the end of this file)
30 | 7. Connections will drop from time to time (no proxy pool is used), so just resume after a while; once the database is built, crawling that day's vulnerabilities every evening (roughly pages 1-11) is enough
31 | 8. As of May 8, 32,000+ vulnerable vendors and 100,000+ vulnerabilities have been collected
32 | ```
33 |
34 | ### Example
35 | ```
36 | python ButianVul.py -s 1 -e 11 --company --vul --evul
37 | python ButianVul.py --vul
38 | ```
39 |
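Once ButianVul.py has filled MongoDB, the collection can be inspected directly. The sketch below shows the two lookups that the `--vul` / `--evul` switches rely on; it assumes the same local MongoDB instance and `company.company_url` collection that the script uses.

```
# Sketch: query the collection that ButianVul.py writes to.
# Assumes a MongoDB instance on localhost:27017, as in the script.
import time

from pymongo import MongoClient

client = MongoClient('localhost', 27017)
collection = client.company.company_url

# Vendors inserted or updated today (what --vul together with --evul re-checks).
today = time.strftime('%Y-%m-%d', time.localtime())
for doc in collection.find({'time': {'$gte': today}}):
    print('%s -> %s' % (doc['name'], doc.get('vul')))

# Vendors that never received a 'vul' field (what --vul without --evul picks up).
missing = [doc['url'] for doc in collection.find({'vul': {'$exists': False}})]
print('%d vendors still have no vul field' % len(missing))
```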
--------------------------------------------------------------------------------
/PythonSpider-BuTianVul/cmdline.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import argparse
5 | import sys
6 |
7 | def parse_args():
8 |
9 | # 创建一个命令行参数对象
10 | parser = argparse.ArgumentParser(prog='BuTianVulSpider', usage="ButianVul.py [options]",
11 | description="* BuTian Vulnerabilities Spider *",
12 | formatter_class=argparse.ArgumentDefaultsHelpFormatter)
13 |
14 | parser.add_argument('-s', metavar='StartPage', type=int, default=1, help='Start page for crawling')
15 | parser.add_argument('-e', metavar='EndPage', type=int, default=2, help='End page for crawling')
16 | parser.add_argument('-ct', metavar='CompanyThread', type=int, default=10, help='Num of company threads')
17 | parser.add_argument('-vt', metavar='VulThread', type=int, default=10, help='Num of vul threads')
18 | parser.add_argument('--company', default=False, action='store_true', help="Company Spider")
19 | parser.add_argument('--vul', default=False, action='store_true', help="Vulnerability Spider")
20 | parser.add_argument('--evul', default=False, action='store_true', help="Vul Exists")
21 |
22 |
23 |
24 | # 如果什么都没输入,就输入了一个脚本名,那么就是sys.argv只有一个参数
25 | if len(sys.argv) == 1:
26 | sys.argv.append('-h')
27 |
28 | # 返回一个保存命令行参数的命名空间
29 | args = parser.parse_args()
30 | return args
31 |
--------------------------------------------------------------------------------
/PythonSpider-LySRC/LYSRC/cmdline.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import argparse
5 | import sys
6 |
7 | def parse_args():
8 | # 创建一个命令行参数对象
9 | parser = argparse.ArgumentParser(prog='LySrcPV', usage="LySrc.py [options]",
10 | description="* LySRC Public Vulnerabilities *",
11 | formatter_class=argparse.ArgumentDefaultsHelpFormatter)
12 | parser.add_argument('-n', metavar='VulPages', type=int, default=6, help='Total pages')
13 | parser.add_argument('-s', metavar='VulSpeed', type=int, default=6, help='Speed of pages')
14 | parser.add_argument('-t', metavar='ThreadNum', type=int, default=10, help='Num of threads')
15 |
16 | # 什么都没输入情况,就输入一个脚本名,sys.argv 就只有一个参数
17 | if len(sys.argv) == 1:
18 | sys.argv.append('-h')
19 | # 返回一个保存命令行参数的命名空间
20 | args = parser.parse_args()
21 | return args
--------------------------------------------------------------------------------
/PythonSpider-LySRC/LYSRC/lysrc.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: LYSRC Public Vulnerability
6 | Author: Pyx
7 | Time: 2017年5月1日 09:37:06
8 | """
9 |
10 | import requests
11 | from bs4 import BeautifulSoup
12 | import random, re, time, os
13 | import urllib, urllib2
14 | from Queue import Queue
15 | from threading import Thread
16 | from cmdline import parse_args
17 |
18 |
19 | # 获取 url 响应
20 | def url_resp(url):
21 | user_agent = ["Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0",
22 | "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
23 | "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
24 | "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
25 | "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"]
26 | # 随机用户代理
27 | header = {'User-Agent': random.choice(user_agent)}
28 | # https 证书验证
29 | resp = requests.get(url, verify=False, headers=header)
30 | return resp
31 |
32 |
33 | # 分析,获取 vul 链接
34 | def url_soup(url):
35 | # 漏洞列表
36 | # global vul
37 | soup = BeautifulSoup(url_resp(url).content, 'html.parser', from_encoding='UTF-8')
38 | # 包含所有漏洞名的列表
39 | _soup = soup.find_all(href=re.compile('bugdetail'))
40 | # print _soup
41 | for each in _soup:
42 | each_url = 'http://sec.ly.com/' + each.get('href')
43 | print each.get('href')
44 | print each.div.string
45 | # vul.append(each.get('href'))
46 | vul_queue.put(each_url)
47 | save_result(each.get('href'))
48 | save_result(each.div.string.encode('utf-8'))
49 | # print len(vul)
50 |
51 |
52 | # 获取漏洞列表多线程
53 | class VulListThread(Thread):
54 | def __init__(self):
55 | Thread.__init__(self)
56 |
57 | def run(self):
58 | while 1:
59 | if page_queue.empty(): break
60 | vul_page = page_queue.get()
61 | url_soup(vul_page)
62 | page_queue.task_done()
63 |
64 |
65 | # 保存
66 | def save_result(vul):
67 | report_name = 'LYSRC' + time.strftime('%Y%m%d', time.localtime()) + '.txt'
68 | with open(report_name, 'a+') as f:
69 | f.write(vul)
70 | f.write('\n')
71 |
72 |
73 | # vul 详情页面
74 | def vul_detail(url):
75 | vul_soup = BeautifulSoup(url_resp(url).content, 'html.parser', from_encoding='UTF-8')
76 | _vul_soup = vul_soup.find_all('img')
77 | # print _vul_soup
78 | # 保存图片 url 列表
79 | img_list = []
80 | # 有些标题中有 \ 等符号,一次性全部正则替换
81 | title = re.sub(r'[/\?\\<>:\*]', '', vul_soup.find('h2').string)
82 | # print title.string
83 |
84 | # 创建目录,同目录下
85 | vul_path = './' + title
86 | if os.path.exists(vul_path):
87 | pass
88 | else:
89 | os.makedirs(vul_path)
90 | os.makedirs(vul_path+'/img')
91 |
92 | # 下载图片
93 | for each in _vul_soup:
94 | # print each.get('src')
95 | # 获取详情页面的图片地址,后缀有可能为 png
96 | img_url = re.search(r'^(http|https://)(.*?)com/img(.*?).(jpeg|png)', each.get('src'), re.M | re.I)
97 | if img_url:
98 | # print type(img_url.group())
99 | # with open(vul_path+'/img/'+img_url.group()[-15:], 'a+') as jpeg:
100 | # 下载图片会损坏
101 | # jpeg.write(urllib2.urlopen(img_url.group()).read())
102 | # 图片保存至 img 文件夹下,判断是否存在
103 | if os.path.exists(vul_path+'/img/'+img_url.group()[-15:]):
104 | print title + 'Already Exists'
105 | pass
106 | else:
107 | urllib.urlretrieve(img_url.group(), vul_path+'/img/'+img_url.group()[-15:], call_back)
108 |
109 | img_list.append(str(img_url.group()))
110 |
111 | # 下载详情页面方法一,利用 urlretrieve
112 | # urllib.urlretrieve(url, '1.html', call_back)
113 |
114 | # 下载详情页面方法二,直接写,w+ 可读写,文件重写
115 | # 漏洞重名,这里采取了 url 链接后五位来避免重名
116 | with open(vul_path+'/'+title+url[-5:]+'.html', 'w+') as f:
117 | # 控制写入的内容,利用 for 循环一次性修改完成在进行写入
118 | html = str(vul_soup).replace('avatar" src=', 'avatar" src1=')
119 |
120 | for x in range(len(img_list)):
121 | html = html.replace('src="'+img_list[x], 'src="./img/'+img_list[x][-15:])
122 |
123 | f.write(html)
124 |
125 |
126 | # 下载进度,a 已下载数据块,b 数据块大小,c 远程文件大小
127 | def call_back(a, b, c):
128 | per = 100 * a * b / c
129 | if per > 100:
130 | per = 100
131 | print '%.2f%%' % per
132 |
133 |
134 | # 获取漏洞详情多线程
135 | class VulThread(Thread):
136 | def __init__(self):
137 | Thread.__init__(self)
138 |
139 | def run(self):
140 | while 1:
141 | if vul_queue.empty(): break
142 | vul_url = vul_queue.get()
143 | vul_detail(vul_url)
144 | vul_queue.task_done()
145 |
146 |
147 | def main(page_num, speed_num, thread_num):
148 | # global vul
149 | # vul = [] # 用队列替代了全局列表
150 | # vul 详情网址队列
151 | # 线程列表
152 | threads = []
153 | page_threads = []
154 | # 保存 vul 列表文件,单线程
155 | # for x in xrange(1, num+1):
156 | # _url = 'http://sec.ly.com/bugs?page=' + str(x)
157 | # url_soup(_url)
158 | # 多线程,保存 vul 列表文件
159 | for x in xrange(1, page_num+1):
160 | page_queue.put('http://sec.ly.com/bugs?page=' + str(x))
161 |
162 | for i in xrange(speed_num):
163 | page_threads.append(VulListThread())
164 | page_threads[i].start()
165 |
166 | for n in page_threads:
167 | n.join()
168 |
169 | save_result('Total:' + str(vul_queue.qsize()))
170 | print '=============================================='
171 | # 保存 vul 详情文件,单线程
172 | # for y in xrange(0, len(vul)):
173 | # vul_detail('http://sec.ly.com/'+str(vul[y]))
174 | # 多线程
175 | for y in xrange(thread_num):
176 | threads.append(VulThread())
177 | threads[y].start()
178 | # 线程阻塞
179 | for z in threads:
180 | z.join()
181 |
182 | if __name__ == '__main__':
183 | vul_queue = Queue()
184 | page_queue = Queue()
185 | start = time.time()
186 | args = parse_args()
187 | page_num = args.n
188 | speed_num = args.s
189 | thread_num = args.t
190 | main(page_num, speed_num, thread_num)
191 | print '====================Done====================='
192 | print time.time() - start
193 |
--------------------------------------------------------------------------------
/PythonSpider-LySRC/README.md:
--------------------------------------------------------------------------------
1 | # LySRC Public Vulnerabilities
2 | **Ly**SRC **P**ublic **Vulnerabilities**
3 |
4 | ## Usage
5 | ```
6 | usage: LySrc.py [options]
7 |
8 | * LySRC Public Vulnerabilities *
9 |
10 | optional arguments:
11 | -h, --help show this help message and exit
12 | -n VulPages Total pages (default: 6)
13 | -s VulSpeed Speed of pages (default: 6)
14 | -t ThreadNum Num of threads (default: 10)
15 |
16 | ```
17 |
18 | ### Instruction
19 | ```
20 | 1. Python 2.7.11 && BeautifulSoup4 4.3.2
21 | 2. Thread counts are configurable: VulSpeed threads fetch the vulnerability list pages, ThreadNum threads fetch the vulnerability details (this pipeline is sketched at the end of this file)
22 | ```
23 |
24 | ### Example
25 | ```
26 | python lysrc.py -s 5
27 | ```
28 |
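The two thread pools form a two-stage pipeline: the VulSpeed threads push detail-page URLs into a queue, and the ThreadNum threads drain it once the first stage has finished. Below is a minimal sketch of that shape with the site-specific parsing stripped out; the worker bodies and URL strings are placeholders, and it uses a non-blocking `get_nowait()` instead of the `empty()` check in lysrc.py.

```
# Sketch of the two-stage queue pipeline used by lysrc.py:
# stage 1 (VulSpeed threads) fills vul_queue, stage 2 (ThreadNum threads) drains it.
from threading import Thread
try:
    from queue import Queue, Empty      # Python 3
except ImportError:
    from Queue import Queue, Empty      # Python 2

page_queue = Queue()
vul_queue = Queue()


def list_worker():
    while True:
        try:
            page = page_queue.get_nowait()
        except Empty:
            break
        # ... parse the list page here and push every detail URL ...
        vul_queue.put('detail-url-for-page-%s' % page)   # placeholder
        page_queue.task_done()


def detail_worker():
    while True:
        try:
            url = vul_queue.get_nowait()
        except Empty:
            break
        # ... fetch and save the detail page here ...
        print('would fetch %s' % url)                     # placeholder
        vul_queue.task_done()


def run(pages=6, list_threads=6, detail_threads=10):
    for p in range(1, pages + 1):
        page_queue.put(p)
    stage1 = [Thread(target=list_worker) for _ in range(list_threads)]
    for t in stage1:
        t.start()
    for t in stage1:
        t.join()          # stage 2 only starts once the list is complete
    stage2 = [Thread(target=detail_worker) for _ in range(detail_threads)]
    for t in stage2:
        t.start()
    for t in stage2:
        t.join()


if __name__ == '__main__':
    run()
```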
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/IVSpider-Wooyun/IVSpider.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: Ignored Vulnerabilities Spider for Wooyun
6 | Author: PyxYuYu
7 | Time: 2016年3月11日 23:05:44
8 | """
9 |
10 | import urllib2
11 | from bs4 import BeautifulSoup
12 | import re
13 | import time
14 | from cmdline import parse_args
15 |
16 |
17 | # 返回soup,获取url源码
18 | def url_res(url):
19 | user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0"
20 | req = urllib2.Request(url)
21 | req.add_header("User-Agent", user_agent)
22 | res = urllib2.urlopen(req).read()
23 | return res
24 |
25 |
26 | # 分析源码,正则或者BeautifulSoup模块
27 | def get_url(page):
28 |
29 | vul_public = "http://wooyun.org/bugs/new_public/page/" + str(page)
30 | # 测试用
31 | # print url_soup(vul_public).find_all('a')
32 | # 方法一:先找这个tag,找到后在找href,然后匹配
33 | # for each in BeautifulSoup(url_res(vul_public),'html.parser', from_encoding='UTF-8').find_all('a'):
34 | # # print each['href']
35 | # # 判断一下是什么类型 unicode
36 | # # print type(each['href'])
37 | # # /bugs/wooyun-2016-0176846 正则匹配
38 | # # 会有2个网址匹配出来,带个#就只能匹配出一个了
39 | # url_re = re.compile(r'/bugs/.*\d{6}#')
40 | # # 返回的是一个列表,没匹配到返回空列表
41 | # each_url = url_re.findall(each['href'])
42 | # if each_url != []:
43 | # # 每个列表只有一个元素
44 | # # vul_list.append(each_url[0])
45 | # print each_url[0]
46 | # # 寻找忽略漏洞
47 | # if get_vul(each_url[0]) > 0:
48 | # vul_detail(each_url[0])
49 | # else:
50 | # print "Didn't find."
51 | # else:
52 | # pass
53 | # 方法二:更加简单,直接匹配了找
54 | for each in BeautifulSoup(url_res(vul_public), 'html.parser', from_encoding='UTF-8').find_all(href=re.compile(
55 | r'/bugs/.*\d{6}'), title=None):
56 | # print each.string
57 | # 厂商忽略的漏洞
58 | if get_vul(each['href']) > 0:
59 | print each.string
60 | # vul_detail(each['href'])
61 | print "http://www.wooyun.org/" + each['href']
62 | print "The vulnerability is ignored."
63 | else:
64 | # print "The vulnerability is fixed."
65 | pass
66 |
67 |
68 | def get_vul(url):
69 |
70 | url_ignored = "http://www.wooyun.org/" + url
71 | return url_res(url_ignored).find("忽略")
72 |
73 |
74 | def vul_detail(url):
75 |
76 | url_vul = "http://www.wooyun.org/" + url
77 | soup = BeautifulSoup(url_res(url_vul), 'html.parser', from_encoding='UTF-8')
78 | # print soup.find_all('title')[0].string
79 | for each in soup.find_all('code'):
80 | # 因为each.string 无法获取其内包含多个子节点的内容,返回None
81 | # 所以用 get_text()
82 | print each.get_text()
83 |
84 |
85 | def main(s_num, e_num):
86 |
87 | for i in range(s_num, e_num):
88 | get_url(i)
89 |
90 | print "-------------------It's done.--------------------"
91 |
92 |
93 | if __name__ == "__main__":
94 | start_time = time.time()
95 | # 创建命令行参数 Namespace对象
96 | args = parse_args()
97 | main(args.s, args.e)
98 | cost_time = time.time() - start_time
99 | print "Current spider is finished in " + str(int(cost_time / 60)) + " mins %.2f seconds." % (cost_time % 60)
100 |
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/IVSpider-Wooyun/IVSpider02.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: Ignored Vulnerabilities Spider for Wooyun
6 | Author: PyxYuYu
7 | Time: 2016年3月11日 23:05:44
8 | """
9 |
10 | import urllib2
11 | from bs4 import BeautifulSoup
12 | from threading import Thread
13 | from Queue import Queue
14 | import re
15 | import time
16 | from cmdline import parse_args
17 |
18 |
19 | # 保存url的队列
20 | url_queue = Queue()
21 | # 保存name的队列
22 | name_queue = Queue()
23 |
24 |
25 | # 多线程
26 | class IVSThread(Thread):
27 |
28 | def __init__(self):
29 | Thread.__init__(self)
30 |
31 | def run(self):
32 |
33 | while True:
34 | # 厂商忽略的漏洞
35 | # 方法一:
36 | # if url_queue.qsize() > 0:
37 | # print url_queue.qsize()
38 | # url_ignored = url_queue.get()
39 | # name_ignored = name_queue.get()
40 | # if get_vul(url_ignored) > 0:
41 | # print url_ignored
42 | # print name_ignored
43 | # print "The vulnerability is ignored."
44 | # url_queue.task_done()
45 | # else:
46 | # # print "The vulnerability is fixed."
47 | # # pass
48 | # continue
49 | # else:
50 | # # print 'done'
51 | # break
52 | # 方法二:
53 | if url_queue.empty(): break
54 | url_ignored = url_queue.get()
55 | name_ignored = name_queue.get()
56 | if get_vul(url_ignored) > 0:
57 | print url_queue.qsize()
58 | print url_ignored
59 | print name_ignored
60 | print "The vulnerability is ignored."
61 | else:
62 | continue
63 | url_queue.task_done()
64 | name_queue.task_done()
65 |
66 |
67 | # 返回soup,获取url源码
68 | def url_res(url):
69 | user_agent = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0"
70 | req = urllib2.Request(url)
71 | req.add_header("User-Agent", user_agent)
72 | try:
73 | res = urllib2.urlopen(req).read()
74 | except Exception, reason:
75 | pass
76 |
77 | # 没有检测到异常,则执行
78 | else:
79 | return res
80 |
81 |
82 | # 分析源码,正则或者BeautifulSoup模块
83 | def get_url(page):
84 |
85 | vul_public = "http://wooyun.org/bugs/new_public/page/" + str(page)
86 | # 测试用
87 | # print url_soup(vul_public).find_all('a')
88 | # 方法一:先找这个tag,找到后在找href,然后匹配
89 | # for each in BeautifulSoup(url_res(vul_public),'html.parser', from_encoding='UTF-8').find_all('a'):
90 | # # print each['href']
91 | # # 判断一下是什么类型 unicode
92 | # # print type(each['href'])
93 | # # /bugs/wooyun-2016-0176846 正则匹配
94 | # # 会有2个网址匹配出来,带个#就只能匹配出一个了
95 | # url_re = re.compile(r'/bugs/.*\d{6}#')
96 | # # 返回的是一个列表,没匹配到返回空列表
97 | # each_url = url_re.findall(each['href'])
98 | # if each_url != []:
99 | # # 每个列表只有一个元素
100 | # # vul_list.append(each_url[0])
101 | # print each_url[0]
102 | # # 寻找忽略漏洞
103 | # if get_vul(each_url[0]) > 0:
104 | # vul_detail(each_url[0])
105 | # else:
106 | # print "Didn't find."
107 | # else:
108 | # pass
109 | # 方法二:更加简单,直接匹配了找
110 | for each in BeautifulSoup(url_res(vul_public), 'html.parser', from_encoding='UTF-8').find_all(href=re.compile(
111 | r'/bugs/.*\d{6}'), title=None):
112 | url = "http://www.wooyun.org/" + each['href']
113 | url_queue.put(url)
114 | name_queue.put(each.string)
115 |
116 |
117 | def get_vul(url):
118 |
119 | return url_res(url).find("忽略")
120 |
121 |
122 | def vul_detail(url):
123 |
124 | # url_vul = "http://www.wooyun.org/" + url
125 | soup = BeautifulSoup(url_res(url), 'html.parser', from_encoding='UTF-8')
126 | # print soup.find_all('title')[0].string
127 | for each in soup.find_all('code'):
128 | # 因为each.string 无法获取其内包含多个子节点的内容,返回None
129 | # 所以用 get_text()
130 | print each.get_text()
131 |
132 |
133 | def main(s_num, e_num, t_num):
134 |
135 | threads = []
136 | for i in range(s_num, e_num):
137 | get_url(i)
138 |
139 | print url_queue.qsize()
140 |
141 | for x in range(t_num):
142 | threads.append(IVSThread())
143 | # threads[x].setDaemon(True)
144 | threads[x].start()
145 |
146 | for z in range(t_num):
147 | threads[z].join()
148 | # url_queue.join() 如果用队列来阻塞主线程的话,需要在非忽略的get也task_done
149 |
150 | if __name__ == "__main__":
151 | start_time = time.time()
152 | # 创建命令行参数 Namespace对象
153 | args = parse_args()
154 | main(args.s, args.e, args.t)
155 | # print "--------------------It's done.---------------------"
156 | # cost_time = time.time() - start_time
157 | # print "Current spider is finished in %d mins %.2f seconds." % (cost_time / 60, cost_time % 60)
158 |
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/IVSpider-Wooyun/README.md:
--------------------------------------------------------------------------------
1 | # IVSpider
2 | **I**gnored **V**ulnerabilities **S**pider for Wooyun
3 | ## Version
4 | ```
5 | IVSpider.py ------- single-threaded version, deprecated
6 | IVSpider02.py ----- multithreaded version; download it and rename it to IVSpider.py
7 | ```
8 | ## Usage
9 | ```
10 | usage: IVSpider.py [options]
11 |
12 | *Ignored Vulnerabilities Spider for Wooyun.*
13 |
14 | optional arguments:
15 | -h, --help show this help message and exit
16 | -s StartPage The start page of Wooyun (default: 1)
17 | -e EndPage The end page of Wooyun, Not including (default: 2)
18 | -t Threads Num of threads for spider, 10 for default (default: 10)
19 |
20 | ```
21 | ## Instruction
22 | ```
23 | 1. Python 2.7.x && BeautifulSoup4==4.3.2
24 | 2. Configurable start and end pages for the search
25 | 3. Configurable number of threads
26 | ```
27 | ## Example
28 | ```
29 | python IVSpider.py -s 10 -e 30 -t 20
30 | ```
31 | ## Bug
32 | ```
33 | 1. It can be misled when "忽略" (ignored) merely appears in the comment section
34 | 2. With a large page range, some worker threads may hang for days and fail to shut down cleanly (cause unknown, possibly a Queue bug; a sentinel-based shutdown is sketched at the end of this file)
35 | ```
36 |
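On bug 2: the workers poll `Queue.empty()` / `qsize()`, which can leave a thread blocked on `get()` and never exit. One common alternative (not what IVSpider02.py does) is to push one sentinel per worker so every thread has a guaranteed exit point; a minimal sketch:

```
# Sketch: sentinel-based shutdown for queue workers, as an alternative to
# polling qsize()/empty(). One sentinel per worker guarantees every thread exits.
from threading import Thread
try:
    from queue import Queue      # Python 3
except ImportError:
    from Queue import Queue      # Python 2

STOP = object()                  # unique sentinel value


def worker(q):
    while True:
        item = q.get()
        if item is STOP:         # no more work: exit cleanly
            q.task_done()
            break
        # ... fetch the page and check for the ignored marker here ...
        print('processing %s' % item)
        q.task_done()


def run(urls, num_threads=10):
    q = Queue()
    for url in urls:
        q.put(url)
    for _ in range(num_threads):
        q.put(STOP)              # one sentinel per worker
    threads = [Thread(target=worker, args=(q,)) for _ in range(num_threads)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()


if __name__ == '__main__':
    run(['http://www.wooyun.org/bugs/wooyun-2016-0000001'], num_threads=3)
```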
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/IVSpider-Wooyun/cmdline.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | import argparse
5 | import sys
6 |
7 |
8 | # 设置命令行参数
9 | def parse_args():
10 | # argparse.ArgumentDefaultsHelpFormatter 最常用的输出格式
11 | parser = argparse.ArgumentParser(prog='IVSpider', formatter_class=argparse.ArgumentDefaultsHelpFormatter,
12 |                                      description="*Ignored Vulnerabilities Spider for Wooyun.*",
13 | usage="IVSpider.py [options]")
14 | # metavar 参数在帮助信息的名字
15 | parser.add_argument('-s', metavar='StartPage', type=int, default=1, help="The start page of Wooyun")
16 | parser.add_argument('-e', metavar='EndPage', type=int, default=2, help="The end page of Wooyun, Not including")
17 | parser.add_argument('-t', metavar='Threads', type=int, default=10, help="Num of threads for spider, 10 for default")
18 |
19 | # 如果cmd接受到的参数只有1,也就是只有一个脚本名,那么就添加一个 -h/-help 的命令
20 | if len(sys.argv) == 1:
21 | sys.argv.append('-h')
22 | args = parser.parse_args()
23 | return args
24 |
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/README.md:
--------------------------------------------------------------------------------
1 | ## Projects
2 | ```
3 | 1. IVSpider - search for Wooyun vulnerabilities ignored by the vendor
4 | 
5 | 2. WVSearch - keyword search over Wooyun vulnerabilities
6 | ```
7 | ----
8 | 2016-03-14 21:47:58
9 |
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/WVSearch/README.md:
--------------------------------------------------------------------------------
1 | # WVSearch
2 | **W**ooyun **V**ulnerabilities **S**earch
3 | ## Usage
4 | ```
5 | usage: WVSearch.py [options]
6 |
7 | * Wooyun Vulnerabilities Search *
8 |
9 | optional arguments:
10 | -h, --help show this help message and exit
11 | -s StartPage Start page for searching (default: 1)
12 | -e EndPage End page for searching (default: 10)
13 | -t ThreadNum Num of threads (default: 10)
14 | -k KeyWord Keywords for searching (default: SQL|XSS|CSRF)
15 |   --browser         Open web browser to view report after search was
16 | finished. (default: False)
17 | ```
18 | ## Instruction
19 | ```
20 | 1. Python 2.7.x && BeautifulSoup4==4.3.2
21 | 2. Configurable start and end pages for the search
22 | 3. Configurable number of threads, 10 by default
23 | 4. Configurable search keywords, separated by '|' and wrapped in double quotes
24 | 5. Results are saved as an HTML file that can be opened automatically when the search finishes (the templating is sketched at the end of this file)
25 | ```
26 | ## Example
27 | ```
28 | python WVSearch.py -s 10 -e 100 -t 20 -k "中国|SQL|XSS|xss" --browser
29 | ```
30 |
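The HTML report is assembled with `string.Template` substitution (see WVSearch.py and report.py). A minimal sketch of the same mechanism; the template strings and sample results below are stand-ins, not the markup from report.py.

```
# Sketch: building a report with string.Template, the mechanism WVSearch.py uses.
# The templates and sample data are stand-ins, not the real report.py markup.
from string import Template

row_tpl = Template('<p><a href="${url}">${name}</a></p>')
page_tpl = Template('<html><body><h1>WVSearch Report</h1>'
                    '<p>${total} results</p>${rows}</body></html>')

results = [
    ('Example SQL injection', 'http://www.example.com/bugs/1'),
    ('Example XSS', 'http://www.example.com/bugs/2'),
]

rows = ''.join(row_tpl.substitute(name=name, url=url) for name, url in results)
html_doc = page_tpl.substitute(total=len(results), rows=rows)

with open('report_demo.html', 'w') as f:
    f.write(html_doc)
```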
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/WVSearch/WVSearch.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 | """
5 | Function: Wooyun Vulnerabilities Search
6 | Author: PyxYuYu
7 | Time: 2016年3月14日 20:10:37
8 | """
9 |
10 | import urllib2
11 | from bs4 import BeautifulSoup
12 | from Queue import Queue
13 | from threading import Thread
14 | from string import Template
15 | from cmdline import parse_args
16 | from report import TEMPLATE_html, TEMPLATE_result
17 | import random
18 | import re
19 | import time
20 | import os, sys
21 | import webbrowser
22 |
23 |
24 | # url页面解析
25 | def url_res(url):
26 | req = urllib2.Request(url)
27 | # 随机User-Agent
28 | ua_list = ["Mozilla/5.0 (Windows NT 6.1; WOW64; rv:44.0) Gecko/20100101 Firefox/44.0",
29 | "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)",
30 | "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
31 | "Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11",
32 | "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)"]
33 | user_agent = random.choice(ua_list)
34 | req.add_header('User-Agent', user_agent)
35 | while 1:
36 | try:
37 | res = urllib2.urlopen(req)
38 | except Exception, e:
39 | # pass
40 | continue
41 | else:
42 | return res
43 |
44 |
45 | # 创建一个队列,用于存放所有的url页面
46 | def page_queue(s_page, e_page):
47 | url = "http://wooyun.org/bugs/page/"
48 | # 循环放入到队列中,不包括e_page
49 | for i in xrange(s_page, e_page):
50 | url_num = url + str(i)
51 | url_queue.put(url_num)
52 | # 最后一个页面也放入队列中
53 | e_url = url + str(e_page)
54 | url_queue.put(e_url)
55 |
56 |
57 | # url页面分析,寻找关键字
58 | def search_key(url, key):
59 |
60 | # IDE输入中文是utf-8
61 | # key = unicode(key, 'utf-8')
62 | # cmd输入中文是gb18030
63 | key = unicode(key, 'gb18030')
64 | key_list = key.split('|')
65 | html = url_res(url).read()
66 | soup = BeautifulSoup(html, 'html.parser', from_encoding='UTF-8')
67 | url_tag = soup.find_all(href=re.compile(r'/bugs/.*\d{6}'), title=None)
68 | global _str
69 | # 检测是否有重名的漏洞被写入到_str中
70 |
71 | for each in url_tag:
72 | # print each.string
73 | for i in xrange(len(key_list)):
74 | if key_list[i] in each.string:
75 | # name = each.string.encode('gb18030')
76 | _name = each.string
77 | # 检测是否有重名的漏洞
78 | if _name not in test_name:
79 | test_name.append(_name)
80 | _url = "http://www.wooyun.org/" + each['href']
81 | _str += t_result.substitute({'name': _name, 'url': _url})
82 |
83 |
84 | # 多线程采集url分析关键字漏洞
85 | class SearchThread(Thread):
86 |
87 | def __init__(self, key):
88 | Thread.__init__(self)
89 | # super(Thread, self).__init__()
90 | self.key = key
91 | # self.lock = Lock()
92 |
93 | def run(self):
94 | # global start_time
95 | while True:
96 | if url_queue.empty(): break
97 | url_now = url_queue.get()
98 | # print url_now # 测试用
99 | search_key(url_now, self.key)
100 | url_queue.task_done()
101 |
102 | # 最下面显示实时信息,清除缓冲
103 | # self.lock.acquire() 因为光标会重新回到开头,队列又是安全的,所以不用锁
104 | msg = '%s remaining in %.2f seconds' % (
105 | url_queue.qsize(), time.time() - start_time)
106 | # 用 \r 来保证每个线程输出都在同一行,光标从新回到开头
107 | sys.stdout.write('\r' + ' ' * 40)
108 | sys.stdout.flush()
109 | sys.stdout.write('\r' + msg)
110 | # 清除缓冲一般都用于实时监测
111 | sys.stdout.flush()
112 | # self.lock.release()
113 |
114 |
115 | def main(num_t, key):
116 |
117 | threads = []
118 | for x in xrange(num_t):
119 | threads.append(SearchThread(key))
120 | threads[x].start()
121 |
122 | for y in threads:
123 | y.join()
124 |
125 |
126 | if __name__ == "__main__":
127 |
128 | url_queue = Queue()
129 | test_name = []
130 | start_time = time.time()
131 | args = parse_args()
132 | s_page = args.s
133 | e_page = args.e
134 | n_thread = args.t
135 | keywords = args.k
136 | page_queue(s_page, e_page)
137 |
138 | # 用于保存所有的漏洞名和漏洞链接
139 | _str = ''
140 | _content = ''
141 | t_html = Template(TEMPLATE_html)
142 | t_result = Template(TEMPLATE_result)
143 |
144 | main(n_thread, keywords)
145 | print "\n--------------------It's done.---------------------"
146 | cost_time = time.time() - start_time
147 | _content += _str
148 | cost_min = int(cost_time / 60)
149 | cost_seconds = '%.2f' % (cost_time % 60)
150 | total_name = len(test_name)
151 | # 模板替换
152 | html_doc = t_html.substitute({'cost_min': cost_min, 'cost_seconds': cost_seconds, 'total_name': total_name,
153 | 'content': _content})
154 | key_name = re.sub(r'\|', '_', keywords)
155 | report_name = key_name + '_' + str(s_page) + '_' + str(e_page) + '_' + \
156 | time.strftime('%Y%m%d_%H%M%S', time.localtime()) + '.html'
157 | with open('report/%s' % report_name, 'w') as outFile:
158 | # 输出保存到html,Unicode要编码成gb18030,否则乱码
159 | outFile.write(html_doc.encode('gb18030'))
160 | print "Current search is finished in %d mins %.2f seconds." % (cost_time / 60, cost_time % 60)
161 |
162 | if args.browser:
163 | try:
164 | webbrowser.open_new_tab(os.path.abspath('report/%s' % report_name))
165 | except:
166 | print '[ERROR] Fail to open file with web browser: report/%s' % report_name
167 |
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/WVSearch/cmdline.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding=utf-8
3 |
4 |
5 | import argparse
6 | import sys
7 |
8 | def parse_args():
9 |
10 | # 创建一个命令行参数对象
11 | parser = argparse.ArgumentParser(prog='WVSearch', usage="WVSearch.py [options]",
12 | description="* Wooyun Vulnerabilities Search *",
13 | formatter_class=argparse.ArgumentDefaultsHelpFormatter)
14 |
15 | parser.add_argument('-s', metavar='StartPage', type=int, default=1, help='Start page for searching')
16 | parser.add_argument('-e', metavar='EndPage', type=int, default=10, help='End page for searching')
17 | parser.add_argument('-t', metavar='ThreadNum', type=int, default=10, help='Num of threads')
18 | parser.add_argument('-k', metavar='KeyWord', type=str, default='SQL|XSS|CSRF', help='Keywords for searching')
19 | parser.add_argument('--browser', default=False, action='store_true',
20 |                         help="Open web browser to view report after search was finished.")
21 |
22 | # 如果什么都没输入,就输入了一个脚本名,那么就是sys.argv只有一个参数
23 | if len(sys.argv) == 1:
24 | sys.argv.append('-h')
25 |
26 | # 返回一个保存命令行参数的命名空间
27 | args = parser.parse_args()
28 | return args
29 |
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/WVSearch/report.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 |
3 | TEMPLATE_html = """
4 |
5 |
6 | WVSearch Report
7 |
12 |
13 |
14 | Welcome to use the Wooyun Vulnerabilities Search. * WVSearch *
15 | Current Search was finished in ${cost_min} min ${cost_seconds} seconds.
16 | ${total_name} vulnerabilities match the requirements of searching in total.
17 | ${content}
18 |
19 |
20 | """
21 |
22 | TEMPLATE_result = """
23 | ${name}
24 | """
25 |
--------------------------------------------------------------------------------
/PythonSpider-Wooyun/WVSearch/report/.gitigonre:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PyxYuYu/MyBlog/77b644cce4ac0c1ad4befd00ccf028fa023fb10b/PythonSpider-Wooyun/WVSearch/report/.gitigonre
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PyxYuYu-blog
2 | > Use `issue`s to record and share each day from now on.
3 |
4 |
5 | I had long wanted to set up a blog, whether for keeping records or for sharing; to time, written words are surely the best witness, yet I never managed to follow through.
6 | A while back, while searching Zhihu for `GitHub`, I stumbled on a question about blogging with `issue`s. After reading Yu Bo's answer, it seemed that, for me, `issue`s
7 | might well be the best choice.
8 |
9 | ## [Issues](https://github.com/PyxYuYu/MyBlog/issues)
10 | * [安全舆情 -- 关于补天的轮子][109]
11 | * [动态爬虫(二)][108]
12 | * [动态爬虫(一)][107]
13 | * [一些知识点(一)][106]
14 | * [渗透测试总结(二)][105]
15 | * [渗透测试总结(一)][104]
16 | * [内网渗透][103]
17 | * [业务逻辑漏洞][102]
18 | * [XXE & XPath][101]
19 | * [SSRF][100]
20 | * [代码注入 & 文件包含][99]
21 | * [CSRF & 文件上传][98]
22 | * [XSS & WAF][97]
23 | * [XSS(一)][96]
24 | * [注入(七)& WAF][95]
25 | * [注入(六)][94]
26 | * [注入(五)][93]
27 | * [注入(四)][92]
28 | * [注入(三)][91]
29 | * [注入(二)][90]
30 | * [注入(一)][89]
31 | * [SQL语句(二)][88]
32 | * [SQL语句(一)][87]
33 | * [OWASP Top 10(二)][86]
34 | * [OWASP Top 10(一)][85]
35 | * [一个决定][84]
36 | * [Drops Wiki -- 服务配置(四)][83]
37 | * [Drops Wiki -- 服务配置(三)][82]
38 | * [Drops Wiki -- 服务配置(二)][81]
39 | * [Drops Wiki -- 服务配置][80]
40 | * [DSScan(完结)][79]
41 | * [DSScan(七)][78]
42 | * [DSScan(六)][77]
43 | * [DSScan(五)][76]
44 | * [DSScan(四)][75]
45 | * [DSScan(三)][74]
46 | * [DSScan(二)][73]
47 | * [DSScan(一)][72]
48 | * [元旦快乐][71]
49 | * [二零一六][70]
50 | * [SqlmapApi学习][69]
51 | * [DjangoProject-Blog完结篇][68]
52 | * [Django学习总结(七)][67]
53 | * [Django学习总结(六)][66]
54 | * [Django学习总结(五)][65]
55 | * [Django学习总结(四)][64]
56 | * [Django学习总结(三)][63]
57 | * [Django学习总结(二)][62]
58 | * [Django学习总结(一)][61]
59 | * [Django学习记录(二十四):Django by example -- Blog(二十)][60]
60 | * [Django学习记录(二十三):Django by example -- Blog(十九)][59]
61 | * [Django学习记录(二十二):Django by example -- Blog(十八)][58]
62 | * [Django学习记录(二十一):Django by example -- Blog(十七)][57]
63 | * [Django学习记录(二十):Django by example -- Blog(十六)][56]
64 | * [Django学习记录(十九):Django by example -- Blog(十五)][55]
65 | * [Django学习记录(十八):Django by example -- Blog(十四)][54]
66 | * [Django学习记录(十七):Django by example -- Blog(十三)][53]
67 | * [Django学习记录(十六):Django by example -- Blog(十二)][52]
68 | * [Django学习记录(十五):Django by example -- Blog(十一)][51]
69 | * [Django学习记录(十四):Django by example -- Blog(十)][50]
70 | * [Django学习记录(十三):Django by example -- Blog(九)][49]
71 | * [Django学习记录(十二):Django by example -- Blog(八)][48]
72 | * [Django学习记录(十一):Django by example -- Blog(七)][47]
73 | * [Django学习记录(十):Django by example -- Blog(六)][46]
74 | * [Django学习记录(九):Django by example -- Blog(五)][45]
75 | * [Django学习记录(八):Django by example -- Blog(四)][44]
76 | * [Django学习记录(七):Django by example -- Blog(三)][43]
77 | * [Django学习记录(六):Django by example -- Blog(二)][42]
78 | * [Django学习记录(五):Django by example -- Blog(一)][41]
79 | * [Django学习记录(四):Django by example 项目驱动学习][40]
80 | * [Django学习记录(三):templates模板][39]
81 | * [Django学习记录(二):URL解析][38]
82 | * [Django学习记录(一):Pycharm下项目创建][37]
83 | * [一个月][36]
84 | * [Wooyun_Seebug_Python_7][35]
85 | * [Wooyun_Seebug_Python_6][34]
86 | * [Wooyun_Seebug_Python_5][33]
87 | * [Wooyun_Seebug_Python_4][32]
88 | * [Wooyun_Seebug_Python_3][31]
89 | * [Wooyun_Seebug_Python_2][30]
90 | * [Wooyun_Seebug_Python_1][29]
91 | * [一个半月][28]
92 | * [关于小宝贝][27]
93 | * [Mongodb未授权访问][26]
94 | * [Python学习记录(二十二):知道创宇爬虫&requests模块][25]
95 | * [Python学习记录(二十一):知道创宇爬虫&分析][24]
96 | * [WooyunWiKi&企业IP收集][23]
97 | * [Python学习记录(二十):知道创宇爬虫&DFS和BFS][22]
98 | * [近期总结][21]
99 | * [Python学习记录(十九):Awvs自动化扫描&总结][20]
100 | * [Python学习记录(十八):Awvs自动化扫描&XML分析][19]
101 | * [Python学习记录(十七):Awvs自动化扫描&多线程扫描][18]
102 | * [Python学习记录(十六):Awvs自动化扫描&单步扫描实现][17]
103 | * [Python学习记录(十五):Awvs自动化扫描&扫描步骤][16]
104 | * [Python学习记录(十四):Awvs自动化扫描&wvs_console.exe][15]
105 | * [Python学习记录(十三):乌云漏洞搜索爬虫&保存html][14]
106 | * [Python学习记录(十二):乌云漏洞搜索爬虫&多线程][13]
107 | * [Python学习记录(十一):乌云忽略漏洞爬虫&多线程][12]
108 | * [Python学习记录(十):乌云忽略漏洞爬虫&命令行][11]
109 | * [Python学习记录(九):乌云爬虫&数据][10]
110 | * [Python学习记录(八):命令行参数&网络爬虫][9]
111 | * [Python学习记录(七):多线程v3&网络爬虫][8]
112 | * [Python学习记录(六):多线程v2&网络爬虫][7]
113 | * [Python学习记录(五):多线程&网络爬虫][6]
114 | * [Python学习记录(四):编码问题&网络爬虫][5]
115 | * [Python学习记录(三):正则表达式&网络爬虫][4]
116 | * [Python学习记录(二):网络爬虫][3]
117 | * [Python学习记录(一):urllib模块和urllib2模块][2]
118 |
119 |
120 |
121 | [2]: https://github.com/PyxYuYu/MyBlog/issues/2 "Python学习记录(一):urllib模块和urllib2模块"
122 | [3]: https://github.com/PyxYuYu/MyBlog/issues/3 "Python学习记录(二):网络爬虫"
123 | [4]: https://github.com/PyxYuYu/MyBlog/issues/4 "Python学习记录(三):正则表达式&网络爬虫"
124 | [5]: https://github.com/PyxYuYu/MyBlog/issues/5 "Python学习记录(四):编码问题&网络爬虫"
125 | [6]: https://github.com/PyxYuYu/MyBlog/issues/6 "Python学习记录(五):多线程&网络爬虫"
126 | [7]: https://github.com/PyxYuYu/MyBlog/issues/7 "Python学习记录(六):多线程v2&网络爬虫"
127 | [8]: https://github.com/PyxYuYu/MyBlog/issues/8 "Python学习记录(七):多线程v3&网络爬虫"
128 | [9]: https://github.com/PyxYuYu/MyBlog/issues/9 "Python学习记录(八):命令行参数&网络爬虫"
129 | [10]: https://github.com/PyxYuYu/MyBlog/issues/10 "Python学习记录(九):乌云爬虫&数据"
130 | [11]: https://github.com/PyxYuYu/MyBlog/issues/11 "Python学习记录(十):乌云忽略漏洞爬虫&命令行"
131 | [12]: https://github.com/PyxYuYu/MyBlog/issues/12 "Python学习记录(十一):乌云忽略漏洞爬虫&多线程"
132 | [13]: https://github.com/PyxYuYu/MyBlog/issues/13 "Python学习记录(十二):乌云漏洞搜索爬虫&多线程"
133 | [14]: https://github.com/PyxYuYu/MyBlog/issues/14 "Python学习记录(十三):乌云漏洞搜索爬虫&保存html"
134 | [15]: https://github.com/PyxYuYu/MyBlog/issues/15 "Python学习记录(十四):Awvs自动化扫描&wvs_console.exe"
135 | [16]: https://github.com/PyxYuYu/MyBlog/issues/16 "Python学习记录(十五):Awvs自动化扫描&扫描步骤"
136 | [17]: https://github.com/PyxYuYu/MyBlog/issues/17 "Python学习记录(十六):Awvs自动化扫描&单步扫描实现"
137 | [18]: https://github.com/PyxYuYu/MyBlog/issues/18 "Python学习记录(十七):Awvs自动化扫描&多线程扫描"
138 | [19]: https://github.com/PyxYuYu/MyBlog/issues/19 "Python学习记录(十八):Awvs自动化扫描&XML分析"
139 | [20]: https://github.com/PyxYuYu/MyBlog/issues/20 "Python学习记录(十九):Awvs自动化扫描&总结"
140 | [21]: https://github.com/PyxYuYu/MyBlog/issues/21 "近期总结"
141 | [22]: https://github.com/PyxYuYu/MyBlog/issues/22 "Python学习记录(二十):知道创宇爬虫&DFS和BFS"
142 | [23]: https://github.com/PyxYuYu/MyBlog/issues/23 "WooyunWiKi&企业IP收集"
143 | [24]: https://github.com/PyxYuYu/MyBlog/issues/24 "Python学习记录(二十一):知道创宇爬虫&分析"
144 | [25]: https://github.com/PyxYuYu/MyBlog/issues/25 "Python学习记录(二十二):知道创宇爬虫&requests模块"
145 | [26]: https://github.com/PyxYuYu/MyBlog/issues/26 "Mongodb未授权访问"
146 | [27]: https://github.com/PyxYuYu/MyBlog/issues/27 "关于小宝贝"
147 | [28]: https://github.com/PyxYuYu/MyBlog/issues/28 "一个半月"
148 | [29]: https://github.com/PyxYuYu/MyBlog/issues/29 "Wooyun_Seebug_Python_1"
149 | [30]: https://github.com/PyxYuYu/MyBlog/issues/30 "Wooyun_Seebug_Python_2"
150 | [31]: https://github.com/PyxYuYu/MyBlog/issues/31 "Wooyun_Seebug_Python_3"
151 | [32]: https://github.com/PyxYuYu/MyBlog/issues/32 "Wooyun_Seebug_Python_4"
152 | [33]: https://github.com/PyxYuYu/MyBlog/issues/33 "Wooyun_Seebug_Python_5"
153 | [34]: https://github.com/PyxYuYu/MyBlog/issues/34 "Wooyun_Seebug_Python_6"
154 | [35]: https://github.com/PyxYuYu/MyBlog/issues/35 "Wooyun_Seebug_Python_7"
155 | [36]: https://github.com/PyxYuYu/MyBlog/issues/36 "One Month"
156 | [37]: https://github.com/PyxYuYu/MyBlog/issues/37 "Django Study Notes (1): Creating a Project in PyCharm"
157 | [38]: https://github.com/PyxYuYu/MyBlog/issues/38 "Django Study Notes (2): URL Resolution"
158 | [39]: https://github.com/PyxYuYu/MyBlog/issues/39 "Django Study Notes (3): templates"
159 | [40]: https://github.com/PyxYuYu/MyBlog/issues/40 "Django Study Notes (4): Project-Driven Learning with Django by example"
160 | [41]: https://github.com/PyxYuYu/MyBlog/issues/41 "Django Study Notes (5): Django by example -- Blog (1)"
161 | [42]: https://github.com/PyxYuYu/MyBlog/issues/42 "Django Study Notes (6): Django by example -- Blog (2)"
162 | [43]: https://github.com/PyxYuYu/MyBlog/issues/43 "Django Study Notes (7): Django by example -- Blog (3)"
163 | [44]: https://github.com/PyxYuYu/MyBlog/issues/44 "Django Study Notes (8): Django by example -- Blog (4)"
164 | [45]: https://github.com/PyxYuYu/MyBlog/issues/45 "Django Study Notes (9): Django by example -- Blog (5)"
165 | [46]: https://github.com/PyxYuYu/MyBlog/issues/46 "Django Study Notes (10): Django by example -- Blog (6)"
166 | [47]: https://github.com/PyxYuYu/MyBlog/issues/47 "Django Study Notes (11): Django by example -- Blog (7)"
167 | [48]: https://github.com/PyxYuYu/MyBlog/issues/48 "Django Study Notes (12): Django by example -- Blog (8)"
168 | [49]: https://github.com/PyxYuYu/MyBlog/issues/49 "Django Study Notes (13): Django by example -- Blog (9)"
169 | [50]: https://github.com/PyxYuYu/MyBlog/issues/50 "Django Study Notes (14): Django by example -- Blog (10)"
170 | [51]: https://github.com/PyxYuYu/MyBlog/issues/51 "Django Study Notes (15): Django by example -- Blog (11)"
171 | [52]: https://github.com/PyxYuYu/MyBlog/issues/52 "Django Study Notes (16): Django by example -- Blog (12)"
172 | [53]: https://github.com/PyxYuYu/MyBlog/issues/53 "Django Study Notes (17): Django by example -- Blog (13)"
173 | [54]: https://github.com/PyxYuYu/MyBlog/issues/54 "Django Study Notes (18): Django by example -- Blog (14)"
174 | [55]: https://github.com/PyxYuYu/MyBlog/issues/55 "Django Study Notes (19): Django by example -- Blog (15)"
175 | [56]: https://github.com/PyxYuYu/MyBlog/issues/56 "Django Study Notes (20): Django by example -- Blog (16)"
176 | [57]: https://github.com/PyxYuYu/MyBlog/issues/57 "Django Study Notes (21): Django by example -- Blog (17)"
177 | [58]: https://github.com/PyxYuYu/MyBlog/issues/58 "Django Study Notes (22): Django by example -- Blog (18)"
178 | [59]: https://github.com/PyxYuYu/MyBlog/issues/59 "Django Study Notes (23): Django by example -- Blog (19)"
179 | [60]: https://github.com/PyxYuYu/MyBlog/issues/60 "Django Study Notes (24): Django by example -- Blog (20)"
180 | [61]: https://github.com/PyxYuYu/MyBlog/issues/61 "Django Study Summary (1)"
181 | [62]: https://github.com/PyxYuYu/MyBlog/issues/62 "Django Study Summary (2)"
182 | [63]: https://github.com/PyxYuYu/MyBlog/issues/63 "Django Study Summary (3)"
183 | [64]: https://github.com/PyxYuYu/MyBlog/issues/64 "Django Study Summary (4)"
184 | [65]: https://github.com/PyxYuYu/MyBlog/issues/65 "Django Study Summary (5)"
185 | [66]: https://github.com/PyxYuYu/MyBlog/issues/66 "Django Study Summary (6)"
186 | [67]: https://github.com/PyxYuYu/MyBlog/issues/67 "Django Study Summary (7)"
187 | [68]: https://github.com/PyxYuYu/MyBlog/issues/68 "DjangoProject-Blog: Conclusion"
188 | [69]: https://github.com/PyxYuYu/MyBlog/issues/69 "SqlmapApi Study"
189 | [70]: https://github.com/PyxYuYu/MyBlog/issues/70 "2016"
190 | [71]: https://github.com/PyxYuYu/MyBlog/issues/71 "Happy New Year"
191 | [72]: https://github.com/PyxYuYu/MyBlog/issues/72 "DSScan (1)"
192 | [73]: https://github.com/PyxYuYu/MyBlog/issues/73 "DSScan (2)"
193 | [74]: https://github.com/PyxYuYu/MyBlog/issues/74 "DSScan (3)"
194 | [75]: https://github.com/PyxYuYu/MyBlog/issues/75 "DSScan (4)"
195 | [76]: https://github.com/PyxYuYu/MyBlog/issues/76 "DSScan (5)"
196 | [77]: https://github.com/PyxYuYu/MyBlog/issues/77 "DSScan (6)"
197 | [78]: https://github.com/PyxYuYu/MyBlog/issues/78 "DSScan (7)"
198 | [79]: https://github.com/PyxYuYu/MyBlog/issues/79 "DSScan (Final)"
199 | [80]: https://github.com/PyxYuYu/MyBlog/issues/80 "Drops Wiki -- Service Configuration"
200 | [81]: https://github.com/PyxYuYu/MyBlog/issues/81 "Drops Wiki -- Service Configuration (2)"
201 | [82]: https://github.com/PyxYuYu/MyBlog/issues/82 "Drops Wiki -- Service Configuration (3)"
202 | [83]: https://github.com/PyxYuYu/MyBlog/issues/83 "Drops Wiki -- Service Configuration (4)"
203 | [84]: https://github.com/PyxYuYu/MyBlog/issues/84 "A Decision"
204 | [85]: https://github.com/PyxYuYu/MyBlog/issues/85 "OWASP Top 10 (1)"
205 | [86]: https://github.com/PyxYuYu/MyBlog/issues/86 "OWASP Top 10 (2)"
206 | [87]: https://github.com/PyxYuYu/MyBlog/issues/87 "SQL Statements (1)"
207 | [88]: https://github.com/PyxYuYu/MyBlog/issues/88 "SQL Statements (2)"
208 | [89]: https://github.com/PyxYuYu/MyBlog/issues/89 "Injection (1)"
209 | [90]: https://github.com/PyxYuYu/MyBlog/issues/90 "Injection (2)"
210 | [91]: https://github.com/PyxYuYu/MyBlog/issues/91 "Injection (3)"
211 | [92]: https://github.com/PyxYuYu/MyBlog/issues/92 "Injection (4)"
212 | [93]: https://github.com/PyxYuYu/MyBlog/issues/93 "Injection (5)"
213 | [94]: https://github.com/PyxYuYu/MyBlog/issues/94 "Injection (6)"
214 | [95]: https://github.com/PyxYuYu/MyBlog/issues/95 "Injection (7) & WAF"
215 | [96]: https://github.com/PyxYuYu/MyBlog/issues/96 "XSS (1)"
216 | [97]: https://github.com/PyxYuYu/MyBlog/issues/97 "XSS & WAF"
217 | [98]: https://github.com/PyxYuYu/MyBlog/issues/98 "CSRF & File Upload"
218 | [99]: https://github.com/PyxYuYu/MyBlog/issues/99 "Code Injection & File Inclusion"
219 | [100]: https://github.com/PyxYuYu/MyBlog/issues/100 "SSRF"
220 | [101]: https://github.com/PyxYuYu/MyBlog/issues/101 "XXE & XPATH"
221 | [102]: https://github.com/PyxYuYu/MyBlog/issues/102 "Business Logic Vulnerabilities"
222 | [103]: https://github.com/PyxYuYu/MyBlog/issues/103 "Intranet Penetration"
223 | [104]: https://github.com/PyxYuYu/MyBlog/issues/104 "Penetration Testing Summary (1)"
224 | [105]: https://github.com/PyxYuYu/MyBlog/issues/105 "Penetration Testing Summary (2)"
225 | [106]: https://github.com/PyxYuYu/MyBlog/issues/106 "Some Knowledge Points (1)"
226 | [107]: https://github.com/PyxYuYu/MyBlog/issues/107 "Dynamic Crawler (1)"
227 | [108]: https://github.com/PyxYuYu/MyBlog/issues/108 "Dynamic Crawler (2)"
228 | [109]: https://github.com/PyxYuYu/MyBlog/issues/109 "Security Intelligence Monitoring -- A Wheel for Butian"
229 |
230 |
231 |
232 | ----
233 | **May 8, 2017, 22:50:51**
234 |
--------------------------------------------------------------------------------
55 |
56 | {{ comment.name }}
57 |
58 |
59 | {{ comment.body|linebreaks }}
60 | {{ comment.created }}
61 |
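
The fragment above is what survives of the comment-rendering section of the blog's post detail template; the surrounding HTML markup was lost in the dump. For orientation only, a minimal sketch of how such a comment loop is commonly written in a Django template -- the wrapper tags, class names, and the `comments` context variable are illustrative assumptions, not the repository's exact markup:

```html
{# Hedged sketch of a comment loop; wrapper markup and class names are assumptions #}
{% for comment in comments %}
  <div class="comment">
    <p class="info">{{ comment.name }} wrote on {{ comment.created }}:</p>
    {# the linebreaks filter turns newlines in the plain-text body into <p>/<br> tags #}
    {{ comment.body|linebreaks }}
  </div>
{% empty %}
  <p>There are no comments yet.</p>
{% endfor %}
```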