├── ChangeLog
├── LICENSE
├── README.md
├── demo.png
├── grafana.db
├── install.sh
├── site-packages
│   ├── ci_tools
│   │   ├── pypy_upgrade.sh
│   │   ├── retry.bat
│   │   └── retry.sh
│   ├── dateutil
│   │   ├── __init__.py
│   │   ├── _common.py
│   │   ├── easter.py
│   │   ├── parser.py
│   │   ├── relativedelta.py
│   │   ├── rrule.py
│   │   ├── test
│   │   │   ├── __init__.py
│   │   │   ├── _common.py
│   │   │   ├── test_easter.py
│   │   │   ├── test_imports.py
│   │   │   ├── test_parser.py
│   │   │   ├── test_relativedelta.py
│   │   │   ├── test_rrule.py
│   │   │   └── test_tz.py
│   │   ├── tz
│   │   │   ├── __init__.py
│   │   │   ├── _common.py
│   │   │   ├── tz.py
│   │   │   └── win.py
│   │   ├── tzwin.py
│   │   └── zoneinfo
│   │       ├── __init__.py
│   │       ├── dateutil-zoneinfo.tar.gz
│   │       └── rebuild.py
│   ├── influxdb.egg-info
│   │   ├── PKG-INFO
│   │   ├── SOURCES.txt
│   │   ├── dependency_links.txt
│   │   ├── requires.txt
│   │   └── top_level.txt
│   ├── influxdb
│   │   ├── __init__.py
│   │   ├── _dataframe_client.py
│   │   ├── chunked_json.py
│   │   ├── client.py
│   │   ├── dataframe_client.py
│   │   ├── exceptions.py
│   │   ├── helper.py
│   │   ├── influxdb08
│   │   │   ├── __init__.py
│   │   │   ├── chunked_json.py
│   │   │   ├── client.py
│   │   │   ├── dataframe_client.py
│   │   │   └── helper.py
│   │   ├── line_protocol.py
│   │   ├── resultset.py
│   │   └── tests
│   │       ├── __init__.py
│   │       ├── chunked_json_test.py
│   │       ├── client_test.py
│   │       ├── dataframe_client_test.py
│   │       ├── helper_test.py
│   │       ├── influxdb08
│   │       │   ├── __init__.py
│   │       │   ├── client_test.py
│   │       │   ├── dataframe_client_test.py
│   │       │   └── helper_test.py
│   │       ├── misc.py
│   │       ├── resultset_test.py
│   │       ├── server_tests
│   │       │   ├── __init__.py
│   │       │   ├── base.py
│   │       │   ├── client_test_with_server.py
│   │       │   └── influxdb_instance.py
│   │       └── test_line_protocol.py
│   ├── python_dateutil.egg-info
│   │   ├── PKG-INFO
│   │   ├── SOURCES.txt
│   │   ├── dependency_links.txt
│   │   ├── requires.txt
│   │   ├── top_level.txt
│   │   └── zip-safe
│   ├── pytz-2016.10-py2.6.egg
│   ├── pytz.pth
│   ├── requests.egg-info
│   │   ├── PKG-INFO
│   │   ├── SOURCES.txt
│   │   ├── dependency_links.txt
│   │   ├── not-zip-safe
│   │   ├── requires.txt
│   │   └── top_level.txt
│   ├── requests
│   │   ├── __init__.py
│   │   ├── _internal_utils.py
│   │   ├── adapters.py
│   │   ├── api.py
│   │   ├── auth.py
│   │   ├── cacert.pem
│   │   ├── certs.py
│   │   ├── compat.py
│   │   ├── cookies.py
│   │   ├── exceptions.py
│   │   ├── hooks.py
│   │   ├── models.py
│   │   ├── packages
│   │   │   ├── __init__.py
│   │   │   ├── chardet
│   │   │   │   ├── __init__.py
│   │   │   │   ├── big5freq.py
│   │   │   │   ├── big5prober.py
│   │   │   │   ├── chardetect.py
│   │   │   │   ├── chardistribution.py
│   │   │   │   ├── charsetgroupprober.py
│   │   │   │   ├── charsetprober.py
│   │   │   │   ├── codingstatemachine.py
│   │   │   │   ├── compat.py
│   │   │   │   ├── constants.py
│   │   │   │   ├── cp949prober.py
│   │   │   │   ├── escprober.py
│   │   │   │   ├── escsm.py
│   │   │   │   ├── eucjpprober.py
│   │   │   │   ├── euckrfreq.py
│   │   │   │   ├── euckrprober.py
│   │   │   │   ├── euctwfreq.py
│   │   │   │   ├── euctwprober.py
│   │   │   │   ├── gb2312freq.py
│   │   │   │   ├── gb2312prober.py
│   │   │   │   ├── hebrewprober.py
│   │   │   │   ├── jisfreq.py
│   │   │   │   ├── jpcntx.py
│   │   │   │   ├── langbulgarianmodel.py
│   │   │   │   ├── langcyrillicmodel.py
│   │   │   │   ├── langgreekmodel.py
│   │   │   │   ├── langhebrewmodel.py
│   │   │   │   ├── langhungarianmodel.py
│   │   │   │   ├── langthaimodel.py
│   │   │   │   ├── latin1prober.py
│   │   │   │   ├── mbcharsetprober.py
│   │   │   │   ├── mbcsgroupprober.py
│   │   │   │   ├── mbcssm.py
│   │   │   │   ├── sbcharsetprober.py
│   │   │   │   ├── sbcsgroupprober.py
│   │   │   │   ├── sjisprober.py
│   │   │   │   ├── universaldetector.py
│   │   │   │   └── utf8prober.py
│   │   │   ├── idna
│   │   │   │   ├── __init__.py
│   │   │   │   ├── codec.py
│   │   │   │   ├── compat.py
│   │   │   │   ├── core.py
│   │   │   │   ├── idnadata.py
│   │   │   │   ├── intranges.py
│   │   │   │   └── uts46data.py
│   │   │   └── urllib3
│   │   │       ├── __init__.py
│   │   │       ├── _collections.py
│   │   │       ├── connection.py
│   │   │       ├── connectionpool.py
│   │   │       ├── contrib
│   │   │       │   ├── __init__.py
│   │   │       │   ├── appengine.py
│   │   │       │   ├── ntlmpool.py
│   │   │       │   ├── pyopenssl.py
│   │   │       │   └── socks.py
│   │   │       ├── exceptions.py
│   │   │       ├── fields.py
│   │   │       ├── filepost.py
│   │   │       ├── packages
│   │   │       │   ├── __init__.py
│   │   │       │   ├── backports
│   │   │       │   │   └── makefile.py
│   │   │       │   ├── ordered_dict.py
│   │   │       │   ├── six.py
│   │   │       │   └── ssl_match_hostname
│   │   │       │       ├── __init__.py
│   │   │       │       └── _implementation.py
│   │   │       ├── poolmanager.py
│   │   │       ├── request.py
│   │   │       ├── response.py
│   │   │       └── util
│   │   │           ├── __init__.py
│   │   │           ├── connection.py
│   │   │           ├── request.py
│   │   │           ├── response.py
│   │   │           ├── retry.py
│   │   │           ├── ssl_.py
│   │   │           ├── timeout.py
│   │   │           └── url.py
│   │   ├── sessions.py
│   │   ├── status_codes.py
│   │   ├── structures.py
│   │   └── utils.py
│   ├── six.egg-info
│   │   ├── PKG-INFO
│   │   ├── SOURCES.txt
│   │   ├── dependency_links.txt
│   │   └── top_level.txt
│   └── six.py
└── src
    ├── conf
    │   └── default.ini
    ├── data
    │   ├── data
    │   │   └── _internal
    │   │       └── monitor
    │   │           └── 1
    │   │               └── 1
    │   ├── meta
    │   │   └── meta.db
    │   └── wal
    │       └── _internal
    │           └── monitor
    │               └── 1
    │                   └── _00001.wal
    ├── lib
    │   ├── __init__.py
    │   ├── dLog.py
    │   ├── dMail.py
    │   ├── dSniff.py
    │   ├── dStat.py
    │   ├── dUtil.py
    │   ├── influxDB.py
    │   └── loadConf.py
    ├── logs
    │   └── ChangeLog
    ├── sbin
    │   ├── daemon
    │   ├── dshield
    │   ├── inflctrl
    │   └── influxd
    └── test
        ├── __init__.py
        ├── testasync.py
        ├── testblock.py
        ├── testdstat.py
        ├── testinflux.py
        └── testsniff.py
/ChangeLog:
--------------------------------------------------------------------------------
1 | Dshield V4.0.0 release notes 2016-12-10
2 | ---------------------------------------
3 | 1. Switched to the SS command for analysing connection state. SS is far more efficient than netstat, stays fast under highly concurrent connections and does not hurt system performance.
4 | 2. Added a TTL-detection process that blocks abnormal TTL values. TTL blocking gives some protection against attacks that open concurrent connections from random IPs: as long as blocking succeeds, the system will not crash from resource exhaustion, although it cannot solve the problem of the bandwidth being saturated.
5 | At the same time, TTL blocking may cause false positives, so enable it at your own discretion. It runs independently of the main process and can be started, stopped, or set to listen-only mode by hand.
6 | 3. To look suitably polished, a web-based visual interface is provided. The backend logic was also switched to writing into a data cache, so it consumes no extra system resources at runtime.
7 | 4. Whitelists now accept CIDR notation, e.g. 192.168.1.0/24, so you no longer have to enter IPs one by one. At most 8 whitelist groups are currently supported.
8 | 5. Other features such as alert e-mails and instant notifications are included.
9 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Dshield
2 |
3 | Dshield is a lightweight DDoS-mitigation tool. It performs well against CC attacks, stress-testing tools and similar denial-of-service attacks, although it cannot intercept traffic on its own: it is built on the iptables firewall and uses the SS command to pick out suspicious IPs, working together with iptables. During an attack it analyses the intent of incoming connections in real time, adds the offending source IPs to the DROP chain of iptables and records them in the database; once the preset block period expires, the IPs are unblocked automatically. In basic tests it handles concurrent-connection floods and single-IP CC attacks well. It is not suited to genuinely large volumetric attacks, but as long as the attack traffic does not exceed the server's bandwidth it keeps the service from going down, which makes it adequate for lightweight DDoS. Dshield may be the easiest and simplest software-level DDoS defense to install. It is written entirely in Python, so the code is easy to read and convenient to modify.
4 |
5 | Dshield has gone through four releases under its original name "DDoS-Defender". Version 4.0.0 adds a web-based GUI, and the code has been almost completely rewritten. Because it is oriented towards web visualisation, the underlying stack now combines InfluxDB and Grafana; you do not need to install any extra HTTP service to run it, since Grafana ships with its own, and the dashboards are fully configurable. It is very easy to use, and we hope you enjoy it!
10 |
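The blocking flow described above comes down to two steps: counting connections per source with `ss`, and manipulating the iptables `DROP` chain. The snippet below is only an illustrative sketch (the address is a documentation placeholder); the exact commands Dshield issues live in the scripts under `sbin/` and `lib/`.

```shell
# Count established connections per peer IP -- the kind of check Dshield performs with ss
ss -nt state established | awk 'NR>1 {print $4}' | cut -d: -f1 | sort | uniq -c | sort -rn | head

# Block an offending source address by adding it to the iptables DROP chain
iptables -I INPUT -s 203.0.113.10 -j DROP

# Unblock it again once the configured block period (block_period_ip) has expired
iptables -D INPUT -s 203.0.113.10 -j DROP
```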
11 | ## Structure
12 | * Dshield/conf Configuration files
13 | * Dshield/data Data cache storage
14 | * Dshield/lib Module library
15 | * Dshield/sbin Main program
16 | * Dshield/logs Logs directory
17 | * Dshield/test Test cases
18 |
19 | ## Installation
20 |
21 | Install Dshield as the root user:
22 |
23 | (1) Install grafana
24 | ```shell
25 | yum -y install https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.2-1481203731.x86_64.rpm
26 | service grafana-server start
27 | ```
28 | Alternatively, install it from a yum repository: create /etc/yum.repos.d/grafana.repo (e.g. with vi) and add the content below.
29 | ```shell
30 | [grafana]
31 | name=grafana
32 | baseurl=https://packagecloud.io/grafana/stable/el/6/$basearch
33 | repo_gpgcheck=1
34 | enabled=1
35 | gpgcheck=1
36 | gpgkey=https://packagecloud.io/gpg.key https://grafanarel.s3.amazonaws.com/RPM-GPG-KEY-grafana
37 | sslverify=1
38 | sslcacert=/etc/pki/tls/certs/ca-bundle.crt
39 | ```
40 |
41 | Then install it with yum and start grafana-server with the service command.
42 | ```shell
43 | yum install grafana
44 | service grafana-server start
45 | ```
46 |
47 | (2) Install Dshield
48 | ```shell
49 | wget https://github.com/ywjt/Dshield/archive/master.zip
50 | unzip master.zip
51 | cd Dshield-master/
52 | sh install.sh
53 | ```
54 |
55 | Once the installation has finished, you can start it:
56 | ```shell
57 | service grafana-server restart
58 | /usr/local/Dshield/sbin/dshield all start
59 | ```
60 | Now you can log in to the administration backend at http://{your_ip}:3000
61 |
62 | username: admin
63 | password: admin
64 |
65 |
66 |
67 | ## Help
68 |
69 | **command usage**
70 | ```shell
71 | # /usr/local/Dshield/sbin/dshield all {start|stop|restart} #Control all services
72 | # /usr/local/Dshield/sbin/dshield cc {start|stop|restart} #Control the CC defense process
73 | # /usr/local/Dshield/sbin/dshield sniff {start|stop|restart} #Control the TTL sniffing module
74 | # /usr/local/Dshield/sbin/inflctrl {start|stop|restart} #Control only the InfluxDB process
75 | ```
76 |
77 | **modify the configuration file**
78 |
79 | Edit /usr/local/Dshield/conf/default.ini. A consolidated example of the options documented below is sketched after this list.
80 |
81 |
82 | **white list**
83 |
84 | supports CIDR notation, e.g.:
85 | > whitelisted_ips = "10.10.10.0/24,172.16.0.0/16"
86 |
87 | > whitel_ttl_ips = "10.10.10.0/24,172.16.0.0/16"
88 |
89 | **monitor interface**
90 | > mont_interface = "eth0"
91 |
92 | **monitor port**
93 | > mont_port = "80,22"
94 |
95 | **listen mode**
96 | false enables active blocking; true only records the IP and TTL without blocking
97 | > mont_listen = false
98 |
99 | **monitor interval**
100 | specified in seconds
101 | > rexec_time = 5
102 |
103 | **block connections**
104 | sets the connection-count threshold (i.e. the monitoring sensitivity); 100 is recommended
105 | > no_of_connections = 100
106 |
107 | **ip block time**
108 | supports the 1d/1h/1m format
109 | > block_period_ip = "1m"
110 |
111 | **monitor protocol**
112 | applies to the TTL monitor module: "tcp" monitors TCP only, "udp" monitors UDP only, and an empty string ("") monitors all protocols
113 | > mont_protocol = "tcp"
114 |
115 | **ttl block connections**
116 | sets the TTL-module connection threshold (monitoring sensitivity); 20000~100000 is recommended
117 | > no_ttl_connections = 20000
118 |
119 | **ttl unblock time**
120 | supports the 1d/1h/1m format
121 | > block_period_ttl = "1m"
122 |
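For reference, here is a consolidated sketch of the options documented above, reusing the example values from this section. It is illustrative only; the authoritative layout is the shipped /usr/local/Dshield/conf/default.ini.

```
# Illustrative consolidation of the documented options -- check conf/default.ini for the real layout
whitelisted_ips = "10.10.10.0/24,172.16.0.0/16"
whitel_ttl_ips = "10.10.10.0/24,172.16.0.0/16"
mont_interface = "eth0"
mont_port = "80,22"
mont_listen = false
rexec_time = 5
no_of_connections = 100
block_period_ip = "1m"
mont_protocol = "tcp"
no_ttl_connections = 20000
block_period_ttl = "1m"
```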
123 |
124 | ## About
125 |
126 | **Original Author:** YWJT http://www.ywjt.org/ (Copyright (C) 2016)
127 |
128 | **Maintainer:** Sunshine Koo <350311204@qq.com>
129 |
130 |
131 |
--------------------------------------------------------------------------------
/demo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ywjt/Dshield/ef5b58d99bc479b92a97404b49d057d503e1a368/demo.png
--------------------------------------------------------------------------------
/grafana.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ywjt/Dshield/ef5b58d99bc479b92a97404b49d057d503e1a368/grafana.db
--------------------------------------------------------------------------------
/install.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # @ Dshield for Python
4 | ##############################################################################
5 | # Author: YWJT / Sunshine Koo #
6 | # Modify: 2016-12-10 #
7 | ##############################################################################
8 | # This program is distributed under the "Artistic License" Agreement #
9 | # The LICENSE file is located in the same directory as this program. Please #
10 | # read the LICENSE file before you make copies or distribute this program #
11 | ##############################################################################
12 | #
13 |
14 |
15 | # Check if user is root
16 | if [ $(id -u) != "0" ]; then
17 | echo "Error: You must be root to run this script, use sudo sh $0"
18 | exit 1
19 | fi
20 |
21 | SHELL_DIR=$(cd "$(dirname "$0")"; pwd)
22 | BASEDIR=$(dirname $SHELL_DIR)
23 | if [ `cat ~/.bash_profile|grep 'Dshield'|wc -l` -eq 0 ];then
24 | echo "PATH="$PATH:$SHELL_DIR >> ~/.bash_profile
25 | echo "export PATH" >> ~/.bash_profile
26 | export PATH=$PATH:$SHELL_DIR
27 | fi
28 |
29 |
30 | cd $SHELL_DIR
31 | function header()
32 | {
33 | echo
34 | echo "==========================================================================="
35 | echo "| Dshield v4.0.0 For Python"
36 | echo "| Copyright (C)2016,YWJT.org."
37 | echo "| Author: YWJT / Sunshine Koo"
38 | echo "| Github: https://github.com/ywjt/Dshield"
39 | echo "| AuBlog: http://www.ywjt.org"
40 | echo "==========================================================================="
41 | echo
42 | echo "[$(date)] Setup Begin >>>"
43 | }
44 |
45 | function option_irq() {
46 | if [ ! -f /var/log/irq_tmp ]
47 | then
48 | if [ ! -z "${MONT_INTERFACE}" ]
49 | then
50 | /etc/init.d/irqbalance stop 1>/dev/null 2>&1
51 | IrqID=$(cat /proc/interrupts |grep ${MONT_INTERFACE}|awk -F ':' '{print $1}'|xargs)   # IRQ numbers used by the monitored NIC
52 | Nx=0
53 | for Cid in ${IrqID}
54 | do
55 | Mex=$(echo $((2**${Nx})))   # CPU affinity bitmask: bit Nx selects CPU core Nx
56 | Hex=$(printf "%x" ${Mex})   # smp_affinity expects a hexadecimal mask
57 | echo ${Hex} > /proc/irq/${Cid}/smp_affinity   # pin each of the NIC's IRQs to its own core
58 | Nx=$((${Nx}+1))
59 | done
60 | echo 1 >> /var/log/irq_tmp
61 | fi
62 | fi
63 | }
64 |
65 |
66 | function chk_iptables() {
67 | echo "| Check iptables env ... "
68 | if ! rpm -qa iptables|grep 'iptables' >/dev/null
69 | then
70 | yum -y install iptables
71 | else
72 | /sbin/service iptables status 1>/dev/null 2>&1
73 | if [ $? -ne 0 ]; then
74 | #/etc/init.d/iptables start
75 | echo "你的iptables没有启动,请手工执行启动后再跑一次!"
76 | echo "/etc/init.d/iptables start"
77 | read -p "跳过请按'y', 退出按'n':" act
78 | if [[ "${act}" == "n" || "${act}" == "N" ]]
79 | then
80 | exit 1
81 | fi
82 | fi
83 | fi
84 | }
85 |
86 |
87 | function chk_tcpdump() {
88 | echo "| Check tcpdump env ... "
89 | if [ ! -x /usr/sbin/tcpdump ]
90 | then
91 | yum -y install tcpdump
92 | fi
93 | }
94 |
95 |
96 | function chk_pyenv() {
97 | echo "| Check python env ... "
98 | i=1
99 | for dir in $(find / -name "site-packages" -type d|grep -E '/lib/python2\.[0-9]/site-packages$')   # every Python 2.x site-packages on the system
100 | do
101 | echo "Python Env ($i):" $dir
102 | /bin/cp -rf site-packages/* $dir/
103 | let i=$i+1
104 | done
105 | }
106 |
107 | function grafana_init() {
108 | echo "| Install grafana ..."
109 | if ! rpm -qa grafana|grep 'grafana' >/dev/null
110 | then
111 | yum -y install https://grafanarel.s3.amazonaws.com/builds/grafana-4.0.2-1481203731.x86_64.rpm
112 | fi
113 | service grafana-server start
114 | }
115 |
116 |
117 | function dshield_init() {
118 | echo "| Install dshield ..."
119 | #if [ ! -f Dshield.zip ]
120 | #then
121 | # wget https://github.com/ywjt/Dshield/archive/master.zip -O Dshield.zip
122 | # [ -d Dshield ] && rm -rf Dshield
123 | # unzip Dshield.zip
124 | # cd Dshield
125 | #fi
126 | /bin/cp -rf src /usr/local/Dshield
127 | [ -f /var/lib/grafana/grafana.db ] && mv /var/lib/grafana/grafana.db /var/lib/grafana/grafana.db_
128 | /bin/cp -f grafana.db /var/lib/grafana/
129 | chown grafana:grafana /var/lib/grafana/grafana.db
130 | chmod 775 /var/lib/grafana/grafana.db
131 | chk_pyenv
132 | chown root:root /usr/local/Dshield
133 | chmod 775 -R /usr/local/Dshield
134 | }
135 |
136 | ####### >>>>>>>>>>>>>>>>>>>
137 | header
138 | chk_iptables
139 | chk_tcpdump
140 | option_irq
141 | grafana_init
142 | dshield_init
143 | ####### >>>>>>>>>>>>>>>>>>>
144 | echo
145 | echo "==========================================================================="
146 | echo "| 首先: service grafana-server restart"
147 | echo "| 然后: /usr/local/Dshield/sbin/dshield all start"
148 | echo "| 现在: 打开 http://your_ip:3000/ 输入用户名/密码: admin /admin"
149 | echo "| 尽情: Enjoy!"
150 | echo "==========================================================================="
151 | echo
152 |
--------------------------------------------------------------------------------
/site-packages/ci_tools/pypy_upgrade.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Need to install an upgraded version of pypy.
3 | if [ -f "$PYENV_ROOT/bin/pyenv" ]; then
4 | pushd "$PYENV_ROOT" && git pull && popd
5 | else
6 | rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/yyuu/pyenv.git "$PYENV_ROOT"
7 | fi
8 |
9 | "$PYENV_ROOT/bin/pyenv" install --skip-existing "pypy-$PYPY_VERSION"
10 | virtualenv --python="$PYENV_ROOT/versions/pypy-$PYPY_VERSION/bin/python" "$HOME/virtualenvs/pypy-$PYPY_VERSION"
11 | source "$HOME/virtualenvs/pypy-$PYPY_VERSION/bin/activate"
--------------------------------------------------------------------------------
/site-packages/ci_tools/retry.bat:
--------------------------------------------------------------------------------
1 | @echo off
2 | REM This script takes a command and retries it a few times if it fails, with a
3 | REM timeout between each retry.
4 |
5 | setlocal EnableDelayedExpansion
6 |
7 | REM Loop at most n_retries times, waiting sleep_time seconds between attempts
8 | set sleep_time=60
9 | set n_retries=5
10 |
11 | for /l %%x in (1, 1, %n_retries%) do (
12 | call %*
13 | if not ERRORLEVEL 1 EXIT /B 0
14 | timeout /t %sleep_time% /nobreak > nul
15 | )
16 |
17 | REM If the command failed all n_retries times, give up.
18 | EXIT /B 1
--------------------------------------------------------------------------------
/site-packages/ci_tools/retry.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | sleep_time=60
3 | n_retries=5
4 |
5 | for i in `seq 1 $n_retries`; do
6 | "$@" && exit 0
7 | sleep $sleep_time
8 | done
9 |
10 | exit 1
11 |
--------------------------------------------------------------------------------
/site-packages/dateutil/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | __version__ = "2.6.0"
3 |
--------------------------------------------------------------------------------
/site-packages/dateutil/_common.py:
--------------------------------------------------------------------------------
1 | """
2 | Common code used in multiple modules.
3 | """
4 |
5 | class weekday(object):
6 | __slots__ = ["weekday", "n"]
7 |
8 | def __init__(self, weekday, n=None):
9 | self.weekday = weekday
10 | self.n = n
11 |
12 | def __call__(self, n):
13 | if n == self.n:
14 | return self
15 | else:
16 | return self.__class__(self.weekday, n)
17 |
18 | def __eq__(self, other):
19 | try:
20 | if self.weekday != other.weekday or self.n != other.n:
21 | return False
22 | except AttributeError:
23 | return False
24 | return True
25 |
26 | __hash__ = None
27 |
28 | def __repr__(self):
29 | s = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
30 | if not self.n:
31 | return s
32 | else:
33 | return "%s(%+d)" % (s, self.n)
34 |
--------------------------------------------------------------------------------
/site-packages/dateutil/easter.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | This module offers a generic easter computing method for any given year, using
4 | Western, Orthodox or Julian algorithms.
5 | """
6 |
7 | import datetime
8 |
9 | __all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]
10 |
11 | EASTER_JULIAN = 1
12 | EASTER_ORTHODOX = 2
13 | EASTER_WESTERN = 3
14 |
15 |
16 | def easter(year, method=EASTER_WESTERN):
17 | """
18 | This method was ported from the work done by GM Arts,
19 | on top of the algorithm by Claus Tondering, which was
20 | based in part on the algorithm of Ouding (1940), as
21 | quoted in "Explanatory Supplement to the Astronomical
22 | Almanac", P. Kenneth Seidelmann, editor.
23 |
24 | This algorithm implements three different easter
25 | calculation methods:
26 |
27 | 1 - Original calculation in Julian calendar, valid in
28 | dates after 326 AD
29 | 2 - Original method, with date converted to Gregorian
30 | calendar, valid in years 1583 to 4099
31 | 3 - Revised method, in Gregorian calendar, valid in
32 | years 1583 to 4099 as well
33 |
34 | These methods are represented by the constants:
35 |
36 | * ``EASTER_JULIAN = 1``
37 | * ``EASTER_ORTHODOX = 2``
38 | * ``EASTER_WESTERN = 3``
39 |
40 | The default method is method 3.
41 |
42 | More about the algorithm may be found at:
43 |
44 | http://users.chariot.net.au/~gmarts/eastalg.htm
45 |
46 | and
47 |
48 | http://www.tondering.dk/claus/calendar.html
49 |
50 | """
51 |
52 | if not (1 <= method <= 3):
53 | raise ValueError("invalid method")
54 |
55 | # g - Golden year - 1
56 | # c - Century
57 | # h - (23 - Epact) mod 30
58 | # i - Number of days from March 21 to Paschal Full Moon
59 | # j - Weekday for PFM (0=Sunday, etc)
60 | # p - Number of days from March 21 to Sunday on or before PFM
61 | # (-6 to 28 methods 1 & 3, to 56 for method 2)
62 | # e - Extra days to add for method 2 (converting Julian
63 | # date to Gregorian date)
64 |
65 | y = year
66 | g = y % 19
67 | e = 0
68 | if method < 3:
69 | # Old method
70 | i = (19*g + 15) % 30
71 | j = (y + y//4 + i) % 7
72 | if method == 2:
73 | # Extra dates to convert Julian to Gregorian date
74 | e = 10
75 | if y > 1600:
76 | e = e + y//100 - 16 - (y//100 - 16)//4
77 | else:
78 | # New method
79 | c = y//100
80 | h = (c - c//4 - (8*c + 13)//25 + 19*g + 15) % 30
81 | i = h - (h//28)*(1 - (h//28)*(29//(h + 1))*((21 - g)//11))
82 | j = (y + y//4 + i + 2 - c + c//4) % 7
83 |
84 | # p can be from -6 to 56 corresponding to dates 22 March to 23 May
85 | # (later dates apply to method 2, although 23 May never actually occurs)
86 | p = i - j + e
87 | d = 1 + (p + 27 + (p + 6)//40) % 31
88 | m = 3 + (p + 26)//30
89 | return datetime.date(int(y), int(m), int(d))
90 |
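# Illustrative usage; the expected values below match dateutil's own test suite
# (see test/test_easter.py):
#
#     >>> from dateutil.easter import easter, EASTER_ORTHODOX
#     >>> easter(2016)                   # Western (default) method
#     datetime.date(2016, 3, 27)
#     >>> easter(2016, EASTER_ORTHODOX)  # Orthodox method
#     datetime.date(2016, 5, 1)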
--------------------------------------------------------------------------------
/site-packages/dateutil/test/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 |
--------------------------------------------------------------------------------
/site-packages/dateutil/test/test_easter.py:
--------------------------------------------------------------------------------
1 | from dateutil.easter import easter
2 | from dateutil.easter import EASTER_WESTERN, EASTER_ORTHODOX, EASTER_JULIAN
3 |
4 | from datetime import date
5 |
6 | try:
7 | import unittest2 as unittest
8 | except ImportError:
9 | import unittest
10 |
11 | # List of easters between 1990 and 2050
12 | western_easter_dates = [
13 | date(1990, 4, 15), date(1991, 3, 31), date(1992, 4, 19), date(1993, 4, 11),
14 | date(1994, 4, 3), date(1995, 4, 16), date(1996, 4, 7), date(1997, 3, 30),
15 | date(1998, 4, 12), date(1999, 4, 4),
16 |
17 | date(2000, 4, 23), date(2001, 4, 15), date(2002, 3, 31), date(2003, 4, 20),
18 | date(2004, 4, 11), date(2005, 3, 27), date(2006, 4, 16), date(2007, 4, 8),
19 | date(2008, 3, 23), date(2009, 4, 12),
20 |
21 | date(2010, 4, 4), date(2011, 4, 24), date(2012, 4, 8), date(2013, 3, 31),
22 | date(2014, 4, 20), date(2015, 4, 5), date(2016, 3, 27), date(2017, 4, 16),
23 | date(2018, 4, 1), date(2019, 4, 21),
24 |
25 | date(2020, 4, 12), date(2021, 4, 4), date(2022, 4, 17), date(2023, 4, 9),
26 | date(2024, 3, 31), date(2025, 4, 20), date(2026, 4, 5), date(2027, 3, 28),
27 | date(2028, 4, 16), date(2029, 4, 1),
28 |
29 | date(2030, 4, 21), date(2031, 4, 13), date(2032, 3, 28), date(2033, 4, 17),
30 | date(2034, 4, 9), date(2035, 3, 25), date(2036, 4, 13), date(2037, 4, 5),
31 | date(2038, 4, 25), date(2039, 4, 10),
32 |
33 | date(2040, 4, 1), date(2041, 4, 21), date(2042, 4, 6), date(2043, 3, 29),
34 | date(2044, 4, 17), date(2045, 4, 9), date(2046, 3, 25), date(2047, 4, 14),
35 | date(2048, 4, 5), date(2049, 4, 18), date(2050, 4, 10)
36 | ]
37 |
38 | orthodox_easter_dates = [
39 | date(1990, 4, 15), date(1991, 4, 7), date(1992, 4, 26), date(1993, 4, 18),
40 | date(1994, 5, 1), date(1995, 4, 23), date(1996, 4, 14), date(1997, 4, 27),
41 | date(1998, 4, 19), date(1999, 4, 11),
42 |
43 | date(2000, 4, 30), date(2001, 4, 15), date(2002, 5, 5), date(2003, 4, 27),
44 | date(2004, 4, 11), date(2005, 5, 1), date(2006, 4, 23), date(2007, 4, 8),
45 | date(2008, 4, 27), date(2009, 4, 19),
46 |
47 | date(2010, 4, 4), date(2011, 4, 24), date(2012, 4, 15), date(2013, 5, 5),
48 | date(2014, 4, 20), date(2015, 4, 12), date(2016, 5, 1), date(2017, 4, 16),
49 | date(2018, 4, 8), date(2019, 4, 28),
50 |
51 | date(2020, 4, 19), date(2021, 5, 2), date(2022, 4, 24), date(2023, 4, 16),
52 | date(2024, 5, 5), date(2025, 4, 20), date(2026, 4, 12), date(2027, 5, 2),
53 | date(2028, 4, 16), date(2029, 4, 8),
54 |
55 | date(2030, 4, 28), date(2031, 4, 13), date(2032, 5, 2), date(2033, 4, 24),
56 | date(2034, 4, 9), date(2035, 4, 29), date(2036, 4, 20), date(2037, 4, 5),
57 | date(2038, 4, 25), date(2039, 4, 17),
58 |
59 | date(2040, 5, 6), date(2041, 4, 21), date(2042, 4, 13), date(2043, 5, 3),
60 | date(2044, 4, 24), date(2045, 4, 9), date(2046, 4, 29), date(2047, 4, 21),
61 | date(2048, 4, 5), date(2049, 4, 25), date(2050, 4, 17)
62 | ]
63 |
64 | # A random smattering of Julian dates.
65 | # Pulled values from http://www.kevinlaughery.com/east4099.html
66 | julian_easter_dates = [
67 | date( 326, 4, 3), date( 375, 4, 5), date( 492, 4, 5), date( 552, 3, 31),
68 | date( 562, 4, 9), date( 569, 4, 21), date( 597, 4, 14), date( 621, 4, 19),
69 | date( 636, 3, 31), date( 655, 3, 29), date( 700, 4, 11), date( 725, 4, 8),
70 | date( 750, 3, 29), date( 782, 4, 7), date( 835, 4, 18), date( 849, 4, 14),
71 | date( 867, 3, 30), date( 890, 4, 12), date( 922, 4, 21), date( 934, 4, 6),
72 | date(1049, 3, 26), date(1058, 4, 19), date(1113, 4, 6), date(1119, 3, 30),
73 | date(1242, 4, 20), date(1255, 3, 28), date(1257, 4, 8), date(1258, 3, 24),
74 | date(1261, 4, 24), date(1278, 4, 17), date(1333, 4, 4), date(1351, 4, 17),
75 | date(1371, 4, 6), date(1391, 3, 26), date(1402, 3, 26), date(1412, 4, 3),
76 | date(1439, 4, 5), date(1445, 3, 28), date(1531, 4, 9), date(1555, 4, 14)
77 | ]
78 |
79 |
80 | class EasterTest(unittest.TestCase):
81 | def testEasterWestern(self):
82 | for easter_date in western_easter_dates:
83 | self.assertEqual(easter_date,
84 | easter(easter_date.year, EASTER_WESTERN))
85 |
86 | def testEasterOrthodox(self):
87 | for easter_date in orthodox_easter_dates:
88 | self.assertEqual(easter_date,
89 | easter(easter_date.year, EASTER_ORTHODOX))
90 |
91 | def testEasterJulian(self):
92 | for easter_date in julian_easter_dates:
93 | self.assertEqual(easter_date,
94 | easter(easter_date.year, EASTER_JULIAN))
95 |
96 | def testEasterBadMethod(self):
97 | # Invalid methods raise ValueError
98 | with self.assertRaises(ValueError):
99 | easter(1975, 4)
100 |
--------------------------------------------------------------------------------
/site-packages/dateutil/test/test_imports.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | try:
4 | import unittest2 as unittest
5 | except ImportError:
6 | import unittest
7 |
8 |
9 | class ImportEasterTest(unittest.TestCase):
10 | """ Test that dateutil.easter-related imports work properly """
11 |
12 | def testEasterDirect(self):
13 | import dateutil.easter
14 |
15 | def testEasterFrom(self):
16 | from dateutil import easter
17 |
18 | def testEasterStar(self):
19 | from dateutil.easter import easter
20 |
21 |
22 | class ImportParserTest(unittest.TestCase):
23 | """ Test that dateutil.parser-related imports work properly """
24 | def testParserDirect(self):
25 | import dateutil.parser
26 |
27 | def testParserFrom(self):
28 | from dateutil import parser
29 |
30 | def testParserAll(self):
31 | # All interface
32 | from dateutil.parser import parse
33 | from dateutil.parser import parserinfo
34 |
35 | # Other public classes
36 | from dateutil.parser import parser
37 |
38 | for var in (parse, parserinfo, parser):
39 | self.assertIsNot(var, None)
40 |
41 |
42 | class ImportRelativeDeltaTest(unittest.TestCase):
43 | """ Test that dateutil.relativedelta-related imports work properly """
44 | def testRelativeDeltaDirect(self):
45 | import dateutil.relativedelta
46 |
47 | def testRelativeDeltaFrom(self):
48 | from dateutil import relativedelta
49 |
50 | def testRelativeDeltaAll(self):
51 | from dateutil.relativedelta import relativedelta
52 | from dateutil.relativedelta import MO, TU, WE, TH, FR, SA, SU
53 |
54 | for var in (relativedelta, MO, TU, WE, TH, FR, SA, SU):
55 | self.assertIsNot(var, None)
56 |
57 | # In the public interface but not in all
58 | from dateutil.relativedelta import weekday
59 | self.assertIsNot(weekday, None)
60 |
61 |
62 | class ImportRRuleTest(unittest.TestCase):
63 | """ Test that dateutil.rrule related imports work properly """
64 | def testRRuleDirect(self):
65 | import dateutil.rrule
66 |
67 | def testRRuleFrom(self):
68 | from dateutil import rrule
69 |
70 | def testRRuleAll(self):
71 | from dateutil.rrule import rrule
72 | from dateutil.rrule import rruleset
73 | from dateutil.rrule import rrulestr
74 | from dateutil.rrule import YEARLY, MONTHLY, WEEKLY, DAILY
75 | from dateutil.rrule import HOURLY, MINUTELY, SECONDLY
76 | from dateutil.rrule import MO, TU, WE, TH, FR, SA, SU
77 |
78 | rr_all = (rrule, rruleset, rrulestr,
79 | YEARLY, MONTHLY, WEEKLY, DAILY,
80 | HOURLY, MINUTELY, SECONDLY,
81 | MO, TU, WE, TH, FR, SA, SU)
82 |
83 | for var in rr_all:
84 | self.assertIsNot(var, None)
85 |
86 | # In the public interface but not in all
87 | from dateutil.rrule import weekday
88 | self.assertIsNot(weekday, None)
89 |
90 |
91 | class ImportTZTest(unittest.TestCase):
92 | """ Test that dateutil.tz related imports work properly """
93 | def testTzDirect(self):
94 | import dateutil.tz
95 |
96 | def testTzFrom(self):
97 | from dateutil import tz
98 |
99 | def testTzAll(self):
100 | from dateutil.tz import tzutc
101 | from dateutil.tz import tzoffset
102 | from dateutil.tz import tzlocal
103 | from dateutil.tz import tzfile
104 | from dateutil.tz import tzrange
105 | from dateutil.tz import tzstr
106 | from dateutil.tz import tzical
107 | from dateutil.tz import gettz
108 | from dateutil.tz import tzwin
109 | from dateutil.tz import tzwinlocal
110 |
111 | tz_all = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
112 | "tzstr", "tzical", "gettz"]
113 |
114 | tz_all += ["tzwin", "tzwinlocal"] if sys.platform.startswith("win") else []
115 | lvars = locals()
116 |
117 | for var in tz_all:
118 | self.assertIsNot(lvars[var], None)
119 |
120 |
121 | @unittest.skipUnless(sys.platform.startswith('win'), "Requires Windows")
122 | class ImportTZWinTest(unittest.TestCase):
123 | """ Test that dateutil.tzwin related imports work properly """
124 | def testTzwinDirect(self):
125 | import dateutil.tzwin
126 |
127 | def testTzwinFrom(self):
128 | from dateutil import tzwin
129 |
130 | def testTzwinStar(self):
131 | tzwin_all = ["tzwin", "tzwinlocal"]
132 |
133 |
134 | class ImportZoneInfoTest(unittest.TestCase):
135 | def testZoneinfoDirect(self):
136 | import dateutil.zoneinfo
137 |
138 | def testZoneinfoFrom(self):
139 | from dateutil import zoneinfo
140 |
141 | def testZoneinfoStar(self):
142 | from dateutil.zoneinfo import gettz
143 | from dateutil.zoneinfo import gettz_db_metadata
144 | from dateutil.zoneinfo import rebuild
145 |
146 | zi_all = (gettz, gettz_db_metadata, rebuild)
147 |
148 | for var in zi_all:
149 | self.assertIsNot(var, None)
150 |
--------------------------------------------------------------------------------
/site-packages/dateutil/tz/__init__.py:
--------------------------------------------------------------------------------
1 | from .tz import *
2 |
3 | __all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
4 | "tzstr", "tzical", "tzwin", "tzwinlocal", "gettz"]
5 |
--------------------------------------------------------------------------------
/site-packages/dateutil/tzwin.py:
--------------------------------------------------------------------------------
1 | # tzwin has moved to dateutil.tz.win
2 | from .tz.win import *
--------------------------------------------------------------------------------
/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ywjt/Dshield/ef5b58d99bc479b92a97404b49d057d503e1a368/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz
--------------------------------------------------------------------------------
/site-packages/dateutil/zoneinfo/rebuild.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import tempfile
4 | import shutil
5 | import json
6 | from subprocess import check_call
7 |
8 | from dateutil.zoneinfo import tar_open, METADATA_FN, ZONEFILENAME
9 |
10 |
11 | def rebuild(filename, tag=None, format="gz", zonegroups=[], metadata=None):
12 | """Rebuild the internal timezone info in dateutil/zoneinfo/zoneinfo*tar*
13 |
14 | filename is the timezone tarball from ftp.iana.org/tz.
15 |
16 | """
17 | tmpdir = tempfile.mkdtemp()
18 | zonedir = os.path.join(tmpdir, "zoneinfo")
19 | moduledir = os.path.dirname(__file__)
20 | try:
21 | with tar_open(filename) as tf:
22 | for name in zonegroups:
23 | tf.extract(name, tmpdir)
24 | filepaths = [os.path.join(tmpdir, n) for n in zonegroups]
25 | try:
26 | check_call(["zic", "-d", zonedir] + filepaths)
27 | except OSError as e:
28 | _print_on_nosuchfile(e)
29 | raise
30 | # write metadata file
31 | with open(os.path.join(zonedir, METADATA_FN), 'w') as f:
32 | json.dump(metadata, f, indent=4, sort_keys=True)
33 | target = os.path.join(moduledir, ZONEFILENAME)
34 | with tar_open(target, "w:%s" % format) as tf:
35 | for entry in os.listdir(zonedir):
36 | entrypath = os.path.join(zonedir, entry)
37 | tf.add(entrypath, entry)
38 | finally:
39 | shutil.rmtree(tmpdir)
40 |
41 | def _print_on_nosuchfile(e):
42 | """Print helpful troubleshooting message
43 |
44 | e is an exception raised by subprocess.check_call()
45 |
46 | """
47 | if e.errno == 2:
48 | logging.error(
49 | "Could not find zic. Perhaps you need to install "
50 | "libc-bin or some other package that provides it, "
51 | "or it's not in your PATH?")
52 |
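# Illustrative call (the file name, zone group names and metadata dict are assumptions,
# not values taken from this repository); zonegroups are member names inside the IANA
# tzdata tarball that get extracted and compiled with zic:
#
#     rebuild("tzdata-2016j.tar.gz",
#             zonegroups=["europe", "asia", "africa", "northamerica", "southamerica"],
#             metadata={"tzversion": "2016j"})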
--------------------------------------------------------------------------------
/site-packages/influxdb.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
1 | LICENSE
2 | MANIFEST.in
3 | README.rst
4 | dev-requirements.txt
5 | requirements.txt
6 | setup.cfg
7 | setup.py
8 | test-requirements.txt
9 | influxdb/__init__.py
10 | influxdb/_dataframe_client.py
11 | influxdb/chunked_json.py
12 | influxdb/client.py
13 | influxdb/dataframe_client.py
14 | influxdb/exceptions.py
15 | influxdb/helper.py
16 | influxdb/line_protocol.py
17 | influxdb/resultset.py
18 | influxdb.egg-info/PKG-INFO
19 | influxdb.egg-info/SOURCES.txt
20 | influxdb.egg-info/dependency_links.txt
21 | influxdb.egg-info/requires.txt
22 | influxdb.egg-info/top_level.txt
23 | influxdb/influxdb08/__init__.py
24 | influxdb/influxdb08/chunked_json.py
25 | influxdb/influxdb08/client.py
26 | influxdb/influxdb08/dataframe_client.py
27 | influxdb/influxdb08/helper.py
28 | influxdb/tests/__init__.py
29 | influxdb/tests/chunked_json_test.py
30 | influxdb/tests/client_test.py
31 | influxdb/tests/dataframe_client_test.py
32 | influxdb/tests/helper_test.py
33 | influxdb/tests/misc.py
34 | influxdb/tests/resultset_test.py
35 | influxdb/tests/test_line_protocol.py
36 | influxdb/tests/influxdb08/__init__.py
37 | influxdb/tests/influxdb08/client_test.py
38 | influxdb/tests/influxdb08/dataframe_client_test.py
39 | influxdb/tests/influxdb08/helper_test.py
40 | influxdb/tests/server_tests/__init__.py
41 | influxdb/tests/server_tests/base.py
42 | influxdb/tests/server_tests/client_test_with_server.py
43 | influxdb/tests/server_tests/influxdb_instance.py
--------------------------------------------------------------------------------
/site-packages/influxdb.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/site-packages/influxdb.egg-info/requires.txt:
--------------------------------------------------------------------------------
1 | python-dateutil>=2.0.0
2 | pytz
3 | requests>=1.0.3
4 | six>=1.9.0
5 |
6 | [test]
7 | nose
8 | nose-cov
9 | mock
10 | requests-mock
11 |
--------------------------------------------------------------------------------
/site-packages/influxdb.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | influxdb
2 |
--------------------------------------------------------------------------------
/site-packages/influxdb/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | from .client import InfluxDBClient
9 | from .dataframe_client import DataFrameClient
10 | from .helper import SeriesHelper
11 |
12 |
13 | __all__ = [
14 | 'InfluxDBClient',
15 | 'DataFrameClient',
16 | 'SeriesHelper',
17 | ]
18 |
19 |
20 | __version__ = '4.0.0'
21 |
--------------------------------------------------------------------------------
/site-packages/influxdb/chunked_json.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | #
4 | # Author: Adrian Sampson
5 | # Source: https://gist.github.com/sampsyo/920215
6 | #
7 |
8 | from __future__ import absolute_import
9 | from __future__ import division
10 | from __future__ import print_function
11 | from __future__ import unicode_literals
12 |
13 | import json
14 |
15 | _decoder = json.JSONDecoder()
16 |
17 |
18 | def loads(s):
19 | """A generator reading a sequence of JSON values from a string."""
20 | while s:
21 | s = s.strip()
22 | obj, pos = _decoder.raw_decode(s)
23 | if not pos:
24 | raise ValueError('no JSON object found at %i' % pos)
25 | yield obj
26 | s = s[pos:]
27 |
--------------------------------------------------------------------------------
/site-packages/influxdb/dataframe_client.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | DataFrame client for InfluxDB
4 | """
5 | from __future__ import absolute_import
6 | from __future__ import division
7 | from __future__ import print_function
8 | from __future__ import unicode_literals
9 |
10 | __all__ = ['DataFrameClient']
11 |
12 | try:
13 | import pandas
14 | del pandas
15 | except ImportError as err:
16 | from .client import InfluxDBClient
17 |
18 | class DataFrameClient(InfluxDBClient):
19 | err = err
20 |
21 | def __init__(self, *a, **kw):
22 | raise ImportError("DataFrameClient requires Pandas "
23 | "which couldn't be imported: %s" % self.err)
24 | else:
25 | from ._dataframe_client import DataFrameClient
26 |
--------------------------------------------------------------------------------
/site-packages/influxdb/exceptions.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | from __future__ import division
3 | from __future__ import print_function
4 | from __future__ import unicode_literals
5 |
6 |
7 | class InfluxDBClientError(Exception):
8 | """Raised when an error occurs in the request."""
9 | def __init__(self, content, code=None):
10 | if isinstance(content, type(b'')):
11 | content = content.decode('UTF-8', 'replace')
12 |
13 | if code is not None:
14 | message = "%s: %s" % (code, content)
15 | else:
16 | message = content
17 |
18 | super(InfluxDBClientError, self).__init__(
19 | message
20 | )
21 | self.content = content
22 | self.code = code
23 |
24 |
25 | class InfluxDBServerError(Exception):
26 | """Raised when a server error occurs."""
27 | def __init__(self, content):
28 | super(InfluxDBServerError, self).__init__(content)
29 |
--------------------------------------------------------------------------------
/site-packages/influxdb/influxdb08/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | from .client import InfluxDBClient
9 | from .dataframe_client import DataFrameClient
10 | from .helper import SeriesHelper
11 |
12 |
13 | __all__ = [
14 | 'InfluxDBClient',
15 | 'DataFrameClient',
16 | 'SeriesHelper',
17 | ]
18 |
--------------------------------------------------------------------------------
/site-packages/influxdb/influxdb08/chunked_json.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | #
4 | # Author: Adrian Sampson
5 | # Source: https://gist.github.com/sampsyo/920215
6 | #
7 |
8 | from __future__ import absolute_import
9 | from __future__ import division
10 | from __future__ import print_function
11 | from __future__ import unicode_literals
12 |
13 | import json
14 |
15 | _decoder = json.JSONDecoder()
16 |
17 |
18 | def loads(s):
19 | """A generator reading a sequence of JSON values from a string."""
20 | while s:
21 | s = s.strip()
22 | obj, pos = _decoder.raw_decode(s)
23 | if not pos:
24 | raise ValueError('no JSON object found at %i' % pos)
25 | yield obj
26 | s = s[pos:]
27 |
--------------------------------------------------------------------------------
/site-packages/influxdb/influxdb08/helper.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Helper class for InfluxDB
4 | """
5 | from __future__ import absolute_import
6 | from __future__ import division
7 | from __future__ import print_function
8 | from __future__ import unicode_literals
9 |
10 | from collections import namedtuple, defaultdict
11 | from warnings import warn
12 |
13 | import six
14 |
15 |
16 | class SeriesHelper(object):
17 |
18 | """
19 | Subclassing this helper eases writing data points in bulk.
20 | All data points are immutable, ensuring they do not get overwritten.
21 | Each subclass can write to its own database.
22 | The time series names can also be based on one or more defined fields.
23 |
24 | Annotated example::
25 |
26 | class MySeriesHelper(SeriesHelper):
27 | class Meta:
28 | # Meta class stores time series helper configuration.
29 | series_name = 'events.stats.{server_name}'
30 | # Series name must be a string, curly brackets for dynamic use.
31 | fields = ['time', 'server_name']
32 | # Defines all the fields in this time series.
33 | ### Following attributes are optional. ###
34 | client = TestSeriesHelper.client
35 | # Client should be an instance of InfluxDBClient.
36 | :warning: Only used if autocommit is True.
37 | bulk_size = 5
38 | # Defines the number of data points to write simultaneously.
39 | # Only applicable if autocommit is True.
40 | autocommit = True
41 | # If True and no bulk_size, then will set bulk_size to 1.
42 |
43 | """
44 | __initialized__ = False
45 |
46 | def __new__(cls, *args, **kwargs):
47 | """
48 | Initializes class attributes for subsequent constructor calls.
49 |
50 | :note: *args and **kwargs are not explicitly used in this function,
51 | but needed for Python 2 compatibility.
52 | """
53 | if not cls.__initialized__:
54 | cls.__initialized__ = True
55 | try:
56 | _meta = getattr(cls, 'Meta')
57 | except AttributeError:
58 | raise AttributeError(
59 | 'Missing Meta class in {0}.'.format(
60 | cls.__name__))
61 |
62 | for attr in ['series_name', 'fields']:
63 | try:
64 | setattr(cls, '_' + attr, getattr(_meta, attr))
65 | except AttributeError:
66 | raise AttributeError(
67 | 'Missing {0} in {1} Meta class.'.format(
68 | attr,
69 | cls.__name__))
70 |
71 | cls._autocommit = getattr(_meta, 'autocommit', False)
72 |
73 | cls._client = getattr(_meta, 'client', None)
74 | if cls._autocommit and not cls._client:
75 | raise AttributeError(
76 | 'In {0}, autocommit is set to True, but no client is set.'
77 | .format(cls.__name__))
78 |
79 | try:
80 | cls._bulk_size = getattr(_meta, 'bulk_size')
81 | if cls._bulk_size < 1 and cls._autocommit:
82 | warn(
83 | 'Definition of bulk_size in {0} forced to 1, '
84 | 'was less than 1.'.format(cls.__name__))
85 | cls._bulk_size = 1
86 | except AttributeError:
87 | cls._bulk_size = -1
88 | else:
89 | if not cls._autocommit:
90 | warn(
91 | 'Definition of bulk_size in {0} has no effect because'
92 | ' autocommit is false.'.format(cls.__name__))
93 |
94 | cls._datapoints = defaultdict(list)
95 | cls._type = namedtuple(cls.__name__, cls._fields)
96 |
97 | return super(SeriesHelper, cls).__new__(cls)
98 |
99 | def __init__(self, **kw):
100 | """
101 | Constructor call creates a new data point. All fields must be present.
102 |
103 | :note: Data points written when `bulk_size` is reached per Helper.
104 | :warning: Data points are *immutable* (`namedtuples`).
105 | """
106 | cls = self.__class__
107 |
108 | if sorted(cls._fields) != sorted(kw.keys()):
109 | raise NameError(
110 | 'Expected {0}, got {1}.'.format(
111 | cls._fields,
112 | kw.keys()))
113 |
114 | cls._datapoints[cls._series_name.format(**kw)].append(cls._type(**kw))
115 |
116 | if cls._autocommit and \
117 | sum(len(series) for series in cls._datapoints.values()) \
118 | >= cls._bulk_size:
119 | cls.commit()
120 |
121 | @classmethod
122 | def commit(cls, client=None):
123 | """
124 | Commit everything from datapoints via the client.
125 |
126 | :param client: InfluxDBClient instance for writing points to InfluxDB.
127 | :attention: any provided client will supersede the class client.
128 | :return: result of client.write_points.
129 | """
130 | if not client:
131 | client = cls._client
132 | rtn = client.write_points(cls._json_body_())
133 | cls._reset_()
134 | return rtn
135 |
136 | @classmethod
137 | def _json_body_(cls):
138 | """
139 | :return: JSON body of these datapoints.
140 | """
141 | json = []
142 | for series_name, data in six.iteritems(cls._datapoints):
143 | json.append({'name': series_name,
144 | 'columns': cls._fields,
145 | 'points': [[getattr(point, k) for k in cls._fields]
146 | for point in data]
147 | })
148 | return json
149 |
150 | @classmethod
151 | def _reset_(cls):
152 | """
153 | Reset data storage.
154 | """
155 | cls._datapoints = defaultdict(list)
156 |
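# Illustrative usage, building on the annotated MySeriesHelper example in the class
# docstring (the server name and the client instance are placeholders):
#
#     MySeriesHelper(server_name='us.east-1', time=1478745362)
#     MySeriesHelper(server_name='us.east-1', time=1478745363)
#     # Points are buffered per series; with autocommit they are flushed once
#     # bulk_size is reached, otherwise flush explicitly:
#     MySeriesHelper.commit(client=my_influxdb_client)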
--------------------------------------------------------------------------------
/site-packages/influxdb/line_protocol.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | from copy import copy
9 | from datetime import datetime
10 | from numbers import Integral
11 |
12 | from pytz import UTC
13 | from dateutil.parser import parse
14 | from six import binary_type, text_type, integer_types, PY2
15 |
16 | EPOCH = UTC.localize(datetime.utcfromtimestamp(0))
17 |
18 |
19 | def _convert_timestamp(timestamp, precision=None):
20 | if isinstance(timestamp, Integral):
21 | return timestamp # assume precision is correct if timestamp is int
22 | if isinstance(_get_unicode(timestamp), text_type):
23 | timestamp = parse(timestamp)
24 | if isinstance(timestamp, datetime):
25 | if not timestamp.tzinfo:
26 | timestamp = UTC.localize(timestamp)
27 | ns = (timestamp - EPOCH).total_seconds() * 1e9
28 | if precision is None or precision == 'n':
29 | return ns
30 | elif precision == 'u':
31 | return ns / 1e3
32 | elif precision == 'ms':
33 | return ns / 1e6
34 | elif precision == 's':
35 | return ns / 1e9
36 | elif precision == 'm':
37 | return ns / 1e9 / 60
38 | elif precision == 'h':
39 | return ns / 1e9 / 3600
40 | raise ValueError(timestamp)
41 |
42 |
43 | def _escape_tag(tag):
44 | tag = _get_unicode(tag, force=True)
45 | return tag.replace(
46 | "\\", "\\\\"
47 | ).replace(
48 | " ", "\\ "
49 | ).replace(
50 | ",", "\\,"
51 | ).replace(
52 | "=", "\\="
53 | )
54 |
55 |
56 | def quote_ident(value):
57 | return "\"{0}\"".format(
58 | value.replace(
59 | "\\", "\\\\"
60 | ).replace(
61 | "\"", "\\\""
62 | ).replace(
63 | "\n", "\\n"
64 | )
65 | )
66 |
67 |
68 | def quote_literal(value):
69 | return "'{0}'".format(
70 | value.replace(
71 | "\\", "\\\\"
72 | ).replace(
73 | "'", "\\'"
74 | )
75 | )
76 |
77 |
78 | def _escape_value(value):
79 | value = _get_unicode(value)
80 | if isinstance(value, text_type) and value != '':
81 | return quote_ident(value)
82 | elif isinstance(value, integer_types) and not isinstance(value, bool):
83 | return str(value) + 'i'
84 | else:
85 | return str(value)
86 |
87 |
88 | def _get_unicode(data, force=False):
89 | """
90 | Try to return a text aka unicode object from the given data.
91 | """
92 | if isinstance(data, binary_type):
93 | return data.decode('utf-8')
94 | elif data is None:
95 | return ''
96 | elif force:
97 | if PY2:
98 | return unicode(data)
99 | else:
100 | return str(data)
101 | else:
102 | return data
103 |
104 |
105 | def make_lines(data, precision=None):
106 | """
107 | Extracts the points from the given dict and returns a Unicode string
108 | matching the line protocol introduced in InfluxDB 0.9.0.
109 | """
110 | lines = []
111 | static_tags = data.get('tags', None)
112 | for point in data['points']:
113 | elements = []
114 |
115 | # add measurement name
116 | measurement = _escape_tag(_get_unicode(
117 | point.get('measurement', data.get('measurement'))
118 | ))
119 | key_values = [measurement]
120 |
121 | # add tags
122 | if static_tags is None:
123 | tags = point.get('tags', {})
124 | else:
125 | tags = copy(static_tags)
126 | tags.update(point.get('tags', {}))
127 |
128 | # tags should be sorted client-side to take load off server
129 | for tag_key in sorted(tags.keys()):
130 | key = _escape_tag(tag_key)
131 | value = _escape_tag(tags[tag_key])
132 |
133 | if key != '' and value != '':
134 | key_values.append("{key}={value}".format(key=key, value=value))
135 | key_values = ','.join(key_values)
136 | elements.append(key_values)
137 |
138 | # add fields
139 | field_values = []
140 | for field_key in sorted(point['fields'].keys()):
141 | key = _escape_tag(field_key)
142 | value = _escape_value(point['fields'][field_key])
143 | if key != '' and value != '':
144 | field_values.append("{key}={value}".format(
145 | key=key,
146 | value=value
147 | ))
148 | field_values = ','.join(field_values)
149 | elements.append(field_values)
150 |
151 | # add timestamp
152 | if 'time' in point:
153 | timestamp = _get_unicode(str(int(
154 | _convert_timestamp(point['time'], precision)
155 | )))
156 | elements.append(timestamp)
157 |
158 | line = ' '.join(elements)
159 | lines.append(line)
160 | lines = '\n'.join(lines)
161 | return lines + '\n'
162 |
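# Worked example (illustrative): a single point serialised by make_lines().
#
#     make_lines({
#         'tags': {'host': 'server01'},
#         'points': [{
#             'measurement': 'cpu_load_short',
#             'fields': {'value': 0.64},
#             'time': '2009-11-10T23:00:00Z',
#         }],
#     })
#     # returns: 'cpu_load_short,host=server01 value=0.64 1257894000000000000\n'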
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | import sys
9 | import os
10 |
11 | import unittest
12 |
13 | using_pypy = hasattr(sys, "pypy_version_info")
14 | skipIfPYpy = unittest.skipIf(using_pypy, "Skipping this test on pypy.")
15 |
16 | _skip_server_tests = os.environ.get(
17 | 'INFLUXDB_PYTHON_SKIP_SERVER_TESTS',
18 | None) == 'True'
19 | skipServerTests = unittest.skipIf(_skip_server_tests,
20 | "Skipping server tests...")
21 |
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/chunked_json_test.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | from influxdb import chunked_json
9 |
10 | import unittest
11 |
12 |
13 | class TestChunkJson(unittest.TestCase):
14 |
15 | @classmethod
16 | def setUpClass(cls):
17 | super(TestChunkJson, cls).setUpClass()
18 |
19 | def test_load(self):
20 | """
21 | Tests reading a sequence of JSON values from a string
22 | """
23 | example_response = \
24 | '{"results": [{"series": [{"measurement": "sdfsdfsdf", ' \
25 | '"columns": ["time", "value"], "values": ' \
26 | '[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": ' \
27 | '[{"measurement": "cpu_load_short", "columns": ["time", "value"],'\
28 | '"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
29 |
30 | res = list(chunked_json.loads(example_response))
31 | # import ipdb; ipdb.set_trace()
32 |
33 | # self.assertTrue(res)
34 | self.assertListEqual(
35 | [
36 | {
37 | 'results': [
38 | {'series': [{
39 | 'values': [['2009-11-10T23:00:00Z', 0.64]],
40 | 'measurement': 'sdfsdfsdf',
41 | 'columns':
42 | ['time', 'value']}]},
43 | {'series': [{
44 | 'values': [['2009-11-10T23:00:00Z', 0.64]],
45 | 'measurement': 'cpu_load_short',
46 | 'columns': ['time', 'value']}]}
47 | ]
48 | }
49 | ],
50 | res
51 | )
52 |
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/influxdb08/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/misc.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | import socket
9 |
10 |
11 | def get_free_ports(num_ports, ip='127.0.0.1'):
12 | """Get `num_ports` free/available ports on the interface linked to the `ip´
13 | :param int num_ports: The number of free ports to get
14 | :param str ip: The ip on which the ports have to be taken
15 | :return: a set of ports number
16 | """
17 | sock_ports = []
18 | ports = set()
19 | try:
20 | for _ in range(num_ports):
21 | sock = socket.socket()
22 | cur = [sock, -1]
23 | # append the socket directly,
24 | # so that it'll be also closed (no leaked resource)
25 | # in the finally here after.
26 | sock_ports.append(cur)
27 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
28 | sock.bind((ip, 0))
29 | cur[1] = sock.getsockname()[1]
30 | finally:
31 | for sock, port in sock_ports:
32 | sock.close()
33 | ports.add(port)
34 | assert num_ports == len(ports)
35 | return ports
36 |
37 |
38 | def is_port_open(port, ip='127.0.0.1'):
39 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
40 | try:
41 | result = sock.connect_ex((ip, port))
42 | if not result:
43 | sock.shutdown(socket.SHUT_RDWR)
44 | return result == 0
45 | finally:
46 | sock.close()
47 |
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/resultset_test.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | import unittest
9 |
10 | from influxdb.exceptions import InfluxDBClientError
11 | from influxdb.resultset import ResultSet
12 |
13 |
14 | class TestResultSet(unittest.TestCase):
15 |
16 | def setUp(self):
17 | self.query_response = {
18 | "results": [
19 | {"series": [{"measurement": "cpu_load_short",
20 | "tags": {"host": "server01",
21 | "region": "us-west"},
22 | "columns": ["time", "value"],
23 | "values": [
24 | ["2015-01-29T21:51:28.968422294Z", 0.64]
25 | ]},
26 | {"measurement": "cpu_load_short",
27 | "tags": {"host": "server02",
28 | "region": "us-west"},
29 | "columns": ["time", "value"],
30 | "values": [
31 | ["2015-01-29T21:51:28.968422294Z", 0.65]
32 | ]},
33 | {"measurement": "other_serie",
34 | "tags": {"host": "server01",
35 | "region": "us-west"},
36 | "columns": ["time", "value"],
37 | "values": [
38 | ["2015-01-29T21:51:28.968422294Z", 0.66]
39 | ]}]}
40 | ]
41 | }
42 | self.rs = ResultSet(self.query_response['results'][0])
43 |
44 | def test_filter_by_name(self):
45 | expected = [
46 | {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'},
47 | {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}
48 | ]
49 |
50 | self.assertEqual(expected, list(self.rs['cpu_load_short']))
51 | self.assertEqual(expected,
52 | list(self.rs.get_points(
53 | measurement='cpu_load_short')))
54 |
55 | def test_filter_by_tags(self):
56 | expected = [
57 | {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64},
58 | {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.66}
59 | ]
60 |
61 | self.assertEqual(
62 | expected,
63 | list(self.rs[{"host": "server01"}])
64 | )
65 |
66 | self.assertEqual(
67 | expected,
68 | list(self.rs.get_points(tags={'host': 'server01'}))
69 | )
70 |
71 | def test_filter_by_name_and_tags(self):
72 | self.assertEqual(
73 | list(self.rs[('cpu_load_short', {"host": "server01"})]),
74 | [{'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}]
75 | )
76 |
77 | self.assertEqual(
78 | list(self.rs[('cpu_load_short', {"region": "us-west"})]),
79 | [
80 | {'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'},
81 | {'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}
82 | ]
83 | )
84 |
85 | def test_keys(self):
86 | self.assertEqual(
87 | self.rs.keys(),
88 | [
89 | ('cpu_load_short', {'host': 'server01', 'region': 'us-west'}),
90 | ('cpu_load_short', {'host': 'server02', 'region': 'us-west'}),
91 | ('other_serie', {'host': 'server01', 'region': 'us-west'})
92 | ]
93 | )
94 |
95 | def test_len(self):
96 | self.assertEqual(
97 | len(self.rs),
98 | 3
99 | )
100 |
101 | def test_items(self):
102 | items = list(self.rs.items())
103 | items_lists = [(item[0], list(item[1])) for item in items]
104 |
105 | self.assertEqual(
106 | items_lists,
107 | [
108 | (
109 | ('cpu_load_short',
110 | {'host': 'server01', 'region': 'us-west'}),
111 | [{'value': 0.64, 'time': '2015-01-29T21:51:28.968422294Z'}]
112 | ),
113 | (
114 | ('cpu_load_short',
115 | {'host': 'server02', 'region': 'us-west'}),
116 | [{'value': 0.65, 'time': '2015-01-29T21:51:28.968422294Z'}]
117 | ),
118 | (
119 | ('other_serie',
120 | {'host': 'server01', 'region': 'us-west'}),
121 | [{'value': 0.66, 'time': '2015-01-29T21:51:28.968422294Z'}]
122 | )
123 | ]
124 | )
125 |
126 | def test_point_from_cols_vals(self):
127 | cols = ['col1', 'col2']
128 | vals = [1, '2']
129 |
130 | point = ResultSet.point_from_cols_vals(cols, vals)
131 | self.assertDictEqual(
132 | point,
133 | {'col1': 1, 'col2': '2'}
134 | )
135 |
136 | def test_system_query(self):
137 | rs = ResultSet(
138 | {'series': [
139 | {'values': [['another', '48h0m0s', 3, False],
140 | ['default', '0', 1, False],
141 | ['somename', '24h0m0s', 4, True]],
142 | 'columns': ['name', 'duration',
143 | 'replicaN', 'default']}]}
144 | )
145 |
146 | self.assertEqual(
147 | rs.keys(),
148 | [('results', None)]
149 | )
150 |
151 | self.assertEqual(
152 | list(rs['results']),
153 | [
154 | {'duration': '48h0m0s', 'default': False, 'replicaN': 3,
155 | 'name': 'another'},
156 | {'duration': '0', 'default': False, 'replicaN': 1,
157 | 'name': 'default'},
158 | {'duration': '24h0m0s', 'default': True, 'replicaN': 4,
159 | 'name': 'somename'}
160 | ]
161 | )
162 |
163 | def test_resultset_error(self):
164 | with self.assertRaises(InfluxDBClientError):
165 | ResultSet({
166 | "series": [],
167 | "error": "Big error, many problems."
168 | })
169 |
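The test_point_from_cols_vals case above depends on pairing a column list with a value list position by position; a minimal sketch of that pairing (not the library's own code) is a zip into a dict:

    cols = ['time', 'value']
    vals = ['2015-01-29T21:51:28.968422294Z', 0.64]

    # Pair each column name with the value at the same index.
    point = dict(zip(cols, vals))
    print(point)  # {'time': '2015-01-29T21:51:28.968422294Z', 'value': 0.64}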
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/server_tests/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 |
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/server_tests/base.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | import sys
9 |
10 | from influxdb.tests import using_pypy
11 | from influxdb.tests.server_tests.influxdb_instance import InfluxDbInstance
12 |
13 | from influxdb.client import InfluxDBClient
14 |
15 | if not using_pypy:
16 | from influxdb.dataframe_client import DataFrameClient
17 |
18 |
19 | def _setup_influxdb_server(inst):
20 | inst.influxd_inst = InfluxDbInstance(
21 | inst.influxdb_template_conf,
22 | udp_enabled=getattr(inst, 'influxdb_udp_enabled', False),
23 | )
24 |
25 | inst.cli = InfluxDBClient('localhost',
26 | inst.influxd_inst.http_port,
27 | 'root',
28 | '',
29 | database='db')
30 | if not using_pypy:
31 | inst.cliDF = DataFrameClient('localhost',
32 | inst.influxd_inst.http_port,
33 | 'root',
34 | '',
35 | database='db')
36 |
37 |
38 | def _teardown_influxdb_server(inst):
39 | remove_tree = sys.exc_info() == (None, None, None)
40 | inst.influxd_inst.close(remove_tree=remove_tree)
41 |
42 |
43 | class SingleTestCaseWithServerMixin(object):
44 | ''' A mixin for unittest.TestCase to start an InfluxDB server instance
45 | in a temporary directory **for each test function/case**
46 | '''
47 |
48 | # 'influxdb_template_conf' attribute must be set
49 | # on the TestCase class or instance.
50 |
51 | setUp = _setup_influxdb_server
52 | tearDown = _teardown_influxdb_server
53 |
54 |
55 | class ManyTestCasesWithServerMixin(object):
56 | ''' Same as SingleTestCaseWithServerMixin,
57 | but creates a single instance for the whole class.
58 | Also pre-creates a fresh database: 'db'.
59 | '''
60 |
61 | # 'influxdb_template_conf' attribute must be set on the class itself!
62 |
63 | @classmethod
64 | def setUpClass(cls):
65 | _setup_influxdb_server(cls)
66 |
67 | def setUp(self):
68 | self.cli.create_database('db')
69 |
70 | @classmethod
71 | def tearDownClass(cls):
72 | _teardown_influxdb_server(cls)
73 |
74 | def tearDown(self):
75 | self.cli.drop_database('db')
76 |
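For illustration, a hypothetical test case combining ManyTestCasesWithServerMixin with unittest.TestCase; the class name and the influxdb_template_conf path are made up for the example, and running it requires a local influxd binary.

    import unittest

    from influxdb.tests.server_tests.base import ManyTestCasesWithServerMixin

    class ExampleServerTest(ManyTestCasesWithServerMixin, unittest.TestCase):

        # Must be set on the class itself; illustrative path.
        influxdb_template_conf = 'influxdb.conf.template'

        def test_list_databases(self):
            # self.cli is the InfluxDBClient created by _setup_influxdb_server.
            self.assertIn({'name': 'db'}, self.cli.get_list_database())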
--------------------------------------------------------------------------------
/site-packages/influxdb/tests/test_line_protocol.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from __future__ import absolute_import
4 | from __future__ import division
5 | from __future__ import print_function
6 | from __future__ import unicode_literals
7 |
8 | from datetime import datetime
9 | import unittest
10 | from pytz import UTC, timezone
11 |
12 | from influxdb import line_protocol
13 |
14 |
15 | class TestLineProtocol(unittest.TestCase):
16 |
17 | def test_make_lines(self):
18 | data = {
19 | "tags": {
20 | "empty_tag": "",
21 | "none_tag": None,
22 | "integer_tag": 2,
23 | "string_tag": "hello"
24 | },
25 | "points": [
26 | {
27 | "measurement": "test",
28 | "fields": {
29 | "string_val": "hello!",
30 | "int_val": 1,
31 | "float_val": 1.1,
32 | "none_field": None,
33 | "bool_val": True,
34 | }
35 | }
36 | ]
37 | }
38 |
39 | self.assertEqual(
40 | line_protocol.make_lines(data),
41 | 'test,integer_tag=2,string_tag=hello '
42 | 'bool_val=True,float_val=1.1,int_val=1i,string_val="hello!"\n'
43 | )
44 |
45 | def test_timezone(self):
46 | dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
47 | utc = UTC.localize(dt)
48 | berlin = timezone('Europe/Berlin').localize(dt)
49 | eastern = berlin.astimezone(timezone('US/Eastern'))
50 | data = {
51 | "points": [
52 | {"measurement": "A", "fields": {"val": 1},
53 | "time": 0},
54 | {"measurement": "A", "fields": {"val": 1},
55 | "time": "2009-11-10T23:00:00.123456Z"},
56 | {"measurement": "A", "fields": {"val": 1}, "time": dt},
57 | {"measurement": "A", "fields": {"val": 1}, "time": utc},
58 | {"measurement": "A", "fields": {"val": 1}, "time": berlin},
59 | {"measurement": "A", "fields": {"val": 1}, "time": eastern},
60 | ]
61 | }
62 | self.assertEqual(
63 | line_protocol.make_lines(data),
64 | '\n'.join([
65 | 'A val=1i 0',
66 | 'A val=1i 1257894000123456000',
67 | 'A val=1i 1257894000123456000',
68 | 'A val=1i 1257894000123456000',
69 | 'A val=1i 1257890400123456000',
70 | 'A val=1i 1257890400123456000',
71 | ]) + '\n'
72 | )
73 |
74 | def test_string_val_newline(self):
75 | data = {
76 | "points": [
77 | {
78 | "measurement": "m1",
79 | "fields": {
80 | "multi_line": "line1\nline1\nline3"
81 | }
82 | }
83 | ]
84 | }
85 |
86 | self.assertEqual(
87 | line_protocol.make_lines(data),
88 | 'm1 multi_line="line1\\nline1\\nline3"\n'
89 | )
90 |
91 | def test_make_lines_unicode(self):
92 | data = {
93 | "tags": {
94 | "unicode_tag": "\'Привет!\'" # Hello! in Russian
95 | },
96 | "points": [
97 | {
98 | "measurement": "test",
99 | "fields": {
100 | "unicode_val": "Привет!", # Hello! in Russian
101 | }
102 | }
103 | ]
104 | }
105 |
106 | self.assertEqual(
107 | line_protocol.make_lines(data),
108 | 'test,unicode_tag=\'Привет!\' unicode_val="Привет!"\n'
109 | )
110 |
111 | def test_quote_ident(self):
112 | self.assertEqual(
113 | line_protocol.quote_ident(r"""\foo ' bar " Örf"""),
114 | r'''"\\foo ' bar \" Örf"'''
115 | )
116 |
117 | def test_quote_literal(self):
118 | self.assertEqual(
119 | line_protocol.quote_literal(r"""\foo ' bar " Örf"""),
120 | r"""'\\foo \' bar " Örf'"""
121 | )
122 |
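The nanosecond timestamps asserted in test_timezone come from converting each datetime to nanoseconds since the Unix epoch. The snippet below reproduces the UTC case with plain integer arithmetic; it is an illustration of the expected numbers, not the line_protocol implementation (the Berlin and Eastern values are one hour earlier simply because of the UTC+1 offset).

    from datetime import datetime

    import pytz

    EPOCH = pytz.UTC.localize(datetime(1970, 1, 1))

    def to_nanoseconds(ts):
        """Convert an aware datetime to integer nanoseconds since the epoch."""
        delta = ts - EPOCH
        seconds = delta.days * 86400 + delta.seconds
        # Keep exact integer arithmetic to avoid float rounding.
        return seconds * 10**9 + delta.microseconds * 10**3

    utc = pytz.UTC.localize(datetime(2009, 11, 10, 23, 0, 0, 123456))
    print(to_nanoseconds(utc))  # 1257894000123456000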
--------------------------------------------------------------------------------
/site-packages/python_dateutil.egg-info/PKG-INFO:
--------------------------------------------------------------------------------
1 | Metadata-Version: 1.1
2 | Name: python-dateutil
3 | Version: 2.6.0
4 | Summary: Extensions to the standard Python datetime module
5 | Home-page: https://dateutil.readthedocs.io
6 | Author: Paul Ganssle, Yaron de Leeuw
7 | Author-email: dateutil@python.org
8 | License: Simplified BSD
9 | Description:
10 | The dateutil module provides powerful extensions to the
11 | datetime module available in the Python standard library.
12 |
13 | Platform: UNKNOWN
14 | Classifier: Development Status :: 5 - Production/Stable
15 | Classifier: Intended Audience :: Developers
16 | Classifier: License :: OSI Approved :: BSD License
17 | Classifier: Programming Language :: Python
18 | Classifier: Programming Language :: Python :: 2
19 | Classifier: Programming Language :: Python :: 2.6
20 | Classifier: Programming Language :: Python :: 2.7
21 | Classifier: Programming Language :: Python :: 3
22 | Classifier: Programming Language :: Python :: 3.2
23 | Classifier: Programming Language :: Python :: 3.3
24 | Classifier: Programming Language :: Python :: 3.4
25 | Classifier: Programming Language :: Python :: 3.5
26 | Classifier: Programming Language :: Python :: 3.6
27 | Classifier: Topic :: Software Development :: Libraries
28 | Requires: six
29 |
--------------------------------------------------------------------------------
/site-packages/python_dateutil.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
1 | .gitattributes
2 | .gitignore
3 | .travis.yml
4 | LICENSE
5 | MANIFEST.in
6 | NEWS
7 | README.rst
8 | RELEASING
9 | appveyor.yml
10 | codecov.yml
11 | setup.cfg
12 | setup.py
13 | tox.ini
14 | updatezinfo.py
15 | zonefile_metadata.json
16 | ci_tools/pypy_upgrade.sh
17 | ci_tools/retry.bat
18 | ci_tools/retry.sh
19 | dateutil/__init__.py
20 | dateutil/_common.py
21 | dateutil/easter.py
22 | dateutil/parser.py
23 | dateutil/relativedelta.py
24 | dateutil/rrule.py
25 | dateutil/tzwin.py
26 | dateutil/test/__init__.py
27 | dateutil/test/_common.py
28 | dateutil/test/test_easter.py
29 | dateutil/test/test_imports.py
30 | dateutil/test/test_parser.py
31 | dateutil/test/test_relativedelta.py
32 | dateutil/test/test_rrule.py
33 | dateutil/test/test_tz.py
34 | dateutil/tz/__init__.py
35 | dateutil/tz/_common.py
36 | dateutil/tz/tz.py
37 | dateutil/tz/win.py
38 | dateutil/zoneinfo/__init__.py
39 | dateutil/zoneinfo/dateutil-zoneinfo.tar.gz
40 | dateutil/zoneinfo/rebuild.py
41 | docs/Makefile
42 | docs/conf.py
43 | docs/easter.rst
44 | docs/examples.rst
45 | docs/index.rst
46 | docs/make.bat
47 | docs/parser.rst
48 | docs/relativedelta.rst
49 | docs/rrule.rst
50 | docs/tz.rst
51 | docs/zoneinfo.rst
52 | docs/samples/EST5EDT.ics
53 | python_dateutil.egg-info/PKG-INFO
54 | python_dateutil.egg-info/SOURCES.txt
55 | python_dateutil.egg-info/dependency_links.txt
56 | python_dateutil.egg-info/requires.txt
57 | python_dateutil.egg-info/top_level.txt
58 | python_dateutil.egg-info/zip-safe
--------------------------------------------------------------------------------
/site-packages/python_dateutil.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/site-packages/python_dateutil.egg-info/requires.txt:
--------------------------------------------------------------------------------
1 | six >=1.5
2 |
--------------------------------------------------------------------------------
/site-packages/python_dateutil.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | dateutil
2 |
--------------------------------------------------------------------------------
/site-packages/python_dateutil.egg-info/zip-safe:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/site-packages/pytz-2016.10-py2.6.egg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ywjt/Dshield/ef5b58d99bc479b92a97404b49d057d503e1a368/site-packages/pytz-2016.10-py2.6.egg
--------------------------------------------------------------------------------
/site-packages/pytz.pth:
--------------------------------------------------------------------------------
1 | import sys; sys.__plen = len(sys.path)
2 | ./pytz-2016.10-py2.6.egg
3 | import sys; new = sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p = getattr(sys, '__egginsert', 0); sys.path[p:p] = new; sys.__egginsert = p + len(new)
4 |
--------------------------------------------------------------------------------
/site-packages/requests.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
1 | HISTORY.rst
2 | LICENSE
3 | MANIFEST.in
4 | NOTICE
5 | README.rst
6 | requirements.txt
7 | setup.py
8 | requests/__init__.py
9 | requests/_internal_utils.py
10 | requests/adapters.py
11 | requests/api.py
12 | requests/auth.py
13 | requests/cacert.pem
14 | requests/certs.py
15 | requests/compat.py
16 | requests/cookies.py
17 | requests/exceptions.py
18 | requests/hooks.py
19 | requests/models.py
20 | requests/sessions.py
21 | requests/status_codes.py
22 | requests/structures.py
23 | requests/utils.py
24 | requests.egg-info/PKG-INFO
25 | requests.egg-info/SOURCES.txt
26 | requests.egg-info/dependency_links.txt
27 | requests.egg-info/not-zip-safe
28 | requests.egg-info/requires.txt
29 | requests.egg-info/top_level.txt
30 | requests/packages/__init__.py
31 | requests/packages/chardet/__init__.py
32 | requests/packages/chardet/big5freq.py
33 | requests/packages/chardet/big5prober.py
34 | requests/packages/chardet/chardetect.py
35 | requests/packages/chardet/chardistribution.py
36 | requests/packages/chardet/charsetgroupprober.py
37 | requests/packages/chardet/charsetprober.py
38 | requests/packages/chardet/codingstatemachine.py
39 | requests/packages/chardet/compat.py
40 | requests/packages/chardet/constants.py
41 | requests/packages/chardet/cp949prober.py
42 | requests/packages/chardet/escprober.py
43 | requests/packages/chardet/escsm.py
44 | requests/packages/chardet/eucjpprober.py
45 | requests/packages/chardet/euckrfreq.py
46 | requests/packages/chardet/euckrprober.py
47 | requests/packages/chardet/euctwfreq.py
48 | requests/packages/chardet/euctwprober.py
49 | requests/packages/chardet/gb2312freq.py
50 | requests/packages/chardet/gb2312prober.py
51 | requests/packages/chardet/hebrewprober.py
52 | requests/packages/chardet/jisfreq.py
53 | requests/packages/chardet/jpcntx.py
54 | requests/packages/chardet/langbulgarianmodel.py
55 | requests/packages/chardet/langcyrillicmodel.py
56 | requests/packages/chardet/langgreekmodel.py
57 | requests/packages/chardet/langhebrewmodel.py
58 | requests/packages/chardet/langhungarianmodel.py
59 | requests/packages/chardet/langthaimodel.py
60 | requests/packages/chardet/latin1prober.py
61 | requests/packages/chardet/mbcharsetprober.py
62 | requests/packages/chardet/mbcsgroupprober.py
63 | requests/packages/chardet/mbcssm.py
64 | requests/packages/chardet/sbcharsetprober.py
65 | requests/packages/chardet/sbcsgroupprober.py
66 | requests/packages/chardet/sjisprober.py
67 | requests/packages/chardet/universaldetector.py
68 | requests/packages/chardet/utf8prober.py
69 | requests/packages/idna/__init__.py
70 | requests/packages/idna/codec.py
71 | requests/packages/idna/compat.py
72 | requests/packages/idna/core.py
73 | requests/packages/idna/idnadata.py
74 | requests/packages/idna/intranges.py
75 | requests/packages/idna/uts46data.py
76 | requests/packages/urllib3/__init__.py
77 | requests/packages/urllib3/_collections.py
78 | requests/packages/urllib3/connection.py
79 | requests/packages/urllib3/connectionpool.py
80 | requests/packages/urllib3/exceptions.py
81 | requests/packages/urllib3/fields.py
82 | requests/packages/urllib3/filepost.py
83 | requests/packages/urllib3/poolmanager.py
84 | requests/packages/urllib3/request.py
85 | requests/packages/urllib3/response.py
86 | requests/packages/urllib3/contrib/__init__.py
87 | requests/packages/urllib3/contrib/appengine.py
88 | requests/packages/urllib3/contrib/ntlmpool.py
89 | requests/packages/urllib3/contrib/pyopenssl.py
90 | requests/packages/urllib3/contrib/socks.py
91 | requests/packages/urllib3/packages/__init__.py
92 | requests/packages/urllib3/packages/ordered_dict.py
93 | requests/packages/urllib3/packages/six.py
94 | requests/packages/urllib3/packages/backports/__init__.py
95 | requests/packages/urllib3/packages/backports/makefile.py
96 | requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
97 | requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
98 | requests/packages/urllib3/util/__init__.py
99 | requests/packages/urllib3/util/connection.py
100 | requests/packages/urllib3/util/request.py
101 | requests/packages/urllib3/util/response.py
102 | requests/packages/urllib3/util/retry.py
103 | requests/packages/urllib3/util/ssl_.py
104 | requests/packages/urllib3/util/timeout.py
105 | requests/packages/urllib3/util/url.py
--------------------------------------------------------------------------------
/site-packages/requests.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/site-packages/requests.egg-info/not-zip-safe:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/site-packages/requests.egg-info/requires.txt:
--------------------------------------------------------------------------------
1 |
2 | [security]
3 | pyOpenSSL>=0.14
4 | cryptography>=1.3.4
5 | idna>=2.0.0
6 |
7 | [socks]
8 | PySocks>=1.5.6, !=1.5.7
9 |
--------------------------------------------------------------------------------
/site-packages/requests.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/site-packages/requests/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # __
4 | # /__) _ _ _ _ _/ _
5 | # / ( (- (/ (/ (- _) / _)
6 | # /
7 |
8 | """
9 | Requests HTTP library
10 | ~~~~~~~~~~~~~~~~~~~~~
11 |
12 | Requests is an HTTP library, written in Python, for human beings. Basic GET
13 | usage:
14 |
15 | >>> import requests
16 | >>> r = requests.get('https://www.python.org')
17 | >>> r.status_code
18 | 200
19 | >>> 'Python is a programming language' in r.content
20 | True
21 |
22 | ... or POST:
23 |
24 | >>> payload = dict(key1='value1', key2='value2')
25 | >>> r = requests.post('http://httpbin.org/post', data=payload)
26 | >>> print(r.text)
27 | {
28 | ...
29 | "form": {
30 | "key2": "value2",
31 | "key1": "value1"
32 | },
33 | ...
34 | }
35 |
36 | The other HTTP methods are supported - see `requests.api`. Full documentation
37 | is at <http://python-requests.org>.
38 |
39 | :copyright: (c) 2016 by Kenneth Reitz.
40 | :license: Apache 2.0, see LICENSE for more details.
41 | """
42 |
43 | __title__ = 'requests'
44 | __version__ = '2.12.3'
45 | __build__ = 0x021203
46 | __author__ = 'Kenneth Reitz'
47 | __license__ = 'Apache 2.0'
48 | __copyright__ = 'Copyright 2016 Kenneth Reitz'
49 |
50 | # Attempt to enable urllib3's SNI support, if possible
51 | try:
52 | from .packages.urllib3.contrib import pyopenssl
53 | pyopenssl.inject_into_urllib3()
54 | except ImportError:
55 | pass
56 |
57 | import warnings
58 |
59 | # urllib3's DependencyWarnings should be silenced.
60 | from .packages.urllib3.exceptions import DependencyWarning
61 | warnings.simplefilter('ignore', DependencyWarning)
62 |
63 | from . import utils
64 | from .models import Request, Response, PreparedRequest
65 | from .api import request, get, head, post, patch, put, delete, options
66 | from .sessions import session, Session
67 | from .status_codes import codes
68 | from .exceptions import (
69 | RequestException, Timeout, URLRequired,
70 | TooManyRedirects, HTTPError, ConnectionError,
71 | FileModeWarning, ConnectTimeout, ReadTimeout
72 | )
73 |
74 | # Set default logging handler to avoid "No handler found" warnings.
75 | import logging
76 | try: # Python 2.7+
77 | from logging import NullHandler
78 | except ImportError:
79 | class NullHandler(logging.Handler):
80 | def emit(self, record):
81 | pass
82 |
83 | logging.getLogger(__name__).addHandler(NullHandler())
84 |
85 | # FileModeWarnings go off per the default.
86 | warnings.simplefilter('default', FileModeWarning, append=True)
87 |
--------------------------------------------------------------------------------
/site-packages/requests/_internal_utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests._internal_utils
5 | ~~~~~~~~~~~~~~
6 |
7 | Provides utility functions that are consumed internally by Requests
8 | which depend on extremely few external helpers (such as compat).
9 | """
10 |
11 | from .compat import is_py2, builtin_str, str
12 |
13 |
14 | def to_native_string(string, encoding='ascii'):
15 | """Given a string object, regardless of type, returns a representation of
16 | that string in the native string type, encoding and decoding where
17 | necessary. This assumes ASCII unless told otherwise.
18 | """
19 | if isinstance(string, builtin_str):
20 | out = string
21 | else:
22 | if is_py2:
23 | out = string.encode(encoding)
24 | else:
25 | out = string.decode(encoding)
26 |
27 | return out
28 |
29 |
30 | def unicode_is_ascii(u_string):
31 | """Determine if unicode string only contains ASCII characters.
32 |
33 | :param str u_string: unicode string to check. Must be unicode
34 | and not Python 2 `str`.
35 | :rtype: bool
36 | """
37 | assert isinstance(u_string, str)
38 | try:
39 | u_string.encode('ascii')
40 | return True
41 | except UnicodeEncodeError:
42 | return False
43 |
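A brief usage sketch of the two helpers above, assuming Python 3 (where builtin_str is str, so bytes input takes the decode branch) and that this vendored module is importable as requests._internal_utils:

    from requests._internal_utils import to_native_string, unicode_is_ascii

    # bytes are decoded (ASCII by default); native str passes through unchanged.
    print(to_native_string(b'hello'))   # 'hello'
    print(to_native_string('hello'))    # 'hello'

    # True only when every character encodes to ASCII.
    print(unicode_is_ascii('hello'))    # True
    print(unicode_is_ascii('Привет'))   # False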
--------------------------------------------------------------------------------
/site-packages/requests/certs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | """
5 | requests.certs
6 | ~~~~~~~~~~~~~~
7 |
8 | This module returns the preferred default CA certificate bundle.
9 |
10 | If you are packaging Requests, e.g., for a Linux distribution or a managed
11 | environment, you can change the definition of where() to return a separately
12 | packaged CA bundle.
13 | """
14 | import os.path
15 |
16 | try:
17 | from certifi import where
18 | except ImportError:
19 | def where():
20 | """Return the preferred certificate bundle."""
21 | # vendored bundle inside Requests
22 | return os.path.join(os.path.dirname(__file__), 'cacert.pem')
23 |
24 | if __name__ == '__main__':
25 | print(where())
26 |
--------------------------------------------------------------------------------
/site-packages/requests/compat.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.compat
5 | ~~~~~~~~~~~~~~~
6 |
7 | This module handles import compatibility issues between Python 2 and
8 | Python 3.
9 | """
10 |
11 | from .packages import chardet
12 |
13 | import sys
14 |
15 | # -------
16 | # Pythons
17 | # -------
18 |
19 | # Syntax sugar.
20 | _ver = sys.version_info
21 |
22 | #: Python 2.x?
23 | is_py2 = (_ver[0] == 2)
24 |
25 | #: Python 3.x?
26 | is_py3 = (_ver[0] == 3)
27 |
28 | try:
29 | import simplejson as json
30 | except (ImportError, SyntaxError):
31 | # simplejson does not support Python 3.2; it throws a SyntaxError
32 | # because of u'...' Unicode literals.
33 | import json
34 |
35 | # ---------
36 | # Specifics
37 | # ---------
38 |
39 | if is_py2:
40 | from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
41 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
42 | from urllib2 import parse_http_list
43 | import cookielib
44 | from Cookie import Morsel
45 | from StringIO import StringIO
46 | from .packages.urllib3.packages.ordered_dict import OrderedDict
47 |
48 | builtin_str = str
49 | bytes = str
50 | str = unicode
51 | basestring = basestring
52 | numeric_types = (int, long, float)
53 | integer_types = (int, long)
54 |
55 | elif is_py3:
56 | from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
57 | from urllib.request import parse_http_list, getproxies, proxy_bypass
58 | from http import cookiejar as cookielib
59 | from http.cookies import Morsel
60 | from io import StringIO
61 | from collections import OrderedDict
62 |
63 | builtin_str = str
64 | str = str
65 | bytes = bytes
66 | basestring = (str, bytes)
67 | numeric_types = (int, float)
68 | integer_types = (int,)
69 |
--------------------------------------------------------------------------------
/site-packages/requests/exceptions.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.exceptions
5 | ~~~~~~~~~~~~~~~~~~~
6 |
7 | This module contains the set of Requests' exceptions.
8 | """
9 | from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
10 |
11 |
12 | class RequestException(IOError):
13 | """There was an ambiguous exception that occurred while handling your
14 | request.
15 | """
16 |
17 | def __init__(self, *args, **kwargs):
18 | """Initialize RequestException with `request` and `response` objects."""
19 | response = kwargs.pop('response', None)
20 | self.response = response
21 | self.request = kwargs.pop('request', None)
22 | if (response is not None and not self.request and
23 | hasattr(response, 'request')):
24 | self.request = self.response.request
25 | super(RequestException, self).__init__(*args, **kwargs)
26 |
27 |
28 | class HTTPError(RequestException):
29 | """An HTTP error occurred."""
30 |
31 |
32 | class ConnectionError(RequestException):
33 | """A Connection error occurred."""
34 |
35 |
36 | class ProxyError(ConnectionError):
37 | """A proxy error occurred."""
38 |
39 |
40 | class SSLError(ConnectionError):
41 | """An SSL error occurred."""
42 |
43 |
44 | class Timeout(RequestException):
45 | """The request timed out.
46 |
47 | Catching this error will catch both
48 | :exc:`~requests.exceptions.ConnectTimeout` and
49 | :exc:`~requests.exceptions.ReadTimeout` errors.
50 | """
51 |
52 |
53 | class ConnectTimeout(ConnectionError, Timeout):
54 | """The request timed out while trying to connect to the remote server.
55 |
56 | Requests that produced this error are safe to retry.
57 | """
58 |
59 |
60 | class ReadTimeout(Timeout):
61 | """The server did not send any data in the allotted amount of time."""
62 |
63 |
64 | class URLRequired(RequestException):
65 | """A valid URL is required to make a request."""
66 |
67 |
68 | class TooManyRedirects(RequestException):
69 | """Too many redirects."""
70 |
71 |
72 | class MissingSchema(RequestException, ValueError):
73 | """The URL schema (e.g. http or https) is missing."""
74 |
75 |
76 | class InvalidSchema(RequestException, ValueError):
77 | """See defaults.py for valid schemas."""
78 |
79 |
80 | class InvalidURL(RequestException, ValueError):
81 | """The URL provided was somehow invalid."""
82 |
83 |
84 | class InvalidHeader(RequestException, ValueError):
85 | """The header value provided was somehow invalid."""
86 |
87 |
88 | class ChunkedEncodingError(RequestException):
89 | """The server declared chunked encoding but sent an invalid chunk."""
90 |
91 |
92 | class ContentDecodingError(RequestException, BaseHTTPError):
93 | """Failed to decode response content"""
94 |
95 |
96 | class StreamConsumedError(RequestException, TypeError):
97 | """The content for this response was already consumed"""
98 |
99 |
100 | class RetryError(RequestException):
101 | """Custom retries logic failed"""
102 |
103 | class UnrewindableBodyError(RequestException):
104 | """Requests encountered an error when trying to rewind a body"""
105 |
106 | # Warnings
107 |
108 |
109 | class RequestsWarning(Warning):
110 | """Base warning for Requests."""
111 | pass
112 |
113 |
114 | class FileModeWarning(RequestsWarning, DeprecationWarning):
115 | """A file was opened in text mode, but Requests determined its binary length."""
116 | pass
117 |
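As the Timeout docstring above points out, catching requests.exceptions.Timeout covers both ConnectTimeout and ReadTimeout. A small sketch (the URL is a placeholder):

    import requests
    from requests.exceptions import Timeout

    try:
        # timeout=(connect, read) in seconds; placeholder URL.
        resp = requests.get('http://example.com/slow', timeout=(3.05, 10))
        resp.raise_for_status()  # raises HTTPError for 4xx/5xx responses
    except Timeout:
        # Catches both ConnectTimeout and ReadTimeout.
        print('request timed out')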
--------------------------------------------------------------------------------
/site-packages/requests/hooks.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.hooks
5 | ~~~~~~~~~~~~~~
6 |
7 | This module provides the capabilities for the Requests hooks system.
8 |
9 | Available hooks:
10 |
11 | ``response``:
12 | The response generated from a Request.
13 | """
14 | HOOKS = ['response']
15 |
16 |
17 | def default_hooks():
18 | return dict((event, []) for event in HOOKS)
19 |
20 | # TODO: response is the only one
21 |
22 |
23 | def dispatch_hook(key, hooks, hook_data, **kwargs):
24 | """Dispatches a hook dictionary on a given piece of data."""
25 | hooks = hooks or dict()
26 | hooks = hooks.get(key)
27 | if hooks:
28 | if hasattr(hooks, '__call__'):
29 | hooks = [hooks]
30 | for hook in hooks:
31 | _hook_data = hook(hook_data, **kwargs)
32 | if _hook_data is not None:
33 | hook_data = _hook_data
34 | return hook_data
35 |
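dispatch_hook above is the dispatcher side; from the caller's side a hook is registered by passing a dict keyed by event name ('response' is currently the only event). A brief sketch with a placeholder URL:

    import requests

    def log_status(response, *args, **kwargs):
        # A response hook receives the Response; returning None keeps it as-is.
        print(response.status_code, response.url)

    requests.get('http://example.com/', hooks={'response': log_status})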
--------------------------------------------------------------------------------
/site-packages/requests/packages/__init__.py:
--------------------------------------------------------------------------------
1 | '''
2 | Debian and other distributions "unbundle" requests' vendored dependencies, and
3 | rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
4 | The problem with this is that not only requests itself imports those
5 | dependencies, but third-party code outside of the distros' control too.
6 |
7 | In reaction to these problems, the distro maintainers replaced
8 | ``requests.packages`` with a magical "stub module" that imports the correct
9 | modules. The implementations varied in quality and all had severe
10 | problems. For example, a symlink (or hardlink) that links the correct modules
11 | into place introduces problems regarding object identity, since you now have
12 | two modules in `sys.modules` with the same API, but different identities::
13 |
14 | requests.packages.urllib3 is not urllib3
15 |
16 | With version ``2.5.2``, requests started to maintain its own stub, so that
17 | distro-specific breakage would be reduced to a minimum, even though the whole
18 | issue is not requests' fault in the first place. See
19 | https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
20 | request.
21 | '''
22 |
23 | from __future__ import absolute_import
24 | import sys
25 |
26 | try:
27 | from . import urllib3
28 | except ImportError:
29 | import urllib3
30 | sys.modules['%s.urllib3' % __name__] = urllib3
31 |
32 | try:
33 | from . import chardet
34 | except ImportError:
35 | import chardet
36 | sys.modules['%s.chardet' % __name__] = chardet
37 |
38 | try:
39 | from . import idna
40 | except ImportError:
41 | import idna
42 | sys.modules['%s.idna' % __name__] = idna
43 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/__init__.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # This library is free software; you can redistribute it and/or
3 | # modify it under the terms of the GNU Lesser General Public
4 | # License as published by the Free Software Foundation; either
5 | # version 2.1 of the License, or (at your option) any later version.
6 | #
7 | # This library is distributed in the hope that it will be useful,
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
10 | # Lesser General Public License for more details.
11 | #
12 | # You should have received a copy of the GNU Lesser General Public
13 | # License along with this library; if not, write to the Free Software
14 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
15 | # 02110-1301 USA
16 | ######################### END LICENSE BLOCK #########################
17 |
18 | __version__ = "2.3.0"
19 | from sys import version_info
20 |
21 |
22 | def detect(aBuf):
23 | if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
24 | (version_info >= (3, 0) and not isinstance(aBuf, bytes))):
25 | raise ValueError('Expected a bytes object, not a unicode object')
26 |
27 | from . import universaldetector
28 | u = universaldetector.UniversalDetector()
29 | u.reset()
30 | u.feed(aBuf)
31 | u.close()
32 | return u.result
33 |
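detect() above expects a bytes object and returns the universal detector's result dict. A quick usage sketch against this vendored copy:

    from requests.packages import chardet

    result = chardet.detect('Привет!'.encode('utf-8'))
    # result is a dict of the form {'encoding': ..., 'confidence': ...}.
    print(result['encoding'], result['confidence'])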
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/big5prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Communicator client code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import Big5DistributionAnalysis
31 | from .mbcssm import Big5SMModel
32 |
33 |
34 | class Big5Prober(MultiByteCharSetProber):
35 | def __init__(self):
36 | MultiByteCharSetProber.__init__(self)
37 | self._mCodingSM = CodingStateMachine(Big5SMModel)
38 | self._mDistributionAnalyzer = Big5DistributionAnalysis()
39 | self.reset()
40 |
41 | def get_charset_name(self):
42 | return "Big5"
43 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/chardetect.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Script which takes one or more file paths and reports on their detected
4 | encodings
5 |
6 | Example::
7 |
8 | % chardetect somefile someotherfile
9 | somefile: windows-1252 with confidence 0.5
10 | someotherfile: ascii with confidence 1.0
11 |
12 | If no paths are provided, it takes its input from stdin.
13 |
14 | """
15 |
16 | from __future__ import absolute_import, print_function, unicode_literals
17 |
18 | import argparse
19 | import sys
20 | from io import open
21 |
22 | from chardet import __version__
23 | from chardet.universaldetector import UniversalDetector
24 |
25 |
26 | def description_of(lines, name='stdin'):
27 | """
28 | Return a string describing the probable encoding of a file or
29 | list of strings.
30 |
31 | :param lines: The lines to get the encoding of.
32 | :type lines: Iterable of bytes
33 | :param name: Name of file or collection of lines
34 | :type name: str
35 | """
36 | u = UniversalDetector()
37 | for line in lines:
38 | u.feed(line)
39 | u.close()
40 | result = u.result
41 | if result['encoding']:
42 | return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
43 | result['confidence'])
44 | else:
45 | return '{0}: no result'.format(name)
46 |
47 |
48 | def main(argv=None):
49 | '''
50 | Handles command line arguments and gets things started.
51 |
52 | :param argv: List of arguments, as if specified on the command-line.
53 | If None, ``sys.argv[1:]`` is used instead.
54 | :type argv: list of str
55 | '''
56 | # Get command line arguments
57 | parser = argparse.ArgumentParser(
58 | description="Takes one or more file paths and reports their detected \
59 | encodings",
60 | formatter_class=argparse.ArgumentDefaultsHelpFormatter,
61 | conflict_handler='resolve')
62 | parser.add_argument('input',
63 | help='File whose encoding we would like to determine.',
64 | type=argparse.FileType('rb'), nargs='*',
65 | default=[sys.stdin])
66 | parser.add_argument('--version', action='version',
67 | version='%(prog)s {0}'.format(__version__))
68 | args = parser.parse_args(argv)
69 |
70 | for f in args.input:
71 | if f.isatty():
72 | print("You are running chardetect interactively. Press " +
73 | "CTRL-D twice at the start of a blank line to signal the " +
74 | "end of your input. If you want help, run chardetect " +
75 | "--help\n", file=sys.stderr)
76 | print(description_of(f, f.name))
77 |
78 |
79 | if __name__ == '__main__':
80 | main()
81 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/charsetgroupprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Communicator client code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from . import constants
29 | import sys
30 | from .charsetprober import CharSetProber
31 |
32 |
33 | class CharSetGroupProber(CharSetProber):
34 | def __init__(self):
35 | CharSetProber.__init__(self)
36 | self._mActiveNum = 0
37 | self._mProbers = []
38 | self._mBestGuessProber = None
39 |
40 | def reset(self):
41 | CharSetProber.reset(self)
42 | self._mActiveNum = 0
43 | for prober in self._mProbers:
44 | if prober:
45 | prober.reset()
46 | prober.active = True
47 | self._mActiveNum += 1
48 | self._mBestGuessProber = None
49 |
50 | def get_charset_name(self):
51 | if not self._mBestGuessProber:
52 | self.get_confidence()
53 | if not self._mBestGuessProber:
54 | return None
55 | # self._mBestGuessProber = self._mProbers[0]
56 | return self._mBestGuessProber.get_charset_name()
57 |
58 | def feed(self, aBuf):
59 | for prober in self._mProbers:
60 | if not prober:
61 | continue
62 | if not prober.active:
63 | continue
64 | st = prober.feed(aBuf)
65 | if not st:
66 | continue
67 | if st == constants.eFoundIt:
68 | self._mBestGuessProber = prober
69 | return self.get_state()
70 | elif st == constants.eNotMe:
71 | prober.active = False
72 | self._mActiveNum -= 1
73 | if self._mActiveNum <= 0:
74 | self._mState = constants.eNotMe
75 | return self.get_state()
76 | return self.get_state()
77 |
78 | def get_confidence(self):
79 | st = self.get_state()
80 | if st == constants.eFoundIt:
81 | return 0.99
82 | elif st == constants.eNotMe:
83 | return 0.01
84 | bestConf = 0.0
85 | self._mBestGuessProber = None
86 | for prober in self._mProbers:
87 | if not prober:
88 | continue
89 | if not prober.active:
90 | if constants._debug:
91 | sys.stderr.write(prober.get_charset_name()
92 | + ' not active\n')
93 | continue
94 | cf = prober.get_confidence()
95 | if constants._debug:
96 | sys.stderr.write('%s confidence = %s\n' %
97 | (prober.get_charset_name(), cf))
98 | if bestConf < cf:
99 | bestConf = cf
100 | self._mBestGuessProber = prober
101 | if not self._mBestGuessProber:
102 | return 0.0
103 | return bestConf
104 | # else:
105 | # self._mBestGuessProber = self._mProbers[0]
106 | # return self._mBestGuessProber.get_confidence()
107 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/charsetprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | #
13 | # This library is free software; you can redistribute it and/or
14 | # modify it under the terms of the GNU Lesser General Public
15 | # License as published by the Free Software Foundation; either
16 | # version 2.1 of the License, or (at your option) any later version.
17 | #
18 | # This library is distributed in the hope that it will be useful,
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
21 | # Lesser General Public License for more details.
22 | #
23 | # You should have received a copy of the GNU Lesser General Public
24 | # License along with this library; if not, write to the Free Software
25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
26 | # 02110-1301 USA
27 | ######################### END LICENSE BLOCK #########################
28 |
29 | from . import constants
30 | import re
31 |
32 |
33 | class CharSetProber:
34 | def __init__(self):
35 | pass
36 |
37 | def reset(self):
38 | self._mState = constants.eDetecting
39 |
40 | def get_charset_name(self):
41 | return None
42 |
43 | def feed(self, aBuf):
44 | pass
45 |
46 | def get_state(self):
47 | return self._mState
48 |
49 | def get_confidence(self):
50 | return 0.0
51 |
52 | def filter_high_bit_only(self, aBuf):
53 | aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
54 | return aBuf
55 |
56 | def filter_without_english_letters(self, aBuf):
57 | aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
58 | return aBuf
59 |
60 | def filter_with_english_letters(self, aBuf):
61 | # TODO
62 | return aBuf
63 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/codingstatemachine.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .constants import eStart
29 | from .compat import wrap_ord
30 |
31 |
32 | class CodingStateMachine:
33 | def __init__(self, sm):
34 | self._mModel = sm
35 | self._mCurrentBytePos = 0
36 | self._mCurrentCharLen = 0
37 | self.reset()
38 |
39 | def reset(self):
40 | self._mCurrentState = eStart
41 |
42 | def next_state(self, c):
43 | # for each byte we get its class
44 | # if it is first byte, we also get byte length
45 | # PY3K: aBuf is a byte stream, so c is an int, not a byte
46 | byteCls = self._mModel['classTable'][wrap_ord(c)]
47 | if self._mCurrentState == eStart:
48 | self._mCurrentBytePos = 0
49 | self._mCurrentCharLen = self._mModel['charLenTable'][byteCls]
50 | # from byte's class and stateTable, we get its next state
51 | curr_state = (self._mCurrentState * self._mModel['classFactor']
52 | + byteCls)
53 | self._mCurrentState = self._mModel['stateTable'][curr_state]
54 | self._mCurrentBytePos += 1
55 | return self._mCurrentState
56 |
57 | def get_current_charlen(self):
58 | return self._mCurrentCharLen
59 |
60 | def get_coding_state_machine(self):
61 | return self._mModel['name']
62 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/compat.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # Contributor(s):
3 | # Ian Cordasco - port to Python
4 | #
5 | # This library is free software; you can redistribute it and/or
6 | # modify it under the terms of the GNU Lesser General Public
7 | # License as published by the Free Software Foundation; either
8 | # version 2.1 of the License, or (at your option) any later version.
9 | #
10 | # This library is distributed in the hope that it will be useful,
11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 | # Lesser General Public License for more details.
14 | #
15 | # You should have received a copy of the GNU Lesser General Public
16 | # License along with this library; if not, write to the Free Software
17 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
18 | # 02110-1301 USA
19 | ######################### END LICENSE BLOCK #########################
20 |
21 | import sys
22 |
23 |
24 | if sys.version_info < (3, 0):
25 | base_str = (str, unicode)
26 | else:
27 | base_str = (bytes, str)
28 |
29 |
30 | def wrap_ord(a):
31 | if sys.version_info < (3, 0) and isinstance(a, base_str):
32 | return ord(a)
33 | else:
34 | return a
35 |
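wrap_ord above smooths over the bytes-iteration difference between Python 2 and 3: iterating a byte string yields one-character str objects on Python 2 (which need ord()) but ints on Python 3 (which pass through unchanged). For example:

    # Python 3: items are already ints, wrap_ord returns them unchanged.
    # Python 2: items are 1-char strings, wrap_ord applies ord().
    for c in b'ab':
        print(wrap_ord(c))  # 97, then 98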
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/constants.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | #
13 | # This library is free software; you can redistribute it and/or
14 | # modify it under the terms of the GNU Lesser General Public
15 | # License as published by the Free Software Foundation; either
16 | # version 2.1 of the License, or (at your option) any later version.
17 | #
18 | # This library is distributed in the hope that it will be useful,
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
21 | # Lesser General Public License for more details.
22 | #
23 | # You should have received a copy of the GNU Lesser General Public
24 | # License along with this library; if not, write to the Free Software
25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
26 | # 02110-1301 USA
27 | ######################### END LICENSE BLOCK #########################
28 |
29 | _debug = 0
30 |
31 | eDetecting = 0
32 | eFoundIt = 1
33 | eNotMe = 2
34 |
35 | eStart = 0
36 | eError = 1
37 | eItsMe = 2
38 |
39 | SHORTCUT_THRESHOLD = 0.95
40 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/cp949prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import EUCKRDistributionAnalysis
31 | from .mbcssm import CP949SMModel
32 |
33 |
34 | class CP949Prober(MultiByteCharSetProber):
35 | def __init__(self):
36 | MultiByteCharSetProber.__init__(self)
37 | self._mCodingSM = CodingStateMachine(CP949SMModel)
38 | # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
39 | # be different.
40 | self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
41 | self.reset()
42 |
43 | def get_charset_name(self):
44 | return "CP949"
45 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/escprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from . import constants
29 | from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
30 | ISO2022KRSMModel)
31 | from .charsetprober import CharSetProber
32 | from .codingstatemachine import CodingStateMachine
33 | from .compat import wrap_ord
34 |
35 |
36 | class EscCharSetProber(CharSetProber):
37 | def __init__(self):
38 | CharSetProber.__init__(self)
39 | self._mCodingSM = [
40 | CodingStateMachine(HZSMModel),
41 | CodingStateMachine(ISO2022CNSMModel),
42 | CodingStateMachine(ISO2022JPSMModel),
43 | CodingStateMachine(ISO2022KRSMModel)
44 | ]
45 | self.reset()
46 |
47 | def reset(self):
48 | CharSetProber.reset(self)
49 | for codingSM in self._mCodingSM:
50 | if not codingSM:
51 | continue
52 | codingSM.active = True
53 | codingSM.reset()
54 | self._mActiveSM = len(self._mCodingSM)
55 | self._mDetectedCharset = None
56 |
57 | def get_charset_name(self):
58 | return self._mDetectedCharset
59 |
60 | def get_confidence(self):
61 | if self._mDetectedCharset:
62 | return 0.99
63 | else:
64 | return 0.00
65 |
66 | def feed(self, aBuf):
67 | for c in aBuf:
68 | # PY3K: aBuf is a byte array, so c is an int, not a byte
69 | for codingSM in self._mCodingSM:
70 | if not codingSM:
71 | continue
72 | if not codingSM.active:
73 | continue
74 | codingState = codingSM.next_state(wrap_ord(c))
75 | if codingState == constants.eError:
76 | codingSM.active = False
77 | self._mActiveSM -= 1
78 | if self._mActiveSM <= 0:
79 | self._mState = constants.eNotMe
80 | return self.get_state()
81 | elif codingState == constants.eItsMe:
82 | self._mState = constants.eFoundIt
83 | self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
84 | return self.get_state()
85 |
86 | return self.get_state()
87 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/eucjpprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | import sys
29 | from . import constants
30 | from .mbcharsetprober import MultiByteCharSetProber
31 | from .codingstatemachine import CodingStateMachine
32 | from .chardistribution import EUCJPDistributionAnalysis
33 | from .jpcntx import EUCJPContextAnalysis
34 | from .mbcssm import EUCJPSMModel
35 |
36 |
37 | class EUCJPProber(MultiByteCharSetProber):
38 | def __init__(self):
39 | MultiByteCharSetProber.__init__(self)
40 | self._mCodingSM = CodingStateMachine(EUCJPSMModel)
41 | self._mDistributionAnalyzer = EUCJPDistributionAnalysis()
42 | self._mContextAnalyzer = EUCJPContextAnalysis()
43 | self.reset()
44 |
45 | def reset(self):
46 | MultiByteCharSetProber.reset(self)
47 | self._mContextAnalyzer.reset()
48 |
49 | def get_charset_name(self):
50 | return "EUC-JP"
51 |
52 | def feed(self, aBuf):
53 | aLen = len(aBuf)
54 | for i in range(0, aLen):
55 | # PY3K: aBuf is a byte array, so aBuf[i] is an int, not a byte
56 | codingState = self._mCodingSM.next_state(aBuf[i])
57 | if codingState == constants.eError:
58 | if constants._debug:
59 | sys.stderr.write(self.get_charset_name()
60 | + ' prober hit error at byte ' + str(i)
61 | + '\n')
62 | self._mState = constants.eNotMe
63 | break
64 | elif codingState == constants.eItsMe:
65 | self._mState = constants.eFoundIt
66 | break
67 | elif codingState == constants.eStart:
68 | charLen = self._mCodingSM.get_current_charlen()
69 | if i == 0:
70 | self._mLastChar[1] = aBuf[0]
71 | self._mContextAnalyzer.feed(self._mLastChar, charLen)
72 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
73 | else:
74 | self._mContextAnalyzer.feed(aBuf[i - 1:i + 1], charLen)
75 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
76 | charLen)
77 |
78 | self._mLastChar[0] = aBuf[aLen - 1]
79 |
80 | if self.get_state() == constants.eDetecting:
81 | if (self._mContextAnalyzer.got_enough_data() and
82 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
83 | self._mState = constants.eFoundIt
84 |
85 | return self.get_state()
86 |
87 | def get_confidence(self):
88 | contxtCf = self._mContextAnalyzer.get_confidence()
89 | distribCf = self._mDistributionAnalyzer.get_confidence()
90 | return max(contxtCf, distribCf)
91 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/euckrprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import EUCKRDistributionAnalysis
31 | from .mbcssm import EUCKRSMModel
32 |
33 |
34 | class EUCKRProber(MultiByteCharSetProber):
35 | def __init__(self):
36 | MultiByteCharSetProber.__init__(self)
37 | self._mCodingSM = CodingStateMachine(EUCKRSMModel)
38 | self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
39 | self.reset()
40 |
41 | def get_charset_name(self):
42 | return "EUC-KR"
43 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/euctwprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import EUCTWDistributionAnalysis
31 | from .mbcssm import EUCTWSMModel
32 |
33 | class EUCTWProber(MultiByteCharSetProber):
34 | def __init__(self):
35 | MultiByteCharSetProber.__init__(self)
36 | self._mCodingSM = CodingStateMachine(EUCTWSMModel)
37 | self._mDistributionAnalyzer = EUCTWDistributionAnalysis()
38 | self.reset()
39 |
40 | def get_charset_name(self):
41 | return "EUC-TW"
42 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/gb2312prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import GB2312DistributionAnalysis
31 | from .mbcssm import GB2312SMModel
32 |
33 | class GB2312Prober(MultiByteCharSetProber):
34 | def __init__(self):
35 | MultiByteCharSetProber.__init__(self)
36 | self._mCodingSM = CodingStateMachine(GB2312SMModel)
37 | self._mDistributionAnalyzer = GB2312DistributionAnalysis()
38 | self.reset()
39 |
40 | def get_charset_name(self):
41 | return "GB2312"
42 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/latin1prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | #
13 | # This library is free software; you can redistribute it and/or
14 | # modify it under the terms of the GNU Lesser General Public
15 | # License as published by the Free Software Foundation; either
16 | # version 2.1 of the License, or (at your option) any later version.
17 | #
18 | # This library is distributed in the hope that it will be useful,
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
21 | # Lesser General Public License for more details.
22 | #
23 | # You should have received a copy of the GNU Lesser General Public
24 | # License along with this library; if not, write to the Free Software
25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
26 | # 02110-1301 USA
27 | ######################### END LICENSE BLOCK #########################
28 |
29 | from .charsetprober import CharSetProber
30 | from .constants import eNotMe
31 | from .compat import wrap_ord
32 |
33 | FREQ_CAT_NUM = 4
34 |
35 | UDF = 0 # undefined
36 | OTH = 1 # other
37 | ASC = 2 # ascii capital letter
38 | ASS = 3 # ascii small letter
39 | ACV = 4 # accent capital vowel
40 | ACO = 5 # accent capital other
41 | ASV = 6 # accent small vowel
42 | ASO = 7 # accent small other
43 | CLASS_NUM = 8 # total classes
44 |
45 | Latin1_CharToClass = (
46 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
47 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
48 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
49 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
50 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
51 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
52 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
53 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
54 | OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
55 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
56 | ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
57 | ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
58 | OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
59 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
60 | ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
61 | ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
62 | OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
63 | OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
64 | UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
65 | OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
66 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
67 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
68 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
69 | OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
70 | ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
71 | ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
72 | ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
73 | ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
74 | ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
75 | ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
76 | ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
77 | ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
78 | )
79 |
80 | # 0 : illegal
81 | # 1 : very unlikely
82 | # 2 : normal
83 | # 3 : very likely
84 | Latin1ClassModel = (
85 | # UDF OTH ASC ASS ACV ACO ASV ASO
86 | 0, 0, 0, 0, 0, 0, 0, 0, # UDF
87 | 0, 3, 3, 3, 3, 3, 3, 3, # OTH
88 | 0, 3, 3, 3, 3, 3, 3, 3, # ASC
89 | 0, 3, 3, 3, 1, 1, 3, 3, # ASS
90 | 0, 3, 3, 3, 1, 2, 1, 2, # ACV
91 | 0, 3, 3, 3, 3, 3, 3, 3, # ACO
92 | 0, 3, 1, 3, 1, 1, 1, 3, # ASV
93 | 0, 3, 1, 3, 1, 1, 3, 3, # ASO
94 | )
95 |
96 |
97 | class Latin1Prober(CharSetProber):
98 | def __init__(self):
99 | CharSetProber.__init__(self)
100 | self.reset()
101 |
102 | def reset(self):
103 | self._mLastCharClass = OTH
104 | self._mFreqCounter = [0] * FREQ_CAT_NUM
105 | CharSetProber.reset(self)
106 |
107 | def get_charset_name(self):
108 | return "windows-1252"
109 |
110 | def feed(self, aBuf):
111 | aBuf = self.filter_with_english_letters(aBuf)
112 | for c in aBuf:
113 | charClass = Latin1_CharToClass[wrap_ord(c)]
114 | freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
115 | + charClass]
116 | if freq == 0:
117 | self._mState = eNotMe
118 | break
119 | self._mFreqCounter[freq] += 1
120 | self._mLastCharClass = charClass
121 |
122 | return self.get_state()
123 |
124 | def get_confidence(self):
125 | if self.get_state() == eNotMe:
126 | return 0.01
127 |
128 | total = sum(self._mFreqCounter)
129 | if total < 0.01:
130 | confidence = 0.0
131 | else:
132 | confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
133 | / total)
134 | if confidence < 0.0:
135 | confidence = 0.0
136 |         # lower the confidence of latin1 so that other, more accurate
137 |         # detectors can take priority.
138 | confidence = confidence * 0.73
139 | return confidence
140 |
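
The confidence computed above rewards character-class transitions rated "very likely" (category 3), penalises "very unlikely" ones (category 1) twenty-fold, and then damps the result by 0.73 so that more specific probers can outrank windows-1252. A minimal sketch of that arithmetic, with invented counts:

    # Invented counts, purely to illustrate Latin1Prober.get_confidence():
    freq_counter = [0, 1, 29, 70]          # per-frequency-category transition counts
    total = sum(freq_counter)              # 100 transitions observed
    confidence = (freq_counter[3] - freq_counter[1] * 20.0) / total
    confidence = max(confidence, 0.0) * 0.73
    print(round(confidence, 3))            # (70 - 20) / 100 * 0.73 = 0.365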
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/mbcharsetprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | # Proofpoint, Inc.
13 | #
14 | # This library is free software; you can redistribute it and/or
15 | # modify it under the terms of the GNU Lesser General Public
16 | # License as published by the Free Software Foundation; either
17 | # version 2.1 of the License, or (at your option) any later version.
18 | #
19 | # This library is distributed in the hope that it will be useful,
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
22 | # Lesser General Public License for more details.
23 | #
24 | # You should have received a copy of the GNU Lesser General Public
25 | # License along with this library; if not, write to the Free Software
26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
27 | # 02110-1301 USA
28 | ######################### END LICENSE BLOCK #########################
29 |
30 | import sys
31 | from . import constants
32 | from .charsetprober import CharSetProber
33 |
34 |
35 | class MultiByteCharSetProber(CharSetProber):
36 | def __init__(self):
37 | CharSetProber.__init__(self)
38 | self._mDistributionAnalyzer = None
39 | self._mCodingSM = None
40 | self._mLastChar = [0, 0]
41 |
42 | def reset(self):
43 | CharSetProber.reset(self)
44 | if self._mCodingSM:
45 | self._mCodingSM.reset()
46 | if self._mDistributionAnalyzer:
47 | self._mDistributionAnalyzer.reset()
48 | self._mLastChar = [0, 0]
49 |
50 | def get_charset_name(self):
51 | pass
52 |
53 | def feed(self, aBuf):
54 | aLen = len(aBuf)
55 | for i in range(0, aLen):
56 | codingState = self._mCodingSM.next_state(aBuf[i])
57 | if codingState == constants.eError:
58 | if constants._debug:
59 | sys.stderr.write(self.get_charset_name()
60 | + ' prober hit error at byte ' + str(i)
61 | + '\n')
62 | self._mState = constants.eNotMe
63 | break
64 | elif codingState == constants.eItsMe:
65 | self._mState = constants.eFoundIt
66 | break
67 | elif codingState == constants.eStart:
68 | charLen = self._mCodingSM.get_current_charlen()
69 | if i == 0:
70 | self._mLastChar[1] = aBuf[0]
71 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
72 | else:
73 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
74 | charLen)
75 |
76 | self._mLastChar[0] = aBuf[aLen - 1]
77 |
78 | if self.get_state() == constants.eDetecting:
79 | if (self._mDistributionAnalyzer.got_enough_data() and
80 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
81 | self._mState = constants.eFoundIt
82 |
83 | return self.get_state()
84 |
85 | def get_confidence(self):
86 | return self._mDistributionAnalyzer.get_confidence()
87 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/mbcsgroupprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | # Proofpoint, Inc.
13 | #
14 | # This library is free software; you can redistribute it and/or
15 | # modify it under the terms of the GNU Lesser General Public
16 | # License as published by the Free Software Foundation; either
17 | # version 2.1 of the License, or (at your option) any later version.
18 | #
19 | # This library is distributed in the hope that it will be useful,
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
22 | # Lesser General Public License for more details.
23 | #
24 | # You should have received a copy of the GNU Lesser General Public
25 | # License along with this library; if not, write to the Free Software
26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
27 | # 02110-1301 USA
28 | ######################### END LICENSE BLOCK #########################
29 |
30 | from .charsetgroupprober import CharSetGroupProber
31 | from .utf8prober import UTF8Prober
32 | from .sjisprober import SJISProber
33 | from .eucjpprober import EUCJPProber
34 | from .gb2312prober import GB2312Prober
35 | from .euckrprober import EUCKRProber
36 | from .cp949prober import CP949Prober
37 | from .big5prober import Big5Prober
38 | from .euctwprober import EUCTWProber
39 |
40 |
41 | class MBCSGroupProber(CharSetGroupProber):
42 | def __init__(self):
43 | CharSetGroupProber.__init__(self)
44 | self._mProbers = [
45 | UTF8Prober(),
46 | SJISProber(),
47 | EUCJPProber(),
48 | GB2312Prober(),
49 | EUCKRProber(),
50 | CP949Prober(),
51 | Big5Prober(),
52 | EUCTWProber()
53 | ]
54 | self.reset()
55 |
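
MBCSGroupProber only wires the individual multi-byte probers together; callers normally reach it through the package's top-level detect() helper rather than instantiating it directly. A minimal sketch, assuming the vendored copy is importable through requests.packages (exact confidence values depend on the input and the chardet version):

    from requests.packages import chardet

    # detect() runs the prober groups above over the byte string and
    # reports the best guess.
    data = (u'こんにちは、世界。' * 20).encode('euc_jp')
    print(chardet.detect(data))   # e.g. {'encoding': 'EUC-JP', 'confidence': 0.99}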
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/sbcharsetprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | #
13 | # This library is free software; you can redistribute it and/or
14 | # modify it under the terms of the GNU Lesser General Public
15 | # License as published by the Free Software Foundation; either
16 | # version 2.1 of the License, or (at your option) any later version.
17 | #
18 | # This library is distributed in the hope that it will be useful,
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
21 | # Lesser General Public License for more details.
22 | #
23 | # You should have received a copy of the GNU Lesser General Public
24 | # License along with this library; if not, write to the Free Software
25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
26 | # 02110-1301 USA
27 | ######################### END LICENSE BLOCK #########################
28 |
29 | import sys
30 | from . import constants
31 | from .charsetprober import CharSetProber
32 | from .compat import wrap_ord
33 |
34 | SAMPLE_SIZE = 64
35 | SB_ENOUGH_REL_THRESHOLD = 1024
36 | POSITIVE_SHORTCUT_THRESHOLD = 0.95
37 | NEGATIVE_SHORTCUT_THRESHOLD = 0.05
38 | SYMBOL_CAT_ORDER = 250
39 | NUMBER_OF_SEQ_CAT = 4
40 | POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
41 | #NEGATIVE_CAT = 0
42 |
43 |
44 | class SingleByteCharSetProber(CharSetProber):
45 | def __init__(self, model, reversed=False, nameProber=None):
46 | CharSetProber.__init__(self)
47 | self._mModel = model
48 | # TRUE if we need to reverse every pair in the model lookup
49 | self._mReversed = reversed
50 | # Optional auxiliary prober for name decision
51 | self._mNameProber = nameProber
52 | self.reset()
53 |
54 | def reset(self):
55 | CharSetProber.reset(self)
56 | # char order of last character
57 | self._mLastOrder = 255
58 | self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
59 | self._mTotalSeqs = 0
60 | self._mTotalChar = 0
61 | # characters that fall in our sampling range
62 | self._mFreqChar = 0
63 |
64 | def get_charset_name(self):
65 | if self._mNameProber:
66 | return self._mNameProber.get_charset_name()
67 | else:
68 | return self._mModel['charsetName']
69 |
70 | def feed(self, aBuf):
71 | if not self._mModel['keepEnglishLetter']:
72 | aBuf = self.filter_without_english_letters(aBuf)
73 | aLen = len(aBuf)
74 | if not aLen:
75 | return self.get_state()
76 | for c in aBuf:
77 | order = self._mModel['charToOrderMap'][wrap_ord(c)]
78 | if order < SYMBOL_CAT_ORDER:
79 | self._mTotalChar += 1
80 | if order < SAMPLE_SIZE:
81 | self._mFreqChar += 1
82 | if self._mLastOrder < SAMPLE_SIZE:
83 | self._mTotalSeqs += 1
84 | if not self._mReversed:
85 | i = (self._mLastOrder * SAMPLE_SIZE) + order
86 | model = self._mModel['precedenceMatrix'][i]
87 | else: # reverse the order of the letters in the lookup
88 | i = (order * SAMPLE_SIZE) + self._mLastOrder
89 | model = self._mModel['precedenceMatrix'][i]
90 | self._mSeqCounters[model] += 1
91 | self._mLastOrder = order
92 |
93 | if self.get_state() == constants.eDetecting:
94 | if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
95 | cf = self.get_confidence()
96 | if cf > POSITIVE_SHORTCUT_THRESHOLD:
97 | if constants._debug:
98 |                         sys.stderr.write('%s confidence = %s, we have a '
99 | 'winner\n' %
100 | (self._mModel['charsetName'], cf))
101 | self._mState = constants.eFoundIt
102 | elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
103 | if constants._debug:
104 |                         sys.stderr.write('%s confidence = %s, below negative '
105 |                                          'shortcut threshold %s\n' %
106 | (self._mModel['charsetName'], cf,
107 | NEGATIVE_SHORTCUT_THRESHOLD))
108 | self._mState = constants.eNotMe
109 |
110 | return self.get_state()
111 |
112 | def get_confidence(self):
113 | r = 0.01
114 | if self._mTotalSeqs > 0:
115 | r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
116 | / self._mModel['mTypicalPositiveRatio'])
117 | r = r * self._mFreqChar / self._mTotalChar
118 | if r >= 1.0:
119 | r = 0.99
120 | return r
121 |
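
get_confidence() above compares the observed share of "positive" two-character sequences against the ratio a typical text in that language would produce, then scales by the fraction of characters that fell inside the sampled frequency range. A worked example with invented numbers:

    # Invented counts, purely to illustrate the formula in get_confidence():
    positive_seqs, total_seqs = 900, 1000    # _mSeqCounters[POSITIVE_CAT], _mTotalSeqs
    typical_positive_ratio = 0.95            # model['mTypicalPositiveRatio']
    freq_char, total_char = 800, 1100        # _mFreqChar, _mTotalChar
    r = (1.0 * positive_seqs / total_seqs) / typical_positive_ratio
    r = r * freq_char / total_char
    print(round(r, 3))                       # roughly 0.689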
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/sbcsgroupprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | #
13 | # This library is free software; you can redistribute it and/or
14 | # modify it under the terms of the GNU Lesser General Public
15 | # License as published by the Free Software Foundation; either
16 | # version 2.1 of the License, or (at your option) any later version.
17 | #
18 | # This library is distributed in the hope that it will be useful,
19 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
21 | # Lesser General Public License for more details.
22 | #
23 | # You should have received a copy of the GNU Lesser General Public
24 | # License along with this library; if not, write to the Free Software
25 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
26 | # 02110-1301 USA
27 | ######################### END LICENSE BLOCK #########################
28 |
29 | from .charsetgroupprober import CharSetGroupProber
30 | from .sbcharsetprober import SingleByteCharSetProber
31 | from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
32 | Latin5CyrillicModel, MacCyrillicModel,
33 | Ibm866Model, Ibm855Model)
34 | from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
35 | from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
36 | from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
37 | from .langthaimodel import TIS620ThaiModel
38 | from .langhebrewmodel import Win1255HebrewModel
39 | from .hebrewprober import HebrewProber
40 |
41 |
42 | class SBCSGroupProber(CharSetGroupProber):
43 | def __init__(self):
44 | CharSetGroupProber.__init__(self)
45 | self._mProbers = [
46 | SingleByteCharSetProber(Win1251CyrillicModel),
47 | SingleByteCharSetProber(Koi8rModel),
48 | SingleByteCharSetProber(Latin5CyrillicModel),
49 | SingleByteCharSetProber(MacCyrillicModel),
50 | SingleByteCharSetProber(Ibm866Model),
51 | SingleByteCharSetProber(Ibm855Model),
52 | SingleByteCharSetProber(Latin7GreekModel),
53 | SingleByteCharSetProber(Win1253GreekModel),
54 | SingleByteCharSetProber(Latin5BulgarianModel),
55 | SingleByteCharSetProber(Win1251BulgarianModel),
56 | SingleByteCharSetProber(Latin2HungarianModel),
57 | SingleByteCharSetProber(Win1250HungarianModel),
58 | SingleByteCharSetProber(TIS620ThaiModel),
59 | ]
60 | hebrewProber = HebrewProber()
61 | logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel,
62 | False, hebrewProber)
63 | visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True,
64 | hebrewProber)
65 | hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber)
66 | self._mProbers.extend([hebrewProber, logicalHebrewProber,
67 | visualHebrewProber])
68 |
69 | self.reset()
70 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/sjisprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | import sys
29 | from .mbcharsetprober import MultiByteCharSetProber
30 | from .codingstatemachine import CodingStateMachine
31 | from .chardistribution import SJISDistributionAnalysis
32 | from .jpcntx import SJISContextAnalysis
33 | from .mbcssm import SJISSMModel
34 | from . import constants
35 |
36 |
37 | class SJISProber(MultiByteCharSetProber):
38 | def __init__(self):
39 | MultiByteCharSetProber.__init__(self)
40 | self._mCodingSM = CodingStateMachine(SJISSMModel)
41 | self._mDistributionAnalyzer = SJISDistributionAnalysis()
42 | self._mContextAnalyzer = SJISContextAnalysis()
43 | self.reset()
44 |
45 | def reset(self):
46 | MultiByteCharSetProber.reset(self)
47 | self._mContextAnalyzer.reset()
48 |
49 | def get_charset_name(self):
50 | return self._mContextAnalyzer.get_charset_name()
51 |
52 | def feed(self, aBuf):
53 | aLen = len(aBuf)
54 | for i in range(0, aLen):
55 | codingState = self._mCodingSM.next_state(aBuf[i])
56 | if codingState == constants.eError:
57 | if constants._debug:
58 | sys.stderr.write(self.get_charset_name()
59 | + ' prober hit error at byte ' + str(i)
60 | + '\n')
61 | self._mState = constants.eNotMe
62 | break
63 | elif codingState == constants.eItsMe:
64 | self._mState = constants.eFoundIt
65 | break
66 | elif codingState == constants.eStart:
67 | charLen = self._mCodingSM.get_current_charlen()
68 | if i == 0:
69 | self._mLastChar[1] = aBuf[0]
70 | self._mContextAnalyzer.feed(self._mLastChar[2 - charLen:],
71 | charLen)
72 | self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
73 | else:
74 | self._mContextAnalyzer.feed(aBuf[i + 1 - charLen:i + 3
75 | - charLen], charLen)
76 | self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
77 | charLen)
78 |
79 | self._mLastChar[0] = aBuf[aLen - 1]
80 |
81 | if self.get_state() == constants.eDetecting:
82 | if (self._mContextAnalyzer.got_enough_data() and
83 | (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
84 | self._mState = constants.eFoundIt
85 |
86 | return self.get_state()
87 |
88 | def get_confidence(self):
89 | contxtCf = self._mContextAnalyzer.get_confidence()
90 | distribCf = self._mDistributionAnalyzer.get_confidence()
91 | return max(contxtCf, distribCf)
92 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/chardet/utf8prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from . import constants
29 | from .charsetprober import CharSetProber
30 | from .codingstatemachine import CodingStateMachine
31 | from .mbcssm import UTF8SMModel
32 |
33 | ONE_CHAR_PROB = 0.5
34 |
35 |
36 | class UTF8Prober(CharSetProber):
37 | def __init__(self):
38 | CharSetProber.__init__(self)
39 | self._mCodingSM = CodingStateMachine(UTF8SMModel)
40 | self.reset()
41 |
42 | def reset(self):
43 | CharSetProber.reset(self)
44 | self._mCodingSM.reset()
45 | self._mNumOfMBChar = 0
46 |
47 | def get_charset_name(self):
48 | return "utf-8"
49 |
50 | def feed(self, aBuf):
51 | for c in aBuf:
52 | codingState = self._mCodingSM.next_state(c)
53 | if codingState == constants.eError:
54 | self._mState = constants.eNotMe
55 | break
56 | elif codingState == constants.eItsMe:
57 | self._mState = constants.eFoundIt
58 | break
59 | elif codingState == constants.eStart:
60 | if self._mCodingSM.get_current_charlen() >= 2:
61 | self._mNumOfMBChar += 1
62 |
63 | if self.get_state() == constants.eDetecting:
64 | if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
65 | self._mState = constants.eFoundIt
66 |
67 | return self.get_state()
68 |
69 | def get_confidence(self):
70 | unlike = 0.99
71 | if self._mNumOfMBChar < 6:
72 | for i in range(0, self._mNumOfMBChar):
73 | unlike = unlike * ONE_CHAR_PROB
74 | return 1.0 - unlike
75 | else:
76 | return unlike
77 |
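
The UTF-8 confidence above starts near zero and climbs towards 0.99 as multi-byte sequences are observed: with n such sequences (n < 6) it returns 1 - 0.99 * 0.5**n. A small sketch of how quickly that saturates:

    # Reproduces the arithmetic of UTF8Prober.get_confidence() for n = 0..6:
    ONE_CHAR_PROB = 0.5
    for n in range(7):
        conf = 0.99 if n >= 6 else 1.0 - 0.99 * ONE_CHAR_PROB ** n
        print(n, round(conf, 4))
    # 0 -> 0.01, 1 -> 0.505, 2 -> 0.7525, ... 6 and above -> 0.99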
--------------------------------------------------------------------------------
/site-packages/requests/packages/idna/__init__.py:
--------------------------------------------------------------------------------
1 | from .core import *
2 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/idna/codec.py:
--------------------------------------------------------------------------------
1 | from .core import encode, decode, alabel, ulabel, IDNAError
2 | import codecs
3 | import re
4 |
5 | _unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
6 |
7 | class Codec(codecs.Codec):
8 |
9 | def encode(self, data, errors='strict'):
10 |
11 | if errors != 'strict':
12 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
13 |
14 | if not data:
15 | return "", 0
16 |
17 | return encode(data), len(data)
18 |
19 | def decode(self, data, errors='strict'):
20 |
21 | if errors != 'strict':
22 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
23 |
24 | if not data:
25 | return u"", 0
26 |
27 | return decode(data), len(data)
28 |
29 | class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
30 | def _buffer_encode(self, data, errors, final):
31 | if errors != 'strict':
32 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
33 |
34 | if not data:
35 | return ("", 0)
36 |
37 | labels = _unicode_dots_re.split(data)
38 | trailing_dot = u''
39 | if labels:
40 | if not labels[-1]:
41 | trailing_dot = '.'
42 | del labels[-1]
43 | elif not final:
44 | # Keep potentially unfinished label until the next call
45 | del labels[-1]
46 | if labels:
47 | trailing_dot = '.'
48 |
49 | result = []
50 | size = 0
51 | for label in labels:
52 | result.append(alabel(label))
53 | if size:
54 | size += 1
55 | size += len(label)
56 |
57 | # Join with U+002E
58 | result = ".".join(result) + trailing_dot
59 | size += len(trailing_dot)
60 | return (result, size)
61 |
62 | class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
63 | def _buffer_decode(self, data, errors, final):
64 | if errors != 'strict':
65 | raise IDNAError("Unsupported error handling \"{0}\"".format(errors))
66 |
67 | if not data:
68 | return (u"", 0)
69 |
70 | # IDNA allows decoding to operate on Unicode strings, too.
71 | if isinstance(data, unicode):
72 | labels = _unicode_dots_re.split(data)
73 | else:
74 | # Must be ASCII string
75 | data = str(data)
76 | unicode(data, "ascii")
77 | labels = data.split(".")
78 |
79 | trailing_dot = u''
80 | if labels:
81 | if not labels[-1]:
82 | trailing_dot = u'.'
83 | del labels[-1]
84 | elif not final:
85 | # Keep potentially unfinished label until the next call
86 | del labels[-1]
87 | if labels:
88 | trailing_dot = u'.'
89 |
90 | result = []
91 | size = 0
92 | for label in labels:
93 | result.append(ulabel(label))
94 | if size:
95 | size += 1
96 | size += len(label)
97 |
98 | result = u".".join(result) + trailing_dot
99 | size += len(trailing_dot)
100 | return (result, size)
101 |
102 |
103 | class StreamWriter(Codec, codecs.StreamWriter):
104 | pass
105 |
106 | class StreamReader(Codec, codecs.StreamReader):
107 | pass
108 |
109 | def getregentry():
110 | return codecs.CodecInfo(
111 | name='idna',
112 | encode=Codec().encode,
113 | decode=Codec().decode,
114 | incrementalencoder=IncrementalEncoder,
115 | incrementaldecoder=IncrementalDecoder,
116 | streamwriter=StreamWriter,
117 | streamreader=StreamReader,
118 | )
119 |
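
The codec machinery above is a thin wrapper around core.encode()/core.decode(), splitting the input on the various Unicode dot characters and converting it label by label. A minimal sketch, assuming the vendored package is importable as requests.packages.idna:

    from requests.packages.idna.core import encode, decode

    print(encode(u'ドメイン.テスト'))              # b'xn--eckwd4c7c.xn--zckzah'
    print(decode(u'xn--eckwd4c7c.xn--zckzah'))     # ドメイン.テスト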
--------------------------------------------------------------------------------
/site-packages/requests/packages/idna/compat.py:
--------------------------------------------------------------------------------
1 | from .core import *
2 | from .codec import *
3 |
4 | def ToASCII(label):
5 | return encode(label)
6 |
7 | def ToUnicode(label):
8 | return decode(label)
9 |
10 | def nameprep(s):
11 | raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
12 |
13 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/idna/intranges.py:
--------------------------------------------------------------------------------
1 | """
2 | Given a list of integers, made up of (hopefully) a small number of long runs
3 | of consecutive integers, compute a representation of the form
4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present
5 | in the original list?" in time O(log(# runs)).
6 | """
7 |
8 | import bisect
9 |
10 | def intranges_from_list(list_):
11 | """Represent a list of integers as a sequence of ranges:
12 | ((start_0, end_0), (start_1, end_1), ...), such that the original
13 | integers are exactly those x such that start_i <= x < end_i for some i.
14 | """
15 |
16 | sorted_list = sorted(list_)
17 | ranges = []
18 | last_write = -1
19 | for i in range(len(sorted_list)):
20 | if i+1 < len(sorted_list):
21 | if sorted_list[i] == sorted_list[i+1]-1:
22 | continue
23 | current_range = sorted_list[last_write+1:i+1]
24 | range_tuple = (current_range[0], current_range[-1] + 1)
25 | ranges.append(range_tuple)
26 | last_write = i
27 |
28 | return tuple(ranges)
29 |
30 |
31 | def intranges_contain(int_, ranges):
32 | """Determine if `int_` falls into one of the ranges in `ranges`."""
33 | tuple_ = (int_, int_)
34 | pos = bisect.bisect_left(ranges, tuple_)
35 | # we could be immediately ahead of a tuple (start, end)
36 | # with start < int_ <= end
37 | if pos > 0:
38 | left, right = ranges[pos-1]
39 | if left <= int_ < right:
40 | return True
41 | # or we could be immediately behind a tuple (int_, end)
42 | if pos < len(ranges):
43 | left, _ = ranges[pos]
44 | if left == int_:
45 | return True
46 | return False
47 |
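
A short sketch of the two helpers above on a small input, assuming the module is imported from the vendored path:

    from requests.packages.idna.intranges import (intranges_from_list,
                                                  intranges_contain)

    ranges = intranges_from_list([1, 2, 3, 7, 8, 9])
    print(ranges)                         # ((1, 4), (7, 10))
    print(intranges_contain(8, ranges))   # True
    print(intranges_contain(5, ranges))   # False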
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | urllib3 - Thread-safe connection pooling and re-using.
3 | """
4 |
5 | from __future__ import absolute_import
6 | import warnings
7 |
8 | from .connectionpool import (
9 | HTTPConnectionPool,
10 | HTTPSConnectionPool,
11 | connection_from_url
12 | )
13 |
14 | from . import exceptions
15 | from .filepost import encode_multipart_formdata
16 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url
17 | from .response import HTTPResponse
18 | from .util.request import make_headers
19 | from .util.url import get_host
20 | from .util.timeout import Timeout
21 | from .util.retry import Retry
22 |
23 |
24 | # Set default logging handler to avoid "No handler found" warnings.
25 | import logging
26 | try: # Python 2.7+
27 | from logging import NullHandler
28 | except ImportError:
29 | class NullHandler(logging.Handler):
30 | def emit(self, record):
31 | pass
32 |
33 | __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
34 | __license__ = 'MIT'
35 | __version__ = '1.19.1'
36 |
37 | __all__ = (
38 | 'HTTPConnectionPool',
39 | 'HTTPSConnectionPool',
40 | 'PoolManager',
41 | 'ProxyManager',
42 | 'HTTPResponse',
43 | 'Retry',
44 | 'Timeout',
45 | 'add_stderr_logger',
46 | 'connection_from_url',
47 | 'disable_warnings',
48 | 'encode_multipart_formdata',
49 | 'get_host',
50 | 'make_headers',
51 | 'proxy_from_url',
52 | )
53 |
54 | logging.getLogger(__name__).addHandler(NullHandler())
55 |
56 |
57 | def add_stderr_logger(level=logging.DEBUG):
58 | """
59 | Helper for quickly adding a StreamHandler to the logger. Useful for
60 | debugging.
61 |
62 | Returns the handler after adding it.
63 | """
64 | # This method needs to be in this __init__.py to get the __name__ correct
65 | # even if urllib3 is vendored within another package.
66 | logger = logging.getLogger(__name__)
67 | handler = logging.StreamHandler()
68 | handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
69 | logger.addHandler(handler)
70 | logger.setLevel(level)
71 | logger.debug('Added a stderr logging handler to logger: %s', __name__)
72 | return handler
73 |
74 |
75 | # ... Clean up.
76 | del NullHandler
77 |
78 |
79 | # All warning filters *must* be appended unless you're really certain that they
80 | # shouldn't be: otherwise, it's very hard for users to use most Python
81 | # mechanisms to silence them.
82 | # SecurityWarnings always go off by default.
83 | warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
84 | # SubjectAltNameWarnings should go off once per host.
85 | warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True)
86 | # InsecurePlatformWarnings don't vary between requests, so we keep it default.
87 | warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
88 | append=True)
89 | # SNIMissingWarnings should go off only once.
90 | warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True)
91 |
92 |
93 | def disable_warnings(category=exceptions.HTTPWarning):
94 | """
95 | Helper for quickly disabling all urllib3 warnings.
96 | """
97 | warnings.simplefilter('ignore', category)
98 |
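
Both module-level helpers defined above are part of urllib3's public surface. A minimal sketch via the vendored import path:

    from requests.packages import urllib3

    urllib3.add_stderr_logger()   # attaches a DEBUG StreamHandler and returns it
    urllib3.disable_warnings()    # silences every urllib3 HTTPWarning subclass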
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/contrib/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/contrib/ntlmpool.py:
--------------------------------------------------------------------------------
1 | """
2 | NTLM authenticating pool, contributed by erikcederstran
3 |
4 | Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
5 | """
6 | from __future__ import absolute_import
7 |
8 | from logging import getLogger
9 | from ntlm import ntlm
10 |
11 | from .. import HTTPSConnectionPool
12 | from ..packages.six.moves.http_client import HTTPSConnection
13 |
14 |
15 | log = getLogger(__name__)
16 |
17 |
18 | class NTLMConnectionPool(HTTPSConnectionPool):
19 | """
20 | Implements an NTLM authentication version of an urllib3 connection pool
21 | """
22 |
23 | scheme = 'https'
24 |
25 | def __init__(self, user, pw, authurl, *args, **kwargs):
26 | """
27 | authurl is a random URL on the server that is protected by NTLM.
28 | user is the Windows user, probably in the DOMAIN\\username format.
29 | pw is the password for the user.
30 | """
31 | super(NTLMConnectionPool, self).__init__(*args, **kwargs)
32 | self.authurl = authurl
33 | self.rawuser = user
34 | user_parts = user.split('\\', 1)
35 | self.domain = user_parts[0].upper()
36 | self.user = user_parts[1]
37 | self.pw = pw
38 |
39 | def _new_conn(self):
40 | # Performs the NTLM handshake that secures the connection. The socket
41 | # must be kept open while requests are performed.
42 | self.num_connections += 1
43 | log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
44 | self.num_connections, self.host, self.authurl)
45 |
46 | headers = {}
47 | headers['Connection'] = 'Keep-Alive'
48 | req_header = 'Authorization'
49 | resp_header = 'www-authenticate'
50 |
51 | conn = HTTPSConnection(host=self.host, port=self.port)
52 |
53 | # Send negotiation message
54 | headers[req_header] = (
55 | 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
56 | log.debug('Request headers: %s', headers)
57 | conn.request('GET', self.authurl, None, headers)
58 | res = conn.getresponse()
59 | reshdr = dict(res.getheaders())
60 | log.debug('Response status: %s %s', res.status, res.reason)
61 | log.debug('Response headers: %s', reshdr)
62 | log.debug('Response data: %s [...]', res.read(100))
63 |
64 | # Remove the reference to the socket, so that it can not be closed by
65 | # the response object (we want to keep the socket open)
66 | res.fp = None
67 |
68 | # Server should respond with a challenge message
69 | auth_header_values = reshdr[resp_header].split(', ')
70 | auth_header_value = None
71 | for s in auth_header_values:
72 | if s[:5] == 'NTLM ':
73 | auth_header_value = s[5:]
74 | if auth_header_value is None:
75 | raise Exception('Unexpected %s response header: %s' %
76 | (resp_header, reshdr[resp_header]))
77 |
78 | # Send authentication message
79 | ServerChallenge, NegotiateFlags = \
80 | ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
81 | auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
82 | self.user,
83 | self.domain,
84 | self.pw,
85 | NegotiateFlags)
86 | headers[req_header] = 'NTLM %s' % auth_msg
87 | log.debug('Request headers: %s', headers)
88 | conn.request('GET', self.authurl, None, headers)
89 | res = conn.getresponse()
90 | log.debug('Response status: %s %s', res.status, res.reason)
91 | log.debug('Response headers: %s', dict(res.getheaders()))
92 | log.debug('Response data: %s [...]', res.read()[:100])
93 | if res.status != 200:
94 | if res.status == 401:
95 | raise Exception('Server rejected request: wrong '
96 | 'username or password')
97 | raise Exception('Wrong server response: %s %s' %
98 | (res.status, res.reason))
99 |
100 | res.fp = None
101 | log.debug('Connection established')
102 | return conn
103 |
104 | def urlopen(self, method, url, body=None, headers=None, retries=3,
105 | redirect=True, assert_same_host=True):
106 | if headers is None:
107 | headers = {}
108 | headers['Connection'] = 'Keep-Alive'
109 | return super(NTLMConnectionPool, self).urlopen(method, url, body,
110 | headers, retries,
111 | redirect,
112 | assert_same_host)
113 |
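
A hypothetical usage sketch for the pool above; it requires the external python-ntlm package (imported as `ntlm` at the top of the module), and every host name, path and credential below is a placeholder:

    from requests.packages.urllib3.contrib.ntlmpool import NTLMConnectionPool

    pool = NTLMConnectionPool('EXAMPLE\\jdoe', 's3cret',
                              authurl='/protected/',
                              host='intranet.example.com', port=443)
    response = pool.urlopen('GET', '/protected/')
    print(response.status)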
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/filepost.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import codecs
3 |
4 | from uuid import uuid4
5 | from io import BytesIO
6 |
7 | from .packages import six
8 | from .packages.six import b
9 | from .fields import RequestField
10 |
11 | writer = codecs.lookup('utf-8')[3]
12 |
13 |
14 | def choose_boundary():
15 | """
16 | Our embarrassingly-simple replacement for mimetools.choose_boundary.
17 | """
18 | return uuid4().hex
19 |
20 |
21 | def iter_field_objects(fields):
22 | """
23 | Iterate over fields.
24 |
25 | Supports list of (k, v) tuples and dicts, and lists of
26 | :class:`~urllib3.fields.RequestField`.
27 |
28 | """
29 | if isinstance(fields, dict):
30 | i = six.iteritems(fields)
31 | else:
32 | i = iter(fields)
33 |
34 | for field in i:
35 | if isinstance(field, RequestField):
36 | yield field
37 | else:
38 | yield RequestField.from_tuples(*field)
39 |
40 |
41 | def iter_fields(fields):
42 | """
43 | .. deprecated:: 1.6
44 |
45 | Iterate over fields.
46 |
47 | The addition of :class:`~urllib3.fields.RequestField` makes this function
48 | obsolete. Instead, use :func:`iter_field_objects`, which returns
49 | :class:`~urllib3.fields.RequestField` objects.
50 |
51 | Supports list of (k, v) tuples and dicts.
52 | """
53 | if isinstance(fields, dict):
54 | return ((k, v) for k, v in six.iteritems(fields))
55 |
56 | return ((k, v) for k, v in fields)
57 |
58 |
59 | def encode_multipart_formdata(fields, boundary=None):
60 | """
61 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
62 |
63 | :param fields:
64 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
65 |
66 | :param boundary:
67 | If not specified, then a random boundary will be generated using
68 |         :func:`choose_boundary`.
69 | """
70 | body = BytesIO()
71 | if boundary is None:
72 | boundary = choose_boundary()
73 |
74 | for field in iter_field_objects(fields):
75 | body.write(b('--%s\r\n' % (boundary)))
76 |
77 | writer(body).write(field.render_headers())
78 | data = field.data
79 |
80 | if isinstance(data, int):
81 | data = str(data) # Backwards compatibility
82 |
83 | if isinstance(data, six.text_type):
84 | writer(body).write(data)
85 | else:
86 | body.write(data)
87 |
88 | body.write(b'\r\n')
89 |
90 | body.write(b('--%s--\r\n' % (boundary)))
91 |
92 | content_type = str('multipart/form-data; boundary=%s' % boundary)
93 |
94 | return body.getvalue(), content_type
95 |
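
A short sketch of encode_multipart_formdata() with a fixed boundary so the output is reproducible (normally the boundary comes from choose_boundary() above):

    from requests.packages.urllib3.filepost import encode_multipart_formdata

    body, content_type = encode_multipart_formdata({'field': 'value'},
                                                   boundary='xxBOUNDARYxx')
    print(content_type)   # multipart/form-data; boundary=xxBOUNDARYxx
    print(body[:14])      # b'--xxBOUNDARYxx' on Python 3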
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/packages/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from . import ssl_match_hostname
4 |
5 | __all__ = ('ssl_match_hostname', )
6 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/packages/backports/makefile.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | backports.makefile
4 | ~~~~~~~~~~~~~~~~~~
5 |
6 | Backports the Python 3 ``socket.makefile`` method for use with anything that
7 | wants to create a "fake" socket object.
8 | """
9 | import io
10 |
11 | from socket import SocketIO
12 |
13 |
14 | def backport_makefile(self, mode="r", buffering=None, encoding=None,
15 | errors=None, newline=None):
16 | """
17 | Backport of ``socket.makefile`` from Python 3.5.
18 | """
19 | if not set(mode) <= set(["r", "w", "b"]):
20 | raise ValueError(
21 | "invalid mode %r (only r, w, b allowed)" % (mode,)
22 | )
23 | writing = "w" in mode
24 | reading = "r" in mode or not writing
25 | assert reading or writing
26 | binary = "b" in mode
27 | rawmode = ""
28 | if reading:
29 | rawmode += "r"
30 | if writing:
31 | rawmode += "w"
32 | raw = SocketIO(self, rawmode)
33 | self._makefile_refs += 1
34 | if buffering is None:
35 | buffering = -1
36 | if buffering < 0:
37 | buffering = io.DEFAULT_BUFFER_SIZE
38 | if buffering == 0:
39 | if not binary:
40 | raise ValueError("unbuffered streams must be binary")
41 | return raw
42 | if reading and writing:
43 | buffer = io.BufferedRWPair(raw, raw, buffering)
44 | elif reading:
45 | buffer = io.BufferedReader(raw, buffering)
46 | else:
47 | assert writing
48 | buffer = io.BufferedWriter(raw, buffering)
49 | if binary:
50 | return buffer
51 | text = io.TextIOWrapper(buffer, encoding, errors, newline)
52 | text.mode = mode
53 | return text
54 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | try:
4 | # Our match_hostname function is the same as 3.5's, so we only want to
5 | # import the match_hostname function if it's at least that good.
6 | if sys.version_info < (3, 5):
7 | raise ImportError("Fallback to vendored code")
8 |
9 | from ssl import CertificateError, match_hostname
10 | except ImportError:
11 | try:
12 | # Backport of the function from a pypi module
13 | from backports.ssl_match_hostname import CertificateError, match_hostname
14 | except ImportError:
15 | # Our vendored copy
16 | from ._implementation import CertificateError, match_hostname
17 |
18 | # Not needed, but documenting what we provide.
19 | __all__ = ('CertificateError', 'match_hostname')
20 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py:
--------------------------------------------------------------------------------
1 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
2 |
3 | # Note: This file is under the PSF license as the code comes from the python
4 | # stdlib. http://docs.python.org/3/license.html
5 |
6 | import re
7 | import sys
8 |
9 | # ipaddress has been backported to 2.6+ in pypi. If it is installed on the
10 | # system, use it to handle IPAddress ServerAltnames (this was added in
11 | # python-3.5) otherwise only do DNS matching. This allows
12 | # backports.ssl_match_hostname to continue to be used all the way back to
13 | # python-2.4.
14 | try:
15 | import ipaddress
16 | except ImportError:
17 | ipaddress = None
18 |
19 | __version__ = '3.5.0.1'
20 |
21 |
22 | class CertificateError(ValueError):
23 | pass
24 |
25 |
26 | def _dnsname_match(dn, hostname, max_wildcards=1):
27 | """Matching according to RFC 6125, section 6.4.3
28 |
29 | http://tools.ietf.org/html/rfc6125#section-6.4.3
30 | """
31 | pats = []
32 | if not dn:
33 | return False
34 |
35 | # Ported from python3-syntax:
36 | # leftmost, *remainder = dn.split(r'.')
37 | parts = dn.split(r'.')
38 | leftmost = parts[0]
39 | remainder = parts[1:]
40 |
41 | wildcards = leftmost.count('*')
42 | if wildcards > max_wildcards:
43 | # Issue #17980: avoid denials of service by refusing more
44 | # than one wildcard per fragment. A survey of established
45 | # policy among SSL implementations showed it to be a
46 | # reasonable choice.
47 | raise CertificateError(
48 | "too many wildcards in certificate DNS name: " + repr(dn))
49 |
50 | # speed up common case w/o wildcards
51 | if not wildcards:
52 | return dn.lower() == hostname.lower()
53 |
54 | # RFC 6125, section 6.4.3, subitem 1.
55 | # The client SHOULD NOT attempt to match a presented identifier in which
56 | # the wildcard character comprises a label other than the left-most label.
57 | if leftmost == '*':
58 | # When '*' is a fragment by itself, it matches a non-empty dotless
59 | # fragment.
60 | pats.append('[^.]+')
61 | elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
62 | # RFC 6125, section 6.4.3, subitem 3.
63 | # The client SHOULD NOT attempt to match a presented identifier
64 | # where the wildcard character is embedded within an A-label or
65 | # U-label of an internationalized domain name.
66 | pats.append(re.escape(leftmost))
67 | else:
68 | # Otherwise, '*' matches any dotless string, e.g. www*
69 | pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
70 |
71 | # add the remaining fragments, ignore any wildcards
72 | for frag in remainder:
73 | pats.append(re.escape(frag))
74 |
75 | pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
76 | return pat.match(hostname)
77 |
78 |
79 | def _to_unicode(obj):
80 | if isinstance(obj, str) and sys.version_info < (3,):
81 | obj = unicode(obj, encoding='ascii', errors='strict')
82 | return obj
83 |
84 | def _ipaddress_match(ipname, host_ip):
85 | """Exact matching of IP addresses.
86 |
87 | RFC 6125 explicitly doesn't define an algorithm for this
88 | (section 1.7.2 - "Out of Scope").
89 | """
90 | # OpenSSL may add a trailing newline to a subjectAltName's IP address
91 | # Divergence from upstream: ipaddress can't handle byte str
92 | ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
93 | return ip == host_ip
94 |
95 |
96 | def match_hostname(cert, hostname):
97 | """Verify that *cert* (in decoded format as returned by
98 | SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
99 | rules are followed, but IP addresses are not accepted for *hostname*.
100 |
101 | CertificateError is raised on failure. On success, the function
102 | returns nothing.
103 | """
104 | if not cert:
105 | raise ValueError("empty or no certificate, match_hostname needs a "
106 | "SSL socket or SSL context with either "
107 | "CERT_OPTIONAL or CERT_REQUIRED")
108 | try:
109 | # Divergence from upstream: ipaddress can't handle byte str
110 | host_ip = ipaddress.ip_address(_to_unicode(hostname))
111 | except ValueError:
112 | # Not an IP address (common case)
113 | host_ip = None
114 | except UnicodeError:
115 | # Divergence from upstream: Have to deal with ipaddress not taking
116 | # byte strings. addresses should be all ascii, so we consider it not
117 | # an ipaddress in this case
118 | host_ip = None
119 | except AttributeError:
120 | # Divergence from upstream: Make ipaddress library optional
121 | if ipaddress is None:
122 | host_ip = None
123 | else:
124 | raise
125 | dnsnames = []
126 | san = cert.get('subjectAltName', ())
127 | for key, value in san:
128 | if key == 'DNS':
129 | if host_ip is None and _dnsname_match(value, hostname):
130 | return
131 | dnsnames.append(value)
132 | elif key == 'IP Address':
133 | if host_ip is not None and _ipaddress_match(value, host_ip):
134 | return
135 | dnsnames.append(value)
136 | if not dnsnames:
137 | # The subject is only checked when there is no dNSName entry
138 | # in subjectAltName
139 | for sub in cert.get('subject', ()):
140 | for key, value in sub:
141 | # XXX according to RFC 2818, the most specific Common Name
142 | # must be used.
143 | if key == 'commonName':
144 | if _dnsname_match(value, hostname):
145 | return
146 | dnsnames.append(value)
147 | if len(dnsnames) > 1:
148 | raise CertificateError("hostname %r "
149 | "doesn't match either of %s"
150 | % (hostname, ', '.join(map(repr, dnsnames))))
151 | elif len(dnsnames) == 1:
152 | raise CertificateError("hostname %r "
153 | "doesn't match %r"
154 | % (hostname, dnsnames[0]))
155 | else:
156 | raise CertificateError("no appropriate commonName or "
157 | "subjectAltName fields were found")
158 |
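159 | # Illustrative usage (added note, not part of the vendored module): per the
160 | # docstring above, match_hostname() returns None on success and raises
161 | # CertificateError on a mismatch, e.g.
162 | #   cert = sock.getpeercert()           # `sock` is an already-wrapped SSL socket
163 | #   match_hostname(cert, 'example.com')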
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/util/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | # For backwards compatibility, provide imports that used to be here.
3 | from .connection import is_connection_dropped
4 | from .request import make_headers
5 | from .response import is_fp_closed
6 | from .ssl_ import (
7 | SSLContext,
8 | HAS_SNI,
9 | IS_PYOPENSSL,
10 | assert_fingerprint,
11 | resolve_cert_reqs,
12 | resolve_ssl_version,
13 | ssl_wrap_socket,
14 | )
15 | from .timeout import (
16 | current_time,
17 | Timeout,
18 | )
19 |
20 | from .retry import Retry
21 | from .url import (
22 | get_host,
23 | parse_url,
24 | split_first,
25 | Url,
26 | )
27 |
28 | __all__ = (
29 | 'HAS_SNI',
30 | 'IS_PYOPENSSL',
31 | 'SSLContext',
32 | 'Retry',
33 | 'Timeout',
34 | 'Url',
35 | 'assert_fingerprint',
36 | 'current_time',
37 | 'is_connection_dropped',
38 | 'is_fp_closed',
39 | 'get_host',
40 | 'parse_url',
41 | 'make_headers',
42 | 'resolve_cert_reqs',
43 | 'resolve_ssl_version',
44 | 'split_first',
45 | 'ssl_wrap_socket',
46 | )
47 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/util/connection.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import socket
3 | try:
4 | from select import poll, POLLIN
5 | except ImportError: # `poll` doesn't exist on OSX and other platforms
6 | poll = False
7 | try:
8 | from select import select
9 | except ImportError: # `select` doesn't exist on AppEngine.
10 | select = False
11 |
12 |
13 | def is_connection_dropped(conn): # Platform-specific
14 | """
15 | Returns True if the connection is dropped and should be closed.
16 |
17 | :param conn:
18 | :class:`httplib.HTTPConnection` object.
19 |
20 | Note: For platforms like AppEngine, this will always return ``False`` to
21 | let the platform handle connection recycling transparently for us.
22 | """
23 | sock = getattr(conn, 'sock', False)
24 | if sock is False: # Platform-specific: AppEngine
25 | return False
26 | if sock is None: # Connection already closed (such as by httplib).
27 | return True
28 |
29 | if not poll:
30 | if not select: # Platform-specific: AppEngine
31 | return False
32 |
33 | try:
34 | return select([sock], [], [], 0.0)[0]
35 | except socket.error:
36 | return True
37 |
38 | # This version is better on platforms that support it.
39 | p = poll()
40 | p.register(sock, POLLIN)
41 | for (fno, ev) in p.poll(0.0):
42 | if fno == sock.fileno():
43 | # Either data is buffered (bad), or the connection is dropped.
44 | return True
45 |
46 |
47 | # This function is copied from socket.py in the Python 2.7 standard
48 | # library test suite. Added to its signature is only `socket_options`.
49 | # One additional modification is that we avoid binding to IPv6 servers
50 | # discovered in DNS if the system doesn't have IPv6 functionality.
51 | def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
52 | source_address=None, socket_options=None):
53 | """Connect to *address* and return the socket object.
54 |
55 | Convenience function. Connect to *address* (a 2-tuple ``(host,
56 | port)``) and return the socket object. Passing the optional
57 | *timeout* parameter will set the timeout on the socket instance
58 | before attempting to connect. If no *timeout* is supplied, the
59 | global default timeout setting returned by :func:`getdefaulttimeout`
60 | is used. If *source_address* is set it must be a tuple of (host, port)
61 | for the socket to bind as a source address before making the connection.
62 | A host of '' or port 0 tells the OS to use the default.
63 | """
64 |
65 | host, port = address
66 | if host.startswith('['):
67 | host = host.strip('[]')
68 | err = None
69 |
70 | # Using the value from allowed_gai_family() in the context of getaddrinfo lets
71 | # us select whether to work with IPv4 DNS records, IPv6 records, or both.
72 | # The original create_connection function always returns all records.
73 | family = allowed_gai_family()
74 |
75 | for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
76 | af, socktype, proto, canonname, sa = res
77 | sock = None
78 | try:
79 | sock = socket.socket(af, socktype, proto)
80 |
81 | # If provided, set socket level options before connecting.
82 | _set_socket_options(sock, socket_options)
83 |
84 | if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
85 | sock.settimeout(timeout)
86 | if source_address:
87 | sock.bind(source_address)
88 | sock.connect(sa)
89 | return sock
90 |
91 | except socket.error as e:
92 | err = e
93 | if sock is not None:
94 | sock.close()
95 | sock = None
96 |
97 | if err is not None:
98 | raise err
99 |
100 | raise socket.error("getaddrinfo returns an empty list")
101 |
102 |
103 | def _set_socket_options(sock, options):
104 | if options is None:
105 | return
106 |
107 | for opt in options:
108 | sock.setsockopt(*opt)
109 |
110 |
111 | def allowed_gai_family():
112 | """This function is designed to work in the context of
113 | getaddrinfo, where family=socket.AF_UNSPEC is the default and
114 | will perform a DNS search for both IPv6 and IPv4 records."""
115 |
116 | family = socket.AF_INET
117 | if HAS_IPV6:
118 | family = socket.AF_UNSPEC
119 | return family
120 |
121 |
122 | def _has_ipv6(host):
123 | """ Returns True if the system can bind an IPv6 address. """
124 | sock = None
125 | has_ipv6 = False
126 |
127 | if socket.has_ipv6:
128 | # has_ipv6 returns true if cPython was compiled with IPv6 support.
129 | # It does not tell us if the system has IPv6 support enabled. To
130 | # determine that we must bind to an IPv6 address.
131 | # https://github.com/shazow/urllib3/pull/611
132 | # https://bugs.python.org/issue658327
133 | try:
134 | sock = socket.socket(socket.AF_INET6)
135 | sock.bind((host, 0))
136 | has_ipv6 = True
137 | except Exception:
138 | pass
139 |
140 | if sock:
141 | sock.close()
142 | return has_ipv6
143 |
144 |
145 | HAS_IPV6 = _has_ipv6('::1')
146 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/util/request.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | from base64 import b64encode
3 |
4 | from ..packages.six import b
5 |
6 | ACCEPT_ENCODING = 'gzip,deflate'
7 |
8 |
9 | def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
10 | basic_auth=None, proxy_basic_auth=None, disable_cache=None):
11 | """
12 | Shortcuts for generating request headers.
13 |
14 | :param keep_alive:
15 | If ``True``, adds 'connection: keep-alive' header.
16 |
17 | :param accept_encoding:
18 | Can be a boolean, list, or string.
19 | ``True`` translates to 'gzip,deflate'.
20 | List will get joined by comma.
21 | String will be used as provided.
22 |
23 | :param user_agent:
24 | String representing the user-agent you want, such as
25 | "python-urllib3/0.6"
26 |
27 | :param basic_auth:
28 | Colon-separated username:password string for 'authorization: basic ...'
29 | auth header.
30 |
31 | :param proxy_basic_auth:
32 | Colon-separated username:password string for 'proxy-authorization: basic ...'
33 | auth header.
34 |
35 | :param disable_cache:
36 | If ``True``, adds 'cache-control: no-cache' header.
37 |
38 | Example::
39 |
40 | >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
41 | {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
42 | >>> make_headers(accept_encoding=True)
43 | {'accept-encoding': 'gzip,deflate'}
44 | """
45 | headers = {}
46 | if accept_encoding:
47 | if isinstance(accept_encoding, str):
48 | pass
49 | elif isinstance(accept_encoding, list):
50 | accept_encoding = ','.join(accept_encoding)
51 | else:
52 | accept_encoding = ACCEPT_ENCODING
53 | headers['accept-encoding'] = accept_encoding
54 |
55 | if user_agent:
56 | headers['user-agent'] = user_agent
57 |
58 | if keep_alive:
59 | headers['connection'] = 'keep-alive'
60 |
61 | if basic_auth:
62 | headers['authorization'] = 'Basic ' + \
63 | b64encode(b(basic_auth)).decode('utf-8')
64 |
65 | if proxy_basic_auth:
66 | headers['proxy-authorization'] = 'Basic ' + \
67 | b64encode(b(proxy_basic_auth)).decode('utf-8')
68 |
69 | if disable_cache:
70 | headers['cache-control'] = 'no-cache'
71 |
72 | return headers
73 |
--------------------------------------------------------------------------------
/site-packages/requests/packages/urllib3/util/response.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | from ..packages.six.moves import http_client as httplib
3 |
4 | from ..exceptions import HeaderParsingError
5 |
6 |
7 | def is_fp_closed(obj):
8 | """
9 | Checks whether a given file-like object is closed.
10 |
11 | :param obj:
12 | The file-like object to check.
13 | """
14 |
15 | try:
16 | # Check `isclosed()` first, in case Python3 doesn't set `closed`.
17 | # GH Issue #928
18 | return obj.isclosed()
19 | except AttributeError:
20 | pass
21 |
22 | try:
23 | # Check via the official file-like-object way.
24 | return obj.closed
25 | except AttributeError:
26 | pass
27 |
28 | try:
29 | # Check if the object is a container for another file-like object that
30 | # gets released on exhaustion (e.g. HTTPResponse).
31 | return obj.fp is None
32 | except AttributeError:
33 | pass
34 |
35 | raise ValueError("Unable to determine whether fp is closed.")
36 |
37 |
38 | def assert_header_parsing(headers):
39 | """
40 | Asserts whether all headers have been successfully parsed.
41 | Extracts encountered errors from the result of parsing headers.
42 |
43 | Only works on Python 3.
44 |
45 | :param headers: Headers to verify.
46 | :type headers: `httplib.HTTPMessage`.
47 |
48 | :raises urllib3.exceptions.HeaderParsingError:
49 | If parsing errors are found.
50 | """
51 |
52 | # This will fail silently if we pass in the wrong kind of parameter.
53 | # To make debugging easier add an explicit check.
54 | if not isinstance(headers, httplib.HTTPMessage):
55 | raise TypeError('expected httplib.Message, got {0}.'.format(
56 | type(headers)))
57 |
58 | defects = getattr(headers, 'defects', None)
59 | get_payload = getattr(headers, 'get_payload', None)
60 |
61 | unparsed_data = None
62 | if get_payload: # Platform-specific: Python 3.
63 | unparsed_data = get_payload()
64 |
65 | if defects or unparsed_data:
66 | raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
67 |
68 |
69 | def is_response_to_head(response):
70 | """
71 | Checks whether the request of a response has been a HEAD-request.
72 | Handles the quirks of AppEngine.
73 |
74 | :param conn:
75 | :type conn: :class:`httplib.HTTPResponse`
76 | """
77 | # FIXME: Can we do this somehow without accessing private httplib _method?
78 | method = response._method
79 | if isinstance(method, int): # Platform-specific: Appengine
80 | return method == 3
81 | return method.upper() == 'HEAD'
82 |
--------------------------------------------------------------------------------
/site-packages/requests/status_codes.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from .structures import LookupDict
4 |
5 | _codes = {
6 |
7 | # Informational.
8 | 100: ('continue',),
9 | 101: ('switching_protocols',),
10 | 102: ('processing',),
11 | 103: ('checkpoint',),
12 | 122: ('uri_too_long', 'request_uri_too_long'),
13 | 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
14 | 201: ('created',),
15 | 202: ('accepted',),
16 | 203: ('non_authoritative_info', 'non_authoritative_information'),
17 | 204: ('no_content',),
18 | 205: ('reset_content', 'reset'),
19 | 206: ('partial_content', 'partial'),
20 | 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
21 | 208: ('already_reported',),
22 | 226: ('im_used',),
23 |
24 | # Redirection.
25 | 300: ('multiple_choices',),
26 | 301: ('moved_permanently', 'moved', '\\o-'),
27 | 302: ('found',),
28 | 303: ('see_other', 'other'),
29 | 304: ('not_modified',),
30 | 305: ('use_proxy',),
31 | 306: ('switch_proxy',),
32 | 307: ('temporary_redirect', 'temporary_moved', 'temporary'),
33 | 308: ('permanent_redirect',
34 | 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0
35 |
36 | # Client Error.
37 | 400: ('bad_request', 'bad'),
38 | 401: ('unauthorized',),
39 | 402: ('payment_required', 'payment'),
40 | 403: ('forbidden',),
41 | 404: ('not_found', '-o-'),
42 | 405: ('method_not_allowed', 'not_allowed'),
43 | 406: ('not_acceptable',),
44 | 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
45 | 408: ('request_timeout', 'timeout'),
46 | 409: ('conflict',),
47 | 410: ('gone',),
48 | 411: ('length_required',),
49 | 412: ('precondition_failed', 'precondition'),
50 | 413: ('request_entity_too_large',),
51 | 414: ('request_uri_too_large',),
52 | 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
53 | 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
54 | 417: ('expectation_failed',),
55 | 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
56 | 421: ('misdirected_request',),
57 | 422: ('unprocessable_entity', 'unprocessable'),
58 | 423: ('locked',),
59 | 424: ('failed_dependency', 'dependency'),
60 | 425: ('unordered_collection', 'unordered'),
61 | 426: ('upgrade_required', 'upgrade'),
62 | 428: ('precondition_required', 'precondition'),
63 | 429: ('too_many_requests', 'too_many'),
64 | 431: ('header_fields_too_large', 'fields_too_large'),
65 | 444: ('no_response', 'none'),
66 | 449: ('retry_with', 'retry'),
67 | 450: ('blocked_by_windows_parental_controls', 'parental_controls'),
68 | 451: ('unavailable_for_legal_reasons', 'legal_reasons'),
69 | 499: ('client_closed_request',),
70 |
71 | # Server Error.
72 | 500: ('internal_server_error', 'server_error', '/o\\', '✗'),
73 | 501: ('not_implemented',),
74 | 502: ('bad_gateway',),
75 | 503: ('service_unavailable', 'unavailable'),
76 | 504: ('gateway_timeout',),
77 | 505: ('http_version_not_supported', 'http_version'),
78 | 506: ('variant_also_negotiates',),
79 | 507: ('insufficient_storage',),
80 | 509: ('bandwidth_limit_exceeded', 'bandwidth'),
81 | 510: ('not_extended',),
82 | 511: ('network_authentication_required', 'network_auth', 'network_authentication'),
83 | }
84 |
85 | codes = LookupDict(name='status_codes')
86 |
87 | for code, titles in _codes.items():
88 | for title in titles:
89 | setattr(codes, title, code)
90 | if not title.startswith('\\'):
91 | setattr(codes, title.upper(), code)
92 |
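93 | # Illustrative lookups (added note, not part of the upstream file): after the
94 | # loop above, codes.ok, codes.OK and codes.all_good all resolve to 200, while an
95 | # unknown name such as codes['no_such_code'] falls through to None
96 | # (see LookupDict.__getitem__ in structures.py).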
--------------------------------------------------------------------------------
/site-packages/requests/structures.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.structures
5 | ~~~~~~~~~~~~~~~~~~~
6 |
7 | Data structures that power Requests.
8 | """
9 |
10 | import collections
11 |
12 | from .compat import OrderedDict
13 |
14 |
15 | class CaseInsensitiveDict(collections.MutableMapping):
16 | """A case-insensitive ``dict``-like object.
17 |
18 | Implements all methods and operations of
19 | ``collections.MutableMapping`` as well as dict's ``copy``. Also
20 | provides ``lower_items``.
21 |
22 | All keys are expected to be strings. The structure remembers the
23 | case of the last key to be set, and ``iter(instance)``,
24 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
25 | will contain case-sensitive keys. However, querying and contains
26 | testing is case insensitive::
27 |
28 | cid = CaseInsensitiveDict()
29 | cid['Accept'] = 'application/json'
30 | cid['aCCEPT'] == 'application/json' # True
31 | list(cid) == ['Accept'] # True
32 |
33 | For example, ``headers['content-encoding']`` will return the
34 | value of a ``'Content-Encoding'`` response header, regardless
35 | of how the header name was originally stored.
36 |
37 | If the constructor, ``.update``, or equality comparison
38 | operations are given keys that have equal ``.lower()``s, the
39 | behavior is undefined.
40 | """
41 |
42 | def __init__(self, data=None, **kwargs):
43 | self._store = OrderedDict()
44 | if data is None:
45 | data = {}
46 | self.update(data, **kwargs)
47 |
48 | def __setitem__(self, key, value):
49 | # Use the lowercased key for lookups, but store the actual
50 | # key alongside the value.
51 | self._store[key.lower()] = (key, value)
52 |
53 | def __getitem__(self, key):
54 | return self._store[key.lower()][1]
55 |
56 | def __delitem__(self, key):
57 | del self._store[key.lower()]
58 |
59 | def __iter__(self):
60 | return (casedkey for casedkey, mappedvalue in self._store.values())
61 |
62 | def __len__(self):
63 | return len(self._store)
64 |
65 | def lower_items(self):
66 | """Like iteritems(), but with all lowercase keys."""
67 | return (
68 | (lowerkey, keyval[1])
69 | for (lowerkey, keyval)
70 | in self._store.items()
71 | )
72 |
73 | def __eq__(self, other):
74 | if isinstance(other, collections.Mapping):
75 | other = CaseInsensitiveDict(other)
76 | else:
77 | return NotImplemented
78 | # Compare insensitively
79 | return dict(self.lower_items()) == dict(other.lower_items())
80 |
81 | # Copy is required
82 | def copy(self):
83 | return CaseInsensitiveDict(self._store.values())
84 |
85 | def __repr__(self):
86 | return str(dict(self.items()))
87 |
88 |
89 | class LookupDict(dict):
90 | """Dictionary lookup object."""
91 |
92 | def __init__(self, name=None):
93 | self.name = name
94 | super(LookupDict, self).__init__()
95 |
96 | def __repr__(self):
97 | return '<lookup \'%s\'>' % (self.name)
98 |
99 | def __getitem__(self, key):
100 | # We allow fall-through here, so values default to None
101 |
102 | return self.__dict__.get(key, None)
103 |
104 | def get(self, key, default=None):
105 | return self.__dict__.get(key, default)
106 |
--------------------------------------------------------------------------------
/site-packages/six.egg-info/PKG-INFO:
--------------------------------------------------------------------------------
1 | Metadata-Version: 1.1
2 | Name: six
3 | Version: 1.10.0
4 | Summary: Python 2 and 3 compatibility utilities
5 | Home-page: http://pypi.python.org/pypi/six/
6 | Author: Benjamin Peterson
7 | Author-email: benjamin@python.org
8 | License: MIT
9 | Description: Six is a Python 2 and 3 compatibility library. It provides utility functions
10 | for smoothing over the differences between the Python versions with the goal of
11 | writing Python code that is compatible on both Python versions. See the
12 | documentation for more information on what is provided.
13 |
14 | Six supports every Python version since 2.6. It is contained in only one Python
15 | file, so it can be easily copied into your project. (The copyright and license
16 | notice must be retained.)
17 |
18 | Online documentation is at https://pythonhosted.org/six/.
19 |
20 | Bugs can be reported to https://bitbucket.org/gutworth/six. The code can also
21 | be found there.
22 |
23 | For questions about six or porting in general, email the python-porting mailing
24 | list: https://mail.python.org/mailman/listinfo/python-porting
25 |
26 | Platform: UNKNOWN
27 | Classifier: Programming Language :: Python :: 2
28 | Classifier: Programming Language :: Python :: 3
29 | Classifier: Intended Audience :: Developers
30 | Classifier: License :: OSI Approved :: MIT License
31 | Classifier: Topic :: Software Development :: Libraries
32 | Classifier: Topic :: Utilities
33 |
--------------------------------------------------------------------------------
/site-packages/six.egg-info/SOURCES.txt:
--------------------------------------------------------------------------------
1 | CHANGES
2 | LICENSE
3 | MANIFEST.in
4 | README
5 | setup.cfg
6 | setup.py
7 | six.py
8 | test_six.py
9 | documentation/Makefile
10 | documentation/conf.py
11 | documentation/index.rst
12 | six.egg-info/PKG-INFO
13 | six.egg-info/SOURCES.txt
14 | six.egg-info/dependency_links.txt
15 | six.egg-info/top_level.txt
--------------------------------------------------------------------------------
/site-packages/six.egg-info/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/site-packages/six.egg-info/top_level.txt:
--------------------------------------------------------------------------------
1 | six
2 |
--------------------------------------------------------------------------------
/src/conf/default.ini:
--------------------------------------------------------------------------------
1 | #
2 | # @ Dshield for Python
3 | ##############################################################################
4 | # Author: YWJT / Sunshine Koo #
5 | # Modify: 2016-12-08 #
6 | ##############################################################################
7 | # This program is distributed under the "Artistic License" Agreement #
8 | # The LICENSE file is located in the same directory as this program. Please #
9 | # read the LICENSE file before you make copies or distribute this program #
10 | ##############################################################################
11 | #
12 |
13 | [system]
14 | log_file = "running.log"
15 |
16 | [main]
17 | ##white list
18 | # whitelisted_ips: DDoS whitelist
19 | # whitel_ttl_ips: TTL whitelist
20 | # supports CIDR notation
21 | whitelisted_ips = "10.10.10.0/24,172.16.0.0/16"
22 | whitel_ttl_ips = "10.10.10.0/24,172.16.0.0/16"
23 |
24 | ##monitor interface
25 | mont_interface = "eth0"
26 |
27 | ##monitor port
28 | mont_port = "80,22"
29 |
30 | ##listen mode
31 | # false: active defense (block offending IPs)
32 | # true: only record IP and TTL values, do not block
33 | mont_listen = false
34 |
35 | ##monitor interval
36 | # specified in seconds
37 | rexec_time = 5
38 |
39 | ##block connections
40 | # this parameter sets the sensitivity of the monitor
41 | # 100 is recommended
42 | no_of_connections = 100
43 |
44 | ## ip block time
45 | # supports the 1d/1h/1m format
46 | block_period_ip = "1m"
47 |
48 |
49 | [ttl]
50 | ##monitor protocol
51 | # applies to the TTL monitor module: "tcp" = TCP only, "udp" = UDP only, "" = all protocols monitored
52 | mont_protocol = "tcp"
53 |
54 |
55 | ##block connections
56 | # this parameter sets the sensitivity of the monitor
57 | # 20000~100000 is recommended
58 | no_ttl_connections = 20000
59 |
60 | ##ttl unblock time
61 | # supports the 1d/1h/1m format
62 | block_period_ttl = "1m"
63 |
64 | [alert]
65 | # smtp_type: "ssl" or "no"
66 | smtp_user = ""
67 | smtp_passwd = ""
68 | smtp_server = ""
69 | smtp_type = "no"
70 | admin_email = "admin@dshield.net"
71 |
72 | ##InfluxDB configuration
73 | [meta]
74 | dir = "data/meta"
75 |
76 | [data]
77 | dir = "data/data"
78 | wal-dir = "data/wal"
79 | cache-max-memory-size = 1048576000
80 |
81 | [coordinator]
82 | write-timeout = "10s"
83 | query-timeout = "5s"
84 |
85 | [admin]
86 | # web UI
87 | enabled = false
88 | bind-address = ":3083"
89 |
90 | [http]
91 | # Restful api
92 | enabled = true
93 | bind-address = ":3086"
94 |
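95 | # Note (added, best-guess reading of the layout; not part of the shipped config):
96 | # the [meta]..[http] sections above appear to be consumed by the bundled InfluxDB
97 | # instance, while the Dshield Python code reads [system], [main], [ttl] and [alert]
98 | # through lib/loadConf.py; the ":3086" HTTP bind-address is the port that DB_Conn
99 | # in lib/influxDB.py connects to.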
--------------------------------------------------------------------------------
/src/data/data/_internal/monitor/1/1:
--------------------------------------------------------------------------------
1 | 1
2 |
--------------------------------------------------------------------------------
/src/data/meta/meta.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ywjt/Dshield/ef5b58d99bc479b92a97404b49d057d503e1a368/src/data/meta/meta.db
--------------------------------------------------------------------------------
/src/data/wal/_internal/monitor/1/_00001.wal:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ywjt/Dshield/ef5b58d99bc479b92a97404b49d057d503e1a368/src/data/wal/_internal/monitor/1/_00001.wal
--------------------------------------------------------------------------------
/src/lib/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 | """
4 | * @ Dshield for Python
5 | ##############################################################################
6 | # Author: YWJT / Sunshine Koo #
7 | # Modify: 2016-12-08 #
8 | ##############################################################################
9 | # This program is distributed under the "Artistic License" Agreement #
10 | # The LICENSE file is located in the same directory as this program. Please #
11 | # read the LICENSE file before you make copies or distribute this program #
12 | ##############################################################################
13 | """
14 |
15 | import sys, os
16 | reload(sys)
17 | sys.path.append("..")
18 | import commands
19 | import datetime, time
20 | from time import sleep
21 | from lib.loadConf import LoadConfig
22 | import functools
23 | import threading
24 |
25 | def async(func):
26 | @functools.wraps(func)
27 | def wrapper(*args, **kwargs):
28 | my_thread = threading.Thread(target=func, args=args, kwargs=kwargs)
29 | my_thread.start()
30 | return wrapper
31 |
32 |
33 | class Dshield(object):
34 | avr = {}
35 | avr['version'] = '4.0.0' # current version
36 | avr['logFile'] = 'running.log' # log file
37 | avr['maximumAllowedConnectionsPerIP'] = 200 # maximum connections per IP
38 | avr['maximumAllowedConnectionsPerTTL'] = 100000 # maximum connections per TTL
39 | avr['adminEmail'] = '' # administrator email
40 | avr['whitelistIPs'] = {} # whitelist
41 | avr['blockTime'] = '1h' # block duration
42 | avr['blockedIPs'] = {} # blacklist (blocked IPs)
43 | avr['exectime'] = 1 # check interval
44 | avr['montport'] = '*' # monitored ports
45 | avr['montInterface'] = '' # network interface
46 | avr['montlisten'] = False # listen-only mode
47 | avr['montProtocol'] = '' # monitored protocol
48 | avr['sender'] = '' # sender address
49 | avr['receiver'] = [] # recipient list
50 | avr['smtpserver'] = '' # SMTP server
51 | avr['username'] = '' # SMTP username
52 | avr['password'] = '' # SMTP password
53 | avr['type'] = '' # sending mode; SSL not used by default
54 | keepRunning = True
55 |
56 |
57 | def __init__(self):
58 | self.avr['logFile'] = LoadConfig().getSectionValue('system', 'log_file')
59 | self.avr['maximumAllowedConnectionsPerIP'] = int(LoadConfig().getSectionValue('main','no_of_connections'))
60 | self.avr['maximumAllowedConnectionsPerTTL'] = int(LoadConfig().getSectionValue('ttl', 'no_ttl_connections'))
61 | self.avr['adminEmail'] = LoadConfig().getSectionValue('alert','admin_email')
62 | self.avr['whitelistIPs'] = LoadConfig().getSectionValue('main','whitelisted_ips')
63 | self.avr['blockTimeIP'] = LoadConfig().getSectionValue('main','block_period_ip')
64 | self.avr['blockTimeTTL'] = LoadConfig().getSectionValue('ttl','block_period_ttl')
65 | self.avr['exectime'] = int(LoadConfig().getSectionValue('main','rexec_time'))
66 | self.avr['montport'] = LoadConfig().getSectionValue('main','mont_port')
67 | self.avr['monlisten'] = LoadConfig().getSectionValue('main','mont_listen')
68 | self.avr['montInterface'] = LoadConfig().getSectionValue('main','mont_interface')
69 | self.avr['montProtocol'] = LoadConfig().getSectionValue('ttl', 'mont_protocol')
70 | self.avr['receiver'] = LoadConfig().getSectionValue('alert','admin_email')
71 | self.avr['smtpserver']= LoadConfig().getSectionValue('alert','smtp_server')
72 | self.avr['username'] = LoadConfig().getSectionValue('alert','smtp_user')
73 | self.avr['password'] = LoadConfig().getSectionValue('alert','smtp_passwd')
74 | self.avr['type'] = LoadConfig().getSectionValue('alert','smtp_type')
75 |
76 |
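77 | # Illustrative use of the @async decorator defined above (see src/test/testasync.py):
78 | # the decorated call runs in a background thread and returns immediately; note that
79 | # the wrapper returns None, not the Thread object. The function name below is made up.
80 | #
81 | #   @async
82 | #   def slow_task(x, y):
83 | #       print x, y
84 | #
85 | #   slow_task(123, 456)   # returns at once, work continues in the thread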
--------------------------------------------------------------------------------
/src/lib/dLog.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 | """
4 | * @ Dshield for Python
5 | ##############################################################################
6 | # Author: YWJT / Sunshine Koo #
7 | # Modify: 2016-12-08 #
8 | ##############################################################################
9 | # This program is distributed under the "Artistic License" Agreement #
10 | # The LICENSE file is located in the same directory as this program. Please #
11 | # read the LICENSE file before you make copies or distribute this program #
12 | ##############################################################################
13 | """
14 |
15 | import sys, os, time, datetime
16 | sys.path.append("..")
17 | sys.setdefaultencoding("utf-8")
18 | from lib import Dshield
19 |
20 | PROC_DIR = os.path.abspath('..')
21 | LOGS_DIR = PROC_DIR + '/logs/'
22 |
23 | '''
24 | #==================================
25 | # Log record types
26 | #==================================
27 | # [LOCK]   block
28 | # [UNLOCK] unblock
29 | # [ERROR]  error message
30 | # [RECORD] record only, do not block
31 | # [REBL]   reload the block list
32 | # [MAIL]   mail sent
33 | #==================================
34 | '''
35 | def save_log(type, data):
36 | logdir = LOGS_DIR + time.strftime('%Y_%m', time.localtime()) + '/'
37 |
38 | if not os.path.exists(logdir):
39 | os.system('mkdir -p ' + logdir)
40 | os.chmod(logdir, 0777)  # octal mode; decimal 777 would set the wrong permission bits
41 |
42 | log_file = logdir + time.strftime('%Y_%m_%d', time.localtime()) + '_' + Dshield().avr['logFile']
43 | f = open(log_file, 'a')
44 | f.write('['+type+'] ' + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + ' ' + str(data) + '\n')
45 | f.close()
46 |
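47 | # Illustrative call (hypothetical message text): save_log() appends one line per
48 | # event to logs/<YYYY_MM>/<YYYY_MM_DD>_running.log, e.g.
49 | #   save_log('LOCK', 'blocked 1.2.3.4 for 1m')
50 | #   # -> "[LOCK] 2016-12-08 12:00:00 blocked 1.2.3.4 for 1m"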
--------------------------------------------------------------------------------
/src/lib/dMail.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 | """
4 | * @ Dshield for Python
5 | ##############################################################################
6 | # Author: YWJT / Sunshine Koo #
7 | # Modify: 2016-12-08 #
8 | ##############################################################################
9 | # This program is distributed under the "Artistic License" Agreement #
10 | # The LICENSE file is located in the same directory as this program. Please #
11 | # read the LICENSE file before you make copies or distribute this program #
12 | ##############################################################################
13 | """
14 |
15 | import sys
16 | sys.path.append("..")
17 | import smtplib
18 | from email.header import Header
19 | from email.mime.text import MIMEText
20 | from lib.dLog import save_log
21 | from lib import Dshield
22 |
23 |
24 | class PyEmail(Dshield):
25 | sender = '' # sender address
26 |
27 | """
28 | @name: 构造函数
29 | @desc: 读取配置文件,初始化变量
30 | """
31 | def __init__(self):
32 | Dshield.__init__(self)
33 |
34 | if self.avr['username'].find('@') == -1:  # username has no '@': build sender from the SMTP server domain
35 | self.sender = self.avr['smtpserver'].replace(self.avr['smtpserver'].split('.')[0]+'.',self.avr['username']+'@')
36 | else:
37 | self.sender = self.avr['username']
38 |
39 |
40 | """
41 | @name: 普通发信模式
42 | @desc: 不需要SSl认证
43 | """
44 | def nonsend(self,subject,msg):
45 | self.__init__()
46 | msg = MIMEText(msg,'plain','utf-8') # 'utf-8' is required for Chinese text; single-byte characters do not need it
47 | msg['Subject'] = subject
48 | smtp = smtplib.SMTP()
49 | smtp.connect(self.avr['smtpserver'])
50 | smtp.login(self.avr['username'], self.avr['password'])
51 | smtp.sendmail(self.sender, self.avr['receiver'], msg.as_string())
52 | smtp.quit()
53 |
54 | """
55 | @name: SSL发信模式
56 | @desc: 支持google邮箱
57 | """
58 | def sslsend(self,subject,msg):
59 | self.__init__()
60 | msg = MIMEText(msg,'plain','utf-8') # 'utf-8' is required for Chinese text; single-byte characters do not need it
61 | msg['Subject'] = Header(subject, 'utf-8')
62 | smtp = smtplib.SMTP()
63 | smtp.connect(self.avr['smtpserver'])
64 | smtp.ehlo()
65 | smtp.starttls()
66 | smtp.ehlo()
67 | smtp.set_debuglevel(1)
68 | smtp.login(self.avr['username'], self.avr['password'])
69 | smtp.sendmail(self.sender, self.avr['receiver'], msg.as_string())
70 | smtp.quit()
71 |
72 | """
73 | @name: 发送邮件
74 | """
75 | def sendto(self,subject,msg):
76 | if str(self.avr['type']) == 'ssl':
77 | try:
78 | self.sslsend(subject,msg)
79 | save_log('MAIL','Send mail Success.')
80 | except Exception, e:
81 | save_log('MAIL','Send mail failed to: %s' % e)
82 | else:
83 | try:
84 | self.nonsend(subject,msg)
85 | save_log('MAIL','Send mail Success.')
86 | except Exception, e:
87 | save_log('MAIL','Send mail failed to: %s' % e)
88 |
89 |
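90 | # Illustrative call (hypothetical subject and body): sendto() picks sslsend() when
91 | # smtp_type = "ssl" in conf/default.ini and nonsend() otherwise, logging the result
92 | # through save_log('MAIL', ...):
93 | #   PyEmail().sendto('Dshield alert', 'too many connections from 1.2.3.4')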
--------------------------------------------------------------------------------
/src/lib/dStat.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 | """
4 | * @ Dshield for Python
5 | ##############################################################################
6 | # Author: YWJT / Sunshine Koo #
7 | # Modify: 2016-12-08 #
8 | ##############################################################################
9 | # This program is distributed under the "Artistic License" Agreement #
10 | # The LICENSE file is located in the same directory as this program. Please #
11 | # read the LICENSE file before you make copies or distribute this program #
12 | ##############################################################################
13 | """
14 |
15 | import sys
16 | sys.path.append("..")
17 | import os, re, time
18 | from time import sleep
19 | from lib import Dshield
20 |
21 |
22 | class Dstat(Dshield):
23 |
24 | def __init__(self):
25 | Dshield.__init__(self)
26 |
27 | def _read(self):
28 | fd = open("/proc/net/dev", "r")
29 | for line in fd.readlines():
30 | if line.find(self.avr['montInterface']) > 0:
31 | field = line.split(":")
32 | recv = field[1].split()[0]
33 | send = field[1].split()[8]
34 | continue
35 | fd.close()
36 | return (float(recv), float(send))
37 |
38 | def net(self):
39 | net = {}
40 | (recv, send) = self._read()
41 | while True:
42 | time.sleep(1)
43 | (new_recv, new_send) = self._read()
44 | net['recv'] = "%.3f" %((new_recv - recv)/1024/1024)
45 | net['send'] = "%.3f" %((new_send - send)/1024/1024)
46 | return net
47 |
48 | def loadavg(self):
49 | loadavg = {}
50 | f = open("/proc/loadavg","r")
51 | con = f.read().split()
52 | f.close()
53 | loadavg['1m'] = con[0]
54 | loadavg['5m'] = con[1]
55 | loadavg['15m'] = con[2]
56 | return loadavg
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
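65 | # Illustrative use (see src/test/testdstat.py): net() samples /proc/net/dev twice,
66 | # one second apart, and returns the delta in MB; loadavg() parses /proc/loadavg.
67 | # The return values shown here are made up:
68 | #   Dstat().net()      # -> {'recv': '0.012', 'send': '0.003'}
69 | #   Dstat().loadavg()  # -> {'1m': '0.10', '5m': '0.08', '15m': '0.01'}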
--------------------------------------------------------------------------------
/src/lib/influxDB.py:
--------------------------------------------------------------------------------
1 |
2 | from influxdb import InfluxDBClient
3 |
4 | class DB_Conn():
5 | client = ''
6 | db = ''
7 |
8 | def __init__(self, db):
9 | try:
10 | self.client = InfluxDBClient('localhost', 3086, 'ddos', 'ddos', db)
11 | except IOError, e:
12 | print "InfluxDB Connected Fail ..."
13 | return
14 |
15 | def select(self, sql):
16 | return self.client.query(sql).get_points()
17 |
18 | def insert(self, json_data):
19 | return self.client.write_points(json_data)
20 |
21 | def delete(self, sql):
22 | return self.client.query(sql)
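23 |
24 | # Illustrative use (see src/test/testinflux.py); port 3086 matches the [http]
25 | # bind-address in conf/default.ini:
26 | #   DB_Conn('connect').insert([{"measurement": "current",
27 | #                               "tags": {"foreaddr": "6.6.6.6"},
28 | #                               "fields": {"connections": 800}}])
29 | #   for row in DB_Conn('connect').select("select * from current"):
30 | #       print row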
23 |
--------------------------------------------------------------------------------
/src/lib/loadConf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 |
4 |
5 | import ConfigParser
6 | import sys, os
7 | sys.path.append("..")
8 |
9 | PROC_DIR = os.path.abspath('..')
10 |
11 | class LoadConfig:
12 | cf = ''
13 | filepath = PROC_DIR + "/conf/default.ini"
14 |
15 | def __init__(self):
16 | try:
17 | f = open(self.filepath, 'r')
18 | except IOError, e:
19 | print "\"%s\" Config file not found." % (self.filepath)
20 | sys.exit(1)
21 | f.close()
22 |
23 | self.cf = ConfigParser.ConfigParser()
24 | self.cf.read(self.filepath)
25 |
26 | def getSectionValue(self, section, key):
27 | return self.getFormat(self.cf.get(section, key))
28 |
29 | def getSectionOptions(self, section):
30 | return self.cf.options(section)
31 |
32 | def getSectionItems(self, section):
33 | return self.cf.items(section)
34 |
35 | def getFormat(self, string):
36 | return string.strip("'").strip('"').replace(" ","")
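37 |
38 | # Illustrative use: getSectionValue() returns strings with surrounding quotes
39 | # stripped and spaces removed by getFormat(); callers convert types themselves
40 | # (see lib/__init__.py):
41 | #   LoadConfig().getSectionValue('main', 'mont_interface')          # -> 'eth0'
42 | #   int(LoadConfig().getSectionValue('main', 'no_of_connections'))  # -> 100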
--------------------------------------------------------------------------------
/src/logs/ChangeLog:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/src/sbin/daemon:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 |
4 | """
5 | * @ Dshield for Python
6 | ##############################################################################
7 | # Author: YWJT / Sunshine Koo #
8 | # Modify: 2016-12-08 #
9 | ##############################################################################
10 | # This program is distributed under the "Artistic License" Agreement #
11 | # The LICENSE file is located in the same directory as this program. Please #
12 | # read the LICENSE file before you make copies or distribute this program #
13 | ##############################################################################
14 | """
15 |
16 | import sys
17 | sys.path.append("..")
18 | import os, time, atexit, datetime
19 | from signal import SIGTERM
20 | from lib import Dshield
21 | from lib.dUtil import CC
22 | from lib.dSniff import Sniff
23 | from lib.dLog import save_log
24 |
25 |
26 | PROC_DIR = os.path.abspath('..')
27 | LOGS_DIR = PROC_DIR + '/logs/'
28 | if not PROC_DIR in os.environ['PATH']:
29 | os.environ['PATH'] = os.getenv('PATH') + ':' + PROC_DIR + '/sbin'
30 |
31 | class Daemon:
32 | """
33 | daemon class.
34 | Usage: subclass the Daemon class and override the _run() method
35 | """
36 | def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
37 | self.stdin = stdin
38 | self.stdout = stdout
39 | self.stderr = stderr
40 | self.pidfile = LOGS_DIR + pidfile
41 |
42 | def _daemonize(self):
43 | # detach from the parent process
44 | try:
45 | pid = os.fork()
46 | if pid > 0:
47 | sys.exit(0)
48 | except OSError, e:
49 | sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
50 | save_log('DAEM',"Dshield daemon fork #1 failed:"+str(e.strerror)+"\n")
51 | sys.exit(1)
52 | os.setsid()
53 | os.chdir("/")
54 | os.umask(0)
55 |
56 | # second fork: stop the process from reacquiring a controlling terminal
57 | try:
58 | pid = os.fork()
59 | if pid > 0:
60 | sys.exit(0)
61 | except OSError, e:
62 | sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
63 | save_log('DAEM',"Dshield daemon fork #2 failed:"+str(e.strerror)+"\n")
64 | sys.exit(1)
65 |
66 | sys.stdout.flush()
67 | sys.stderr.flush()
68 | si = file(self.stdin, 'r')
69 | so = file(self.stdout, 'a+')
70 | se = file(self.stderr, 'a+', 0)
71 | os.dup2(si.fileno(), sys.stdin.fileno())
72 | os.dup2(so.fileno(), sys.stdout.fileno())
73 | os.dup2(se.fileno(), sys.stderr.fileno())
74 | atexit.register(self.delpid)
75 | pid = str(os.getpid())
76 | file(self.pidfile,'w+').write("%s\n" % pid)
77 |
78 | def delpid(self):
79 | os.remove(self.pidfile)
80 |
81 | def start(self):
82 | """
83 | Start the daemon
84 | """
85 | try:
86 | pf = file(self.pidfile,'r')
87 | pid = int(pf.read().strip())
88 | pf.close()
89 | except IOError,e:
90 | pid = None
91 |
92 | if pid:
93 | message = "Start error,pidfile %s already exist. Dshield daemon already running?\n"
94 | save_log('DAEM',message+"\n")
95 | sys.stderr.write(message % self.pidfile)
96 | sys.exit(1)
97 |
98 | self._daemonize()
99 | self._run()
100 |
101 | def stop(self):
102 | """
103 | Stop the daemon
104 | """
105 | try:
106 | pf = file(self.pidfile,'r')
107 | pid = int(pf.read().strip())
108 | pf.close()
109 | except IOError:
110 | pid = None
111 |
112 | if not pid:
113 | message = "pidfile %s does not exist. Dshield daemon not running?\n"
114 | sys.stderr.write(message % self.pidfile)
115 | return
116 |
117 | try:
118 | while 1:
119 | os.kill(pid, SIGTERM)
120 | time.sleep(0.1)
121 | except OSError, err:
122 | err = str(err)
123 | if err.find("No such process") > 0:
124 | if os.path.exists(self.pidfile):
125 | os.remove(self.pidfile)
126 | save_log('DAEM',"stop Dshield daemon Success.\n")
127 | else:
128 | save_log('DAEM',"stop error,"+str(err)+"\n")
129 | sys.exit(1)
130 |
131 | def restart(self):
132 | self.stop()
133 | self.start()
134 |
135 |
136 | class d(Daemon):
137 | def _run(self):
138 | save_log('-->','Dshield CC daemon started with pid %d' % os.getpid())
139 | save_log('-->','Dshield CC daemon started with %s' % datetime.datetime.now().strftime("%m/%d/%Y %H:%M"))
140 | CC().run()
141 |
142 | class s(Daemon):
143 | def _run(self):
144 | save_log('-->','Dshield Sniff daemon started with pid %d' % os.getpid())
145 | save_log('-->','Dshield Sniff daemon started with %s' % datetime.datetime.now().strftime("%m/%d/%Y %H:%M"))
146 | Sniff().run()
147 |
148 | def info():
149 | save_log('-->','Dshield %s For Python' % Dshield().avr['version'])
150 | save_log('-->','Copyright (C) 2016,YWJT.org.')
151 |
152 | def init():
153 | if 'cc' == (sys.argv[1]).lower():
154 | return d("ddos.pid")
155 | elif 'sniff' == (sys.argv[1]).lower():
156 | return s("sniff.pid")
157 | else:
158 | print "Unknow Command!"
159 | print "Usage: %s {cc|sniff} {start|stop|restart}" % sys.argv[0]
160 | sys.exit(1)
161 |
162 | if __name__ == '__main__':
163 | if len(sys.argv) > 2:
164 | if 'START' == (sys.argv[2]).upper():
165 | info()
166 | init().start()
167 | elif 'STOP' == (sys.argv[2]).upper():
168 | init().stop()
169 | elif 'RESTART' == (sys.argv[2]).upper():
170 | init().restart()
171 | else:
172 | print "Unknow Command!"
173 | print "Usage: %s {cc|sniff} {start|stop|restart}" % sys.argv[0]
174 | sys.exit(1)
175 | else:
176 | print "Usage: %s {cc|sniff} {start|stop|restart}" % sys.argv[0]
177 | sys.exit(0)
178 |
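179 | # Illustrative invocations (normally issued by sbin/dshield rather than by hand):
180 | #   python daemon cc start       # fork the CC daemon, pidfile written to logs/ddos.pid
181 | #   python daemon sniff stop     # stop the Sniff daemon via its pidfile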
--------------------------------------------------------------------------------
/src/sbin/dshield:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #
3 | # @ Dshield for Python
4 | ##############################################################################
5 | # Author: YWJT / Sunshine Koo #
6 | # Modify: 2016-12-08 #
7 | ##############################################################################
8 | # This program is distributed under the "Artistic License" Agreement #
9 | # The LICENSE file is located in the same directory as this program. Please #
10 | # read the LICENSE file before you make copies or distribute this program #
11 | ##############################################################################
12 | #
13 |
14 |
15 | # Check if user is root
16 | if [ $(id -u) != "0" ]; then
17 | echo "Error: You must be root to run this script, use sudo sh $0"
18 | exit 1
19 | fi
20 |
21 | SHELL_DIR=$(cd "$(dirname "$0")"; pwd)
22 | BASEDIR=$(dirname $SHELL_DIR)
23 | if [ `cat ~/.bash_profile|grep 'ddos'|wc -l` -eq 0 ];then
24 | echo "PATH="$PATH:$SHELL_DIR >> ~/.bash_profile
25 | echo "export PATH" >> ~/.bash_profile
26 | export PATH=$PATH:$SHELL_DIR
27 | fi
28 |
29 | #Env
30 | ENV_BIN="/usr/bin/python"
31 |
32 | cd $SHELL_DIR
33 | function header()
34 | {
35 | echo "Dshield v4.0.0 For Python"
36 | echo "Copyright (C)2016,YWJT.org."
37 | }
38 |
39 | function chk_iptables() {
40 | /sbin/service iptables status 1>/dev/null 2>&1
41 | if [ $? -ne 0 ]; then
42 | /etc/init.d/iptables start
43 | fi
44 | }
45 |
46 | # tune NIC soft-interrupt (IRQ) affinity
47 | function option_irq() {
48 | if [ ! -f /var/log/irq_tmp ]
49 | then
50 | if [ ! -z ${MONT_INTERFACE} ]
51 | then
52 | /etc/init.d/irqbalance stop 1>/dev/null 2>&1
53 | IrqID=$(cat /proc/interrupts |grep ${MONT_INTERFACE}|awk -F ':' '{print $1}'|xargs)
54 | Nx=0
55 | for Cid in ${IrqID}
56 | do
57 | Mex=$(echo $((2**${Nx})))
58 | Hex=$(printf "%x" ${Mex})
59 | echo ${Hex} > /proc/irq/${Cid}/smp_affinity
60 | Nx=$((${Nx}+1))
61 | done
62 | echo 1 >> /var/log/irq_tmp
63 | fi
64 | fi
65 | }
66 |
67 | function do_start() {
68 | chk_iptables 1>/dev/null
69 | option_irq 1>/dev/null
70 | if [ "$1" == "all" ]
71 | then
72 | if ./inflctrl start
73 | then
74 | echo -n "Starting Dshield cc sniff daemon ... "
75 | sleep 8
76 | $ENV_BIN daemon cc start
77 | $ENV_BIN daemon sniff start
78 | echo "[ OK ]"
79 | fi
80 | else
81 | echo -n "Starting Dshield $1 daemon ... "
82 | nice -n -4 $ENV_BIN daemon $1 start
83 | echo "[ OK ]"
84 | fi
85 | }
86 |
87 | function do_stop() {
88 | if [ "$1" == "all" ]
89 | then
90 | if ./inflctrl stop
91 | then
92 | echo -n "Stopping Dshield cc sniff daemon ... "
93 | $ENV_BIN daemon cc stop
94 | $ENV_BIN daemon sniff stop
95 | echo "[ OK ]"
96 | fi
97 | else
98 | echo -n "Stopping Dshield $1 daemon ... "
99 | $ENV_BIN daemon $1 stop
100 | echo "[ OK ]"
101 | fi
102 | }
103 |
104 | function do_restart() {
105 | if [ "$1" == "all" ]
106 | then
107 | if ./inflctrl restart
108 | then
109 | echo -n "Restarting Dshield cc sniff daemon ... "
110 | $ENV_BIN daemon cc restart
111 | $ENV_BIN daemon sniff restart
112 | echo "[ OK ]"
113 | fi
114 | else
115 | echo -n "Restarting Dshield $1 daemon ... "
116 | $ENV_BIN daemon $1 restart
117 | echo "[ OK ]"
118 | fi
119 | }
120 |
121 |
122 | if [[ "$1" != "cc" && "$1" != "sniff" && "$1" != "influx" && "$1" != "all" ]]
123 | then
124 | echo "Usage: $0 {influx|cc|sniff|all} {start|stop|restart}"
125 | exit 1
126 | else
127 | proc=$1
128 | fi
129 |
130 | case "$2" in
131 | start)
132 | do_start $proc
133 | ;;
134 | stop)
135 | do_stop $proc
136 | ;;
137 | restart)
138 | do_restart $proc
139 | ;;
140 | *)
141 | echo "Usage: $0 {cc|sniff} {start|stop|restart}"
142 | exit 1
143 | esac
144 |
145 |
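146 | # Illustrative invocations (the script must be run as root, see the check above):
147 | #   ./dshield all start       # start influxd (via inflctrl) plus the cc and sniff daemons
148 | #   ./dshield sniff restart   # restart only the sniff daemon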
--------------------------------------------------------------------------------
/src/sbin/influxd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ywjt/Dshield/ef5b58d99bc479b92a97404b49d057d503e1a368/src/sbin/influxd
--------------------------------------------------------------------------------
/src/test/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 | sys.path.append("..")
3 |
--------------------------------------------------------------------------------
/src/test/testasync.py:
--------------------------------------------------------------------------------
1 | import time
2 | import sys
3 | sys.path.append("..")
4 | from lib import async
5 |
6 |
7 | class foo:
8 | @async
9 | def foo(self,x,y):
10 | c = 0
11 | while c < 5:
12 | c = c + 1
13 | print x,y
14 | time.sleep(1)
15 |
16 |
17 | if __name__ == '__main__':
18 | foo().foo(456,789)
19 | foo().foo(123,y=345)
--------------------------------------------------------------------------------
/src/test/testblock.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 |
4 |
5 | import sys
6 | sys.path.append("..")
7 | from lib.dUtil import CC
8 |
9 | if __name__ == '__main__':
10 | CC().run()
11 |
12 |
13 |
--------------------------------------------------------------------------------
/src/test/testdstat.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 |
4 | import sys
5 | sys.path.append("..")
6 | from lib.dStat import Dstat
7 | from time import sleep
8 |
9 | if __name__ == '__main__':
10 | while True:
11 | print Dstat().net()
12 | #sleep(1)
13 |
--------------------------------------------------------------------------------
/src/test/testinflux.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 |
4 | import sys
5 | sys.path.append("..")
6 | from lib.influxUnit import DB_Conn
7 |
8 |
9 | if __name__ == '__main__':
10 |
11 | json_body = [{
12 | "measurement": "current",
13 | "tags": {"foreaddr": "6.6.6.6","locaddr":"1.1.1.1","port":"80","state":"ESTABLISHED"},
14 | "fields":{"connections":800, "value":300}
15 | }]
16 |
17 | print "-------------- test insert -----------------"
18 | if DB_Conn('connect').insert(json_body):
19 | print "Insert True."
20 | else:
21 | print "Insert False."
22 |
23 | print "-------------- test select -----------------"
24 | for items in DB_Conn('connect').select("select * from current"):
25 | print items
26 |
27 | print "-------------- test delete -----------------"
28 | try:
29 | DB_Conn('connect').delete("delete from current where foreaddr = '6.6.6.6'")
30 | except IOError, e:
31 | print "Delete Fail."
32 | pass
33 | else:
34 | for items in DB_Conn('connect').select("select * from current"):
35 | print items
36 |
--------------------------------------------------------------------------------
/src/test/testsniff.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding=utf-8
3 |
4 |
5 | import sys
6 | sys.path.append("..")
7 | from lib.dSniff import Sniff
8 |
9 | if __name__ == '__main__':
10 | Sniff().run()
11 |
--------------------------------------------------------------------------------