├── requests ├── __pycache__ │ ├── api.cpython-39.pyc │ ├── auth.cpython-39.pyc │ ├── help.cpython-39.pyc │ ├── certs.cpython-39.pyc │ ├── compat.cpython-39.pyc │ ├── hooks.cpython-39.pyc │ ├── models.cpython-39.pyc │ ├── utils.cpython-39.pyc │ ├── __init__.cpython-39.pyc │ ├── adapters.cpython-39.pyc │ ├── cookies.cpython-39.pyc │ ├── packages.cpython-39.pyc │ ├── sessions.cpython-39.pyc │ ├── __version__.cpython-39.pyc │ ├── exceptions.cpython-39.pyc │ ├── structures.cpython-39.pyc │ ├── status_codes.cpython-39.pyc │ └── _internal_utils.cpython-39.pyc ├── __version__.py ├── certs.py ├── packages.py ├── hooks.py ├── _internal_utils.py ├── compat.py ├── structures.py ├── exceptions.py ├── help.py ├── status_codes.py ├── __init__.py ├── api.py ├── auth.py ├── cookies.py ├── adapters.py ├── sessions.py ├── utils.py └── models.py ├── config.py ├── README.md ├── sendMsg.py └── run.py /requests/__pycache__/api.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/api.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/auth.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/auth.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/help.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/help.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/certs.cpython-39.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/certs.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/compat.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/compat.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/hooks.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/hooks.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/models.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/models.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/utils.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/utils.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/__init__.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/__init__.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/adapters.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/adapters.cpython-39.pyc 
-------------------------------------------------------------------------------- /requests/__pycache__/cookies.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/cookies.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/packages.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/packages.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/sessions.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/sessions.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/__version__.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/__version__.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/exceptions.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/exceptions.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/structures.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/structures.cpython-39.pyc -------------------------------------------------------------------------------- 
/requests/__pycache__/status_codes.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/status_codes.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__pycache__/_internal_utils.cpython-39.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bmaili/NEU_health_daka/HEAD/requests/__pycache__/_internal_utils.cpython-39.pyc -------------------------------------------------------------------------------- /requests/__version__.py: -------------------------------------------------------------------------------- 1 | # .-. .-. .-. . . .-. .-. .-. .-. 2 | # |( |- |.| | | |- `-. | `-. 3 | # ' ' `-' `-`.`-' `-' `-' ' `-' 4 | 5 | __title__ = 'requests' 6 | __description__ = 'Python HTTP for Humans.' 7 | __url__ = 'https://requests.readthedocs.io' 8 | __version__ = '2.25.1' 9 | __build__ = 0x022501 10 | __author__ = 'Kenneth Reitz' 11 | __author_email__ = 'me@kennethreitz.org' 12 | __license__ = 'Apache 2.0' 13 | __copyright__ = 'Copyright 2020 Kenneth Reitz' 14 | __cake__ = u'\u2728 \U0001f370 \u2728' 15 | -------------------------------------------------------------------------------- /requests/certs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | requests.certs 6 | ~~~~~~~~~~~~~~ 7 | 8 | This module returns the preferred default CA certificate bundle. There is 9 | only one — the one from the certifi package. 10 | 11 | If you are packaging Requests, e.g., for a Linux distribution or a managed 12 | environment, you can change the definition of where() to return a separately 13 | packaged CA bundle. 
14 | """ 15 | from certifi import where 16 | 17 | if __name__ == '__main__': 18 | print(where()) 19 | -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | ### 基础部分 2 | # 填写你的学号 3 | stutendID = '' 4 | 5 | # 填写你的密码 6 | passward = '' 7 | 8 | 9 | ### 通知部分,打卡失败进行邮件通知 10 | # 当相应渠道设置后,将自动推送通知 11 | 12 | # 是否仅当打卡失败时才通知,False则打卡成功失败皆通知 13 | sendMsgOnlyError = True 14 | 15 | ## 邮件通知 16 | # 设置自己接收打卡信息通知的邮箱(XXXX@qq.com) 17 | receiver = '' 18 | 19 | # 发送的邮箱,这里可以设置成与接收邮箱一致(即自己发给自己),或者也可以用其他的 20 | sender = receiver 21 | 22 | # 填写发送邮箱SMTP授权码/密码 23 | mailPass = '' 24 | 25 | # 填写发送邮箱的SMTP服务器地址(smtp.qq.com) 26 | mailHost = '' 27 | 28 | ## ServerChan通知 29 | # Server酱的SCKEY,可以在Server酱的网站注册获取 30 | sckey = '' 31 | -------------------------------------------------------------------------------- /requests/packages.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | # This code exists for backwards compatibility reasons. 4 | # I don't like it either. Just look the other way. :) 5 | 6 | for package in ('urllib3', 'idna', 'chardet'): 7 | locals()[package] = __import__(package) 8 | # This traversal is apparently necessary such that the identities are 9 | # preserved (requests.packages.urllib3.* is urllib3.*) 10 | for mod in list(sys.modules): 11 | if mod == package or mod.startswith(package + '.'): 12 | sys.modules['requests.packages.' + mod] = sys.modules[mod] 13 | 14 | # Kinda cool, though, right? 15 | -------------------------------------------------------------------------------- /requests/hooks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.hooks 5 | ~~~~~~~~~~~~~~ 6 | 7 | This module provides the capabilities for the Requests hooks system. 
8 | 9 | Available hooks: 10 | 11 | ``response``: 12 | The response generated from a Request. 13 | """ 14 | HOOKS = ['response'] 15 | 16 | 17 | def default_hooks(): 18 | return {event: [] for event in HOOKS} 19 | 20 | # TODO: response is the only one 21 | 22 | 23 | def dispatch_hook(key, hooks, hook_data, **kwargs): 24 | """Dispatches a hook dictionary on a given piece of data.""" 25 | hooks = hooks or {} 26 | hooks = hooks.get(key) 27 | if hooks: 28 | if hasattr(hooks, '__call__'): 29 | hooks = [hooks] 30 | for hook in hooks: 31 | _hook_data = hook(hook_data, **kwargs) 32 | if _hook_data is not None: 33 | hook_data = _hook_data 34 | return hook_data 35 | -------------------------------------------------------------------------------- /requests/_internal_utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests._internal_utils 5 | ~~~~~~~~~~~~~~ 6 | 7 | Provides utility functions that are consumed internally by Requests 8 | which depend on extremely few external helpers (such as compat) 9 | """ 10 | 11 | from .compat import is_py2, builtin_str, str 12 | 13 | 14 | def to_native_string(string, encoding='ascii'): 15 | """Given a string object, regardless of type, returns a representation of 16 | that string in the native string type, encoding and decoding where 17 | necessary. This assumes ASCII unless told otherwise. 18 | """ 19 | if isinstance(string, builtin_str): 20 | out = string 21 | else: 22 | if is_py2: 23 | out = string.encode(encoding) 24 | else: 25 | out = string.decode(encoding) 26 | 27 | return out 28 | 29 | 30 | def unicode_is_ascii(u_string): 31 | """Determine if unicode string only contains ASCII characters. 32 | 33 | :param str u_string: unicode string to check. Must be unicode 34 | and not Python 2 `str`. 
35 | :rtype: bool 36 | """ 37 | assert isinstance(u_string, str) 38 | try: 39 | u_string.encode('ascii') 40 | return True 41 | except UnicodeEncodeError: 42 | return False 43 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/QQ图片20220502013357.jpg) 2 | 3 | # 脚本说明 4 | 5 | **python(本地无python环境也ok)脚本进行NEU健康、体温打卡,使用腾讯云云函数定时调用脚本,以防万一,打卡失败时将通知使用者。** 6 | 7 | 8 | 9 | 10 | # 使用流程 11 | 12 | ## 个人信息配置 13 | 14 | 下载所有文件,解压,在 config.py 文件中填写需要打卡的学号与密码 15 | 16 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/image-20220502010356041.png) 17 | ## 通知配置(可选) 18 | 19 | 打卡通知设置,相应渠道设置后,将自动推送通知。 20 | 21 | - ### 邮箱通知 22 | 23 | 开启qq邮箱SMTP服务,得到授权码:[参考博客](https://www.cnblogs.com/Alear/p/11594932.html) 24 | 25 | 在 config.py 文件中配置邮箱信息。之后可直接运行sendEmail.py文件测试发送效果。 26 | 27 | - ### Server酱通知 28 | 29 | 在 config.py 文件中配置Server酱的SCKEY,可以在Server酱的网站注册获取。之后可直接运行sendEmail.py文件测试发送效果。 30 | 31 | ## 腾讯云部署 32 | #### (自2022年6月1日起腾讯云函数服务不再免费,建议换成[华为云函数工作流](https://console.huaweicloud.com/functiongraph/?region=cn-south-1#/serverless/dashboard),使用方式与下面相似) 33 | 34 | 1. 开始部署腾讯云,第一次使用也许你需要实名注册:[~~腾讯云Serverless~~](https://console.cloud.tencent.com/scf/index) [华为云函数工作流](https://console.huaweicloud.com/functiongraph/?region=cn-south-1#/serverless/dashboard) 35 | 36 | 2. 点击函数服务,新建一个云函数: 37 | 38 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/yun1.png) 39 | 40 | 41 | 42 | 3. 自定义函数,注意运行环境是Python3.6: 43 | 44 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/yun2.png) 45 | 46 | 47 | 48 | 4. 将修改后的所有文件压缩,然后如图设置,执行方法格式为:文件名+方法名: 49 | 50 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/yun3.png) 51 | 52 | 53 | 54 | 5. 高级配置:环境配置里的执行超时时间不能太小 55 | 56 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/yun4.png) 57 | 58 | 59 | 60 | 6. 
触发器配置:自定义一个定时触发器,cron表达式我是这样写的:11 12 8,13,20 * * * * ,表示每天8点、13点、20点,12分11秒触发函数,即每天三次健康打卡三次体温打卡 61 | 62 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/yun5.png) 63 | 64 | 65 | 66 | 7. 点击”完成“进行创建。点击”部署“可再次部署,点击”测试“可立即测试打卡,点击日志查询可查看近期打卡日志。 67 | 68 | ![](http://bmalimarkdown.oss-cn-beijing.aliyuncs.com/img/image-20220502013928983.png) 69 | 70 | ## 其他 71 | 72 | 通知部分也能用其他邮箱,自动打卡部分也可以挂自己云服务器上或者用GitHub Actions(偶尔抽风),兄弟们按需修改(体温打卡因为腾讯云函数跑出来是格林时间,进行了+8处理,若是部署到自己服务器上需要改回来)。 73 | -------------------------------------------------------------------------------- /requests/compat.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.compat 5 | ~~~~~~~~~~~~~~~ 6 | 7 | This module handles import compatibility issues between Python 2 and 8 | Python 3. 9 | """ 10 | 11 | import chardet 12 | 13 | import sys 14 | 15 | # ------- 16 | # Pythons 17 | # ------- 18 | 19 | # Syntax sugar. 20 | _ver = sys.version_info 21 | 22 | #: Python 2.x? 23 | is_py2 = (_ver[0] == 2) 24 | 25 | #: Python 3.x? 26 | is_py3 = (_ver[0] == 3) 27 | 28 | try: 29 | import simplejson as json 30 | except ImportError: 31 | import json 32 | 33 | # --------- 34 | # Specifics 35 | # --------- 36 | 37 | if is_py2: 38 | from urllib import ( 39 | quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, 40 | proxy_bypass, proxy_bypass_environment, getproxies_environment) 41 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag 42 | from urllib2 import parse_http_list 43 | import cookielib 44 | from Cookie import Morsel 45 | from StringIO import StringIO 46 | # Keep OrderedDict for backwards compatibility. 
47 | from collections import Callable, Mapping, MutableMapping, OrderedDict 48 | 49 | 50 | builtin_str = str 51 | bytes = str 52 | str = unicode 53 | basestring = basestring 54 | numeric_types = (int, long, float) 55 | integer_types = (int, long) 56 | 57 | elif is_py3: 58 | from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag 59 | from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment 60 | from http import cookiejar as cookielib 61 | from http.cookies import Morsel 62 | from io import StringIO 63 | # Keep OrderedDict for backwards compatibility. 64 | from collections import OrderedDict 65 | from collections.abc import Callable, Mapping, MutableMapping 66 | 67 | builtin_str = str 68 | str = str 69 | bytes = bytes 70 | basestring = (str, bytes) 71 | numeric_types = (int, float) 72 | integer_types = (int,) 73 | -------------------------------------------------------------------------------- /sendMsg.py: -------------------------------------------------------------------------------- 1 | #邮件发送依赖 2 | from email.mime.text import MIMEText 3 | from email.header import Header 4 | import smtplib 5 | #Server酱发送依赖 6 | import requests 7 | #环境变量 8 | import config 9 | 10 | class sendEmail(): 11 | 12 | def __init__(self): 13 | self.sender = config.sender 14 | self.receiver = config.receiver 15 | self.mailHost = config.mailHost 16 | self.mailUser = config.sender 17 | self.mailPass = config.mailPass 18 | 19 | def sendCheck(self): 20 | return self.sender=='' or self.receiver=='' or self.mailHost=='' or self.mailUser=='' or self.mailPass=='' 21 | 22 | def sendMessage(self , message): 23 | # 邮件普通文本内容 24 | mailContent = message 25 | message = MIMEText(mailContent, 'plain', 'utf-8') 26 | # 发送人名称 27 | message['From'] = Header('健康打卡通知', 'utf-8') 28 | # 收件人名称 29 | message['To'] = Header(self.receiver, 'utf-8') 30 | # 邮件标题 31 | message['Subject'] = 
Header('健康打卡通知', 'utf-8') 32 | 33 | try: 34 | smtpObj = smtplib.SMTP_SSL(self.mailHost, 465) 35 | smtpObj.login(self.mailUser, self.mailPass) 36 | smtpObj.sendmail(self.sender, self.receiver, message.as_string()) 37 | except smtplib.SMTPException: 38 | print('Error: 无法发送邮件') 39 | 40 | 41 | 42 | class sendServerChan(): 43 | def __init__(self): 44 | self.sckey=config.sckey 45 | 46 | def sendMessage(self, message): 47 | url = 'https://sctapi.ftqq.com/' + self.sckey + '.send' 48 | data = { 49 | 'title': '健康打卡通知', 50 | 'desp': message 51 | } 52 | requests.post(url, data) 53 | 54 | 55 | class sendMsg(): 56 | def __init__(self): 57 | self.sckey=config.sckey 58 | self.mailReciver=config.receiver 59 | self._sendEmail=sendEmail() 60 | self._sendServerChan=sendServerChan() 61 | 62 | 63 | def sendMessage(self, message): 64 | if(self.sckey!=''): 65 | self._sendServerChan.sendMessage(message) 66 | if(self.mailReciver!=''): 67 | self._sendEmail.sendMessage(message) 68 | 69 | if __name__ == '__main__': 70 | _sendMsg=sendMsg() 71 | _sendMsg.sendMessage('测试') -------------------------------------------------------------------------------- /requests/structures.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.structures 5 | ~~~~~~~~~~~~~~~~~~~ 6 | 7 | Data structures that power Requests. 8 | """ 9 | 10 | from collections import OrderedDict 11 | 12 | from .compat import Mapping, MutableMapping 13 | 14 | 15 | class CaseInsensitiveDict(MutableMapping): 16 | """A case-insensitive ``dict``-like object. 17 | 18 | Implements all methods and operations of 19 | ``MutableMapping`` as well as dict's ``copy``. Also 20 | provides ``lower_items``. 21 | 22 | All keys are expected to be strings. The structure remembers the 23 | case of the last key to be set, and ``iter(instance)``, 24 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` 25 | will contain case-sensitive keys. 
However, querying and contains 26 | testing is case insensitive:: 27 | 28 | cid = CaseInsensitiveDict() 29 | cid['Accept'] = 'application/json' 30 | cid['aCCEPT'] == 'application/json' # True 31 | list(cid) == ['Accept'] # True 32 | 33 | For example, ``headers['content-encoding']`` will return the 34 | value of a ``'Content-Encoding'`` response header, regardless 35 | of how the header name was originally stored. 36 | 37 | If the constructor, ``.update``, or equality comparison 38 | operations are given keys that have equal ``.lower()``s, the 39 | behavior is undefined. 40 | """ 41 | 42 | def __init__(self, data=None, **kwargs): 43 | self._store = OrderedDict() 44 | if data is None: 45 | data = {} 46 | self.update(data, **kwargs) 47 | 48 | def __setitem__(self, key, value): 49 | # Use the lowercased key for lookups, but store the actual 50 | # key alongside the value. 51 | self._store[key.lower()] = (key, value) 52 | 53 | def __getitem__(self, key): 54 | return self._store[key.lower()][1] 55 | 56 | def __delitem__(self, key): 57 | del self._store[key.lower()] 58 | 59 | def __iter__(self): 60 | return (casedkey for casedkey, mappedvalue in self._store.values()) 61 | 62 | def __len__(self): 63 | return len(self._store) 64 | 65 | def lower_items(self): 66 | """Like iteritems(), but with all lowercase keys.""" 67 | return ( 68 | (lowerkey, keyval[1]) 69 | for (lowerkey, keyval) 70 | in self._store.items() 71 | ) 72 | 73 | def __eq__(self, other): 74 | if isinstance(other, Mapping): 75 | other = CaseInsensitiveDict(other) 76 | else: 77 | return NotImplemented 78 | # Compare insensitively 79 | return dict(self.lower_items()) == dict(other.lower_items()) 80 | 81 | # Copy is required 82 | def copy(self): 83 | return CaseInsensitiveDict(self._store.values()) 84 | 85 | def __repr__(self): 86 | return str(dict(self.items())) 87 | 88 | 89 | class LookupDict(dict): 90 | """Dictionary lookup object.""" 91 | 92 | def __init__(self, name=None): 93 | self.name = name 94 | 
super(LookupDict, self).__init__() 95 | 96 | def __repr__(self): 97 | return '' % (self.name) 98 | 99 | def __getitem__(self, key): 100 | # We allow fall-through here, so values default to None 101 | 102 | return self.__dict__.get(key, None) 103 | 104 | def get(self, key, default=None): 105 | return self.__dict__.get(key, default) 106 | -------------------------------------------------------------------------------- /requests/exceptions.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.exceptions 5 | ~~~~~~~~~~~~~~~~~~~ 6 | 7 | This module contains the set of Requests' exceptions. 8 | """ 9 | from urllib3.exceptions import HTTPError as BaseHTTPError 10 | 11 | 12 | class RequestException(IOError): 13 | """There was an ambiguous exception that occurred while handling your 14 | request. 15 | """ 16 | 17 | def __init__(self, *args, **kwargs): 18 | """Initialize RequestException with `request` and `response` objects.""" 19 | response = kwargs.pop('response', None) 20 | self.response = response 21 | self.request = kwargs.pop('request', None) 22 | if (response is not None and not self.request and 23 | hasattr(response, 'request')): 24 | self.request = self.response.request 25 | super(RequestException, self).__init__(*args, **kwargs) 26 | 27 | 28 | class HTTPError(RequestException): 29 | """An HTTP error occurred.""" 30 | 31 | 32 | class ConnectionError(RequestException): 33 | """A Connection error occurred.""" 34 | 35 | 36 | class ProxyError(ConnectionError): 37 | """A proxy error occurred.""" 38 | 39 | 40 | class SSLError(ConnectionError): 41 | """An SSL error occurred.""" 42 | 43 | 44 | class Timeout(RequestException): 45 | """The request timed out. 46 | 47 | Catching this error will catch both 48 | :exc:`~requests.exceptions.ConnectTimeout` and 49 | :exc:`~requests.exceptions.ReadTimeout` errors. 
50 | """ 51 | 52 | 53 | class ConnectTimeout(ConnectionError, Timeout): 54 | """The request timed out while trying to connect to the remote server. 55 | 56 | Requests that produced this error are safe to retry. 57 | """ 58 | 59 | 60 | class ReadTimeout(Timeout): 61 | """The server did not send any data in the allotted amount of time.""" 62 | 63 | 64 | class URLRequired(RequestException): 65 | """A valid URL is required to make a request.""" 66 | 67 | 68 | class TooManyRedirects(RequestException): 69 | """Too many redirects.""" 70 | 71 | 72 | class MissingSchema(RequestException, ValueError): 73 | """The URL schema (e.g. http or https) is missing.""" 74 | 75 | 76 | class InvalidSchema(RequestException, ValueError): 77 | """See defaults.py for valid schemas.""" 78 | 79 | 80 | class InvalidURL(RequestException, ValueError): 81 | """The URL provided was somehow invalid.""" 82 | 83 | 84 | class InvalidHeader(RequestException, ValueError): 85 | """The header value provided was somehow invalid.""" 86 | 87 | 88 | class InvalidProxyURL(InvalidURL): 89 | """The proxy URL provided is invalid.""" 90 | 91 | 92 | class ChunkedEncodingError(RequestException): 93 | """The server declared chunked encoding but sent an invalid chunk.""" 94 | 95 | 96 | class ContentDecodingError(RequestException, BaseHTTPError): 97 | """Failed to decode response content.""" 98 | 99 | 100 | class StreamConsumedError(RequestException, TypeError): 101 | """The content for this response was already consumed.""" 102 | 103 | 104 | class RetryError(RequestException): 105 | """Custom retries logic failed""" 106 | 107 | 108 | class UnrewindableBodyError(RequestException): 109 | """Requests encountered an error when trying to rewind a body.""" 110 | 111 | # Warnings 112 | 113 | 114 | class RequestsWarning(Warning): 115 | """Base warning for Requests.""" 116 | 117 | 118 | class FileModeWarning(RequestsWarning, DeprecationWarning): 119 | """A file was opened in text mode, but Requests determined its binary 
length.""" 120 | 121 | 122 | class RequestsDependencyWarning(RequestsWarning): 123 | """An imported dependency doesn't match the expected version range.""" 124 | -------------------------------------------------------------------------------- /requests/help.py: -------------------------------------------------------------------------------- 1 | """Module containing bug report helper(s).""" 2 | from __future__ import print_function 3 | 4 | import json 5 | import platform 6 | import sys 7 | import ssl 8 | 9 | import idna 10 | import urllib3 11 | import chardet 12 | 13 | from . import __version__ as requests_version 14 | 15 | try: 16 | from urllib3.contrib import pyopenssl 17 | except ImportError: 18 | pyopenssl = None 19 | OpenSSL = None 20 | cryptography = None 21 | else: 22 | import OpenSSL 23 | import cryptography 24 | 25 | 26 | def _implementation(): 27 | """Return a dict with the Python implementation and version. 28 | 29 | Provide both the name and the version of the Python implementation 30 | currently running. For example, on CPython 2.7.5 it will return 31 | {'name': 'CPython', 'version': '2.7.5'}. 32 | 33 | This function works best on CPython and PyPy: in particular, it probably 34 | doesn't work for Jython or IronPython. Future investigation should be done 35 | to work out the correct shape of the code for those platforms. 
36 | """ 37 | implementation = platform.python_implementation() 38 | 39 | if implementation == 'CPython': 40 | implementation_version = platform.python_version() 41 | elif implementation == 'PyPy': 42 | implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, 43 | sys.pypy_version_info.minor, 44 | sys.pypy_version_info.micro) 45 | if sys.pypy_version_info.releaselevel != 'final': 46 | implementation_version = ''.join([ 47 | implementation_version, sys.pypy_version_info.releaselevel 48 | ]) 49 | elif implementation == 'Jython': 50 | implementation_version = platform.python_version() # Complete Guess 51 | elif implementation == 'IronPython': 52 | implementation_version = platform.python_version() # Complete Guess 53 | else: 54 | implementation_version = 'Unknown' 55 | 56 | return {'name': implementation, 'version': implementation_version} 57 | 58 | 59 | def info(): 60 | """Generate information for a bug report.""" 61 | try: 62 | platform_info = { 63 | 'system': platform.system(), 64 | 'release': platform.release(), 65 | } 66 | except IOError: 67 | platform_info = { 68 | 'system': 'Unknown', 69 | 'release': 'Unknown', 70 | } 71 | 72 | implementation_info = _implementation() 73 | urllib3_info = {'version': urllib3.__version__} 74 | chardet_info = {'version': chardet.__version__} 75 | 76 | pyopenssl_info = { 77 | 'version': None, 78 | 'openssl_version': '', 79 | } 80 | if OpenSSL: 81 | pyopenssl_info = { 82 | 'version': OpenSSL.__version__, 83 | 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, 84 | } 85 | cryptography_info = { 86 | 'version': getattr(cryptography, '__version__', ''), 87 | } 88 | idna_info = { 89 | 'version': getattr(idna, '__version__', ''), 90 | } 91 | 92 | system_ssl = ssl.OPENSSL_VERSION_NUMBER 93 | system_ssl_info = { 94 | 'version': '%x' % system_ssl if system_ssl is not None else '' 95 | } 96 | 97 | return { 98 | 'platform': platform_info, 99 | 'implementation': implementation_info, 100 | 'system_ssl': system_ssl_info, 101 
| 'using_pyopenssl': pyopenssl is not None, 102 | 'pyOpenSSL': pyopenssl_info, 103 | 'urllib3': urllib3_info, 104 | 'chardet': chardet_info, 105 | 'cryptography': cryptography_info, 106 | 'idna': idna_info, 107 | 'requests': { 108 | 'version': requests_version, 109 | }, 110 | } 111 | 112 | 113 | def main(): 114 | """Pretty-print the bug information as JSON.""" 115 | print(json.dumps(info(), sort_keys=True, indent=2)) 116 | 117 | 118 | if __name__ == '__main__': 119 | main() 120 | -------------------------------------------------------------------------------- /requests/status_codes.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | r""" 4 | The ``codes`` object defines a mapping from common names for HTTP statuses 5 | to their numerical codes, accessible either as attributes or as dictionary 6 | items. 7 | 8 | Example:: 9 | 10 | >>> import requests 11 | >>> requests.codes['temporary_redirect'] 12 | 307 13 | >>> requests.codes.teapot 14 | 418 15 | >>> requests.codes['\o/'] 16 | 200 17 | 18 | Some codes have multiple names, and both upper- and lower-case versions of 19 | the names are allowed. For example, ``codes.ok``, ``codes.OK``, and 20 | ``codes.okay`` all correspond to the HTTP status code 200. 21 | """ 22 | 23 | from .structures import LookupDict 24 | 25 | _codes = { 26 | 27 | # Informational. 
28 | 100: ('continue',), 29 | 101: ('switching_protocols',), 30 | 102: ('processing',), 31 | 103: ('checkpoint',), 32 | 122: ('uri_too_long', 'request_uri_too_long'), 33 | 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), 34 | 201: ('created',), 35 | 202: ('accepted',), 36 | 203: ('non_authoritative_info', 'non_authoritative_information'), 37 | 204: ('no_content',), 38 | 205: ('reset_content', 'reset'), 39 | 206: ('partial_content', 'partial'), 40 | 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), 41 | 208: ('already_reported',), 42 | 226: ('im_used',), 43 | 44 | # Redirection. 45 | 300: ('multiple_choices',), 46 | 301: ('moved_permanently', 'moved', '\\o-'), 47 | 302: ('found',), 48 | 303: ('see_other', 'other'), 49 | 304: ('not_modified',), 50 | 305: ('use_proxy',), 51 | 306: ('switch_proxy',), 52 | 307: ('temporary_redirect', 'temporary_moved', 'temporary'), 53 | 308: ('permanent_redirect', 54 | 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 55 | 56 | # Client Error. 
57 | 400: ('bad_request', 'bad'), 58 | 401: ('unauthorized',), 59 | 402: ('payment_required', 'payment'), 60 | 403: ('forbidden',), 61 | 404: ('not_found', '-o-'), 62 | 405: ('method_not_allowed', 'not_allowed'), 63 | 406: ('not_acceptable',), 64 | 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), 65 | 408: ('request_timeout', 'timeout'), 66 | 409: ('conflict',), 67 | 410: ('gone',), 68 | 411: ('length_required',), 69 | 412: ('precondition_failed', 'precondition'), 70 | 413: ('request_entity_too_large',), 71 | 414: ('request_uri_too_large',), 72 | 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), 73 | 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), 74 | 417: ('expectation_failed',), 75 | 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), 76 | 421: ('misdirected_request',), 77 | 422: ('unprocessable_entity', 'unprocessable'), 78 | 423: ('locked',), 79 | 424: ('failed_dependency', 'dependency'), 80 | 425: ('unordered_collection', 'unordered'), 81 | 426: ('upgrade_required', 'upgrade'), 82 | 428: ('precondition_required', 'precondition'), 83 | 429: ('too_many_requests', 'too_many'), 84 | 431: ('header_fields_too_large', 'fields_too_large'), 85 | 444: ('no_response', 'none'), 86 | 449: ('retry_with', 'retry'), 87 | 450: ('blocked_by_windows_parental_controls', 'parental_controls'), 88 | 451: ('unavailable_for_legal_reasons', 'legal_reasons'), 89 | 499: ('client_closed_request',), 90 | 91 | # Server Error. 
92 | 500: ('internal_server_error', 'server_error', '/o\\', '✗'), 93 | 501: ('not_implemented',), 94 | 502: ('bad_gateway',), 95 | 503: ('service_unavailable', 'unavailable'), 96 | 504: ('gateway_timeout',), 97 | 505: ('http_version_not_supported', 'http_version'), 98 | 506: ('variant_also_negotiates',), 99 | 507: ('insufficient_storage',), 100 | 509: ('bandwidth_limit_exceeded', 'bandwidth'), 101 | 510: ('not_extended',), 102 | 511: ('network_authentication_required', 'network_auth', 'network_authentication'), 103 | } 104 | 105 | codes = LookupDict(name='status_codes') 106 | 107 | def _init(): 108 | for code, titles in _codes.items(): 109 | for title in titles: 110 | setattr(codes, title, code) 111 | if not title.startswith(('\\', '/')): 112 | setattr(codes, title.upper(), code) 113 | 114 | def doc(code): 115 | names = ', '.join('``%s``' % n for n in _codes[code]) 116 | return '* %d: %s' % (code, names) 117 | 118 | global __doc__ 119 | __doc__ = (__doc__ + '\n' + 120 | '\n'.join(doc(code) for code in sorted(_codes)) 121 | if __doc__ is not None else None) 122 | 123 | _init() 124 | -------------------------------------------------------------------------------- /requests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # __ 4 | # /__) _ _ _ _ _/ _ 5 | # / ( (- (/ (/ (- _) / _) 6 | # / 7 | 8 | """ 9 | Requests HTTP Library 10 | ~~~~~~~~~~~~~~~~~~~~~ 11 | 12 | Requests is an HTTP library, written in Python, for human beings. 13 | Basic GET usage: 14 | 15 | >>> import requests 16 | >>> r = requests.get('https://www.python.org') 17 | >>> r.status_code 18 | 200 19 | >>> b'Python is a programming language' in r.content 20 | True 21 | 22 | ... or POST: 23 | 24 | >>> payload = dict(key1='value1', key2='value2') 25 | >>> r = requests.post('https://httpbin.org/post', data=payload) 26 | >>> print(r.text) 27 | { 28 | ... 29 | "form": { 30 | "key1": "value1", 31 | "key2": "value2" 32 | }, 33 | ... 
34 | } 35 | 36 | The other HTTP methods are supported - see `requests.api`. Full documentation 37 | is at . 38 | 39 | :copyright: (c) 2017 by Kenneth Reitz. 40 | :license: Apache 2.0, see LICENSE for more details. 41 | """ 42 | 43 | import urllib3 44 | import chardet 45 | import warnings 46 | from .exceptions import RequestsDependencyWarning 47 | 48 | 49 | def check_compatibility(urllib3_version, chardet_version): 50 | urllib3_version = urllib3_version.split('.') 51 | assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. 52 | 53 | # Sometimes, urllib3 only reports its version as 16.1. 54 | if len(urllib3_version) == 2: 55 | urllib3_version.append('0') 56 | 57 | # Check urllib3 for compatibility. 58 | major, minor, patch = urllib3_version # noqa: F811 59 | major, minor, patch = int(major), int(minor), int(patch) 60 | # urllib3 >= 1.21.1, <= 1.26 61 | assert major == 1 62 | assert minor >= 21 63 | assert minor <= 26 64 | 65 | # Check chardet for compatibility. 66 | major, minor, patch = chardet_version.split('.')[:3] 67 | major, minor, patch = int(major), int(minor), int(patch) 68 | # chardet >= 3.0.2, < 5.0.0 69 | assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0) 70 | 71 | 72 | def _check_cryptography(cryptography_version): 73 | # cryptography < 1.3.4 74 | try: 75 | cryptography_version = list(map(int, cryptography_version.split('.'))) 76 | except ValueError: 77 | return 78 | 79 | if cryptography_version < [1, 3, 4]: 80 | warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) 81 | warnings.warn(warning, RequestsDependencyWarning) 82 | 83 | # Check imported dependencies for compatibility. 
try:
    check_compatibility(urllib3.__version__, chardet.__version__)
except (AssertionError, ValueError):
    # Incompatible or unparsable dependency versions: warn but continue, so
    # `import requests` never fails outright over a slightly-off dependency.
    warnings.warn("urllib3 ({}) or chardet ({}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet.__version__),
                  RequestsDependencyWarning)

# Attempt to enable urllib3's fallback for SNI support
# if the standard library doesn't support SNI or the
# 'ssl' library isn't available.
try:
    try:
        import ssl
    except ImportError:
        ssl = None

    # getattr with a False default covers both "no ssl module" (ssl is None)
    # and "ssl module without SNI support".
    if not getattr(ssl, "HAS_SNI", False):
        from urllib3.contrib import pyopenssl
        pyopenssl.inject_into_urllib3()

    # Check cryptography version
    from cryptography import __version__ as cryptography_version
    _check_cryptography(cryptography_version)
except ImportError:
    # pyOpenSSL/cryptography are optional; silently fall back to stdlib ssl.
    pass

# urllib3's DependencyWarnings should be silenced.
from urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)

# Re-export package metadata and the public API at the top level.
from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__, __cake__

from . import utils
from . import packages
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError,
    FileModeWarning, ConnectTimeout, ReadTimeout
)

# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler

logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
137 | warnings.simplefilter('default', FileModeWarning, append=True) 138 | -------------------------------------------------------------------------------- /run.py: -------------------------------------------------------------------------------- 1 | import re 2 | import time 3 | import requests 4 | import sendMsg 5 | import config 6 | 7 | 8 | class daka(): 9 | def __init__(self): 10 | self.id = config.stutendID 11 | self.password = config.passward 12 | self.token = "" 13 | self.name = "" 14 | self.lt = "" 15 | self.my_session = requests.session() 16 | self.sendMsg=sendMsg.sendMsg() 17 | 18 | self.login_url = 'https://e-report.neu.edu.cn/login' 19 | self.post_url = 'https://pass.neu.edu.cn/tpass/login' 20 | self.create_url = 'https://e-report.neu.edu.cn/notes/create' 21 | self.note_url = 'https://e-report.neu.edu.cn/api/notes' 22 | 23 | 24 | def login(self): 25 | #登陆,更新session 26 | msg='' 27 | try: 28 | login_response = self.my_session.get(self.login_url) 29 | self.lt = re.findall(r'LT-[0-9]*-[0-9a-zA-Z]*-tpass', login_response.text, re.S)[0] 30 | 31 | login_form_items = { 32 | 'rsa': self.id + self.password + self.lt, 33 | 'ul': str(len(self.id)), 34 | 'pl': str(len(self.password)), 35 | 'lt': self.lt, 36 | 'execution': 'e1s1', 37 | '_eventId': 'submit' 38 | } 39 | post_response = self.my_session.post(self.post_url, login_form_items) 40 | msg=config.stutendID+'登录成功!' 41 | except: 42 | msg=config.stutendID+'登录失败!请手动完成打卡!' 
        return msg


    def healthDaka(self):
        # Health check-in: fetch the report-creation page, scrape the CSRF
        # token and user name out of its HTML, then POST the report form.
        # Returns (message, success_flag).
        msg=''
        success=False
        try:
            note_response = self.my_session.get(self.create_url)
            # CSRF token from the hidden <input name="_token" value="..."> field.
            self.token = re.findall(r'name=\"_token\"\s+value=\"([0-9a-zA-Z]+)\"',note_response.text, re.S)[0]
            # Student name scraped from the "当前用户:" (current user) label.
            self.name = re.findall(r'当前用户:\s*(\w+)\s*', note_response.text, re.S)[0]

            # Form fields expected by the report endpoint; values hard-code a
            # "normal, no travel" daily report.
            health_items = {
                '_token': self.token,
                'jibenxinxi_shifoubenrenshangbao': '1',
                'profile[xuegonghao]': self.id,
                'profile[xingming]': self.name,
                'profile[suoshubanji]': '',
                'jiankangxinxi_muqianshentizhuangkuang': '正常',
                'xingchengxinxi_weizhishifouyoubianhua': '0',
                'cross_city': '无',
                'qitashixiang_qitaxuyaoshuomingdeshixiang': ''
            }
            health_response = self.my_session.post(self.note_url, health_items)
            # Server replies 201 Created on a successful report submission.
            if health_response.status_code == 201:
                print(str(health_response) + '健康打卡成功')
                msg=config.stutendID+'健康打卡成功!'
                success=True
            else:
                msg=config.stutendID+'健康打卡失败!请手动完成打卡!'+ '(响应异常)'+str(health_response)
        except:
            # NOTE(review): bare except swallows every failure (network errors,
            # regex misses when the page layout changes) — consider narrowing.
            msg=config.stutendID+'健康打卡失败!请手动完成打卡!'+ '(执行异常)'
        return msg,success


    def temperatureDaka(self):
        # Temperature check-in: pick the slot (morning/noon/evening) from the
        # current hour and POST a fixed 36.5°C reading.
        # Returns (message, success_flag).
        msg=''
        success=False
        try:
            # +8 converts UTC (Tencent Cloud serverless runs on GMT) to Beijing
            # time; drop the offset when running on a host already in local time.
            hour = (time.localtime().tm_hour + 8) % 24
            # Slot 1: 07-09h, slot 2: 12-14h, slot 3: everything else.
            temperature_url = 'https://e-report.neu.edu.cn/inspection/items/{}/records'.format(('1' if 7 <= hour <= 9 else '2' if 12 <= hour <= 14 else '3'))
            temperature_items = {
                '_token': self.token,
                'temperature': '36.5',
                'suspicious_respiratory_symptoms': '0',
                'symptom_descriptions': ''
            }
            temperature_response = self.my_session.post(temperature_url, temperature_items)
            if temperature_response.status_code == 200:
                print(str(temperature_response) + '体温打卡成功')
                msg=config.stutendID+'体温打卡成功!'
95 | success=True 96 | else: 97 | msg=config.stutendID+'体温打卡失败!请手动完成打卡!'+ '(响应异常)'+str(temperature_response) 98 | except: 99 | msg=config.stutendID+'体温打卡失败!请手动完成打卡!'+ '(执行异常)' 100 | return msg,success 101 | 102 | def main_handler(event, context): 103 | _daka = daka() 104 | loginMsg = _daka.login() 105 | healthMsg, healSuc = _daka.healthDaka() 106 | temperatureMsg, tempSuc = _daka.temperatureDaka() 107 | 108 | if config.sendMsgOnlyError and healSuc and tempSuc: 109 | pass 110 | else: 111 | _daka.sendMsg.sendMessage(time.strftime('%Y-%m-%d %H:%M:%S',time.localtime())+'\n'+loginMsg+'\n'+healthMsg+'\n'+temperatureMsg) 112 | 113 | if __name__ == '__main__': 114 | main_handler(None,None) 115 | -------------------------------------------------------------------------------- /requests/api.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.api 5 | ~~~~~~~~~~~~ 6 | 7 | This module implements the Requests API. 8 | 9 | :copyright: (c) 2012 by Kenneth Reitz. 10 | :license: Apache2, see LICENSE for more details. 11 | """ 12 | 13 | from . import sessions 14 | 15 | 16 | def request(method, url, **kwargs): 17 | """Constructs and sends a :class:`Request `. 18 | 19 | :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. 20 | :param url: URL for the new :class:`Request` object. 21 | :param params: (optional) Dictionary, list of tuples or bytes to send 22 | in the query string for the :class:`Request`. 23 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 24 | object to send in the body of the :class:`Request`. 25 | :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. 26 | :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. 
27 | :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. 28 | :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. 29 | ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` 30 | or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string 31 | defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers 32 | to add for the file. 33 | :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. 34 | :param timeout: (optional) How many seconds to wait for the server to send data 35 | before giving up, as a float, or a :ref:`(connect timeout, read 36 | timeout) ` tuple. 37 | :type timeout: float or tuple 38 | :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. 39 | :type allow_redirects: bool 40 | :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 41 | :param verify: (optional) Either a boolean, in which case it controls whether we verify 42 | the server's TLS certificate, or a string, in which case it must be a path 43 | to a CA bundle to use. Defaults to ``True``. 44 | :param stream: (optional) if ``False``, the response content will be immediately downloaded. 45 | :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 
46 | :return: :class:`Response ` object 47 | :rtype: requests.Response 48 | 49 | Usage:: 50 | 51 | >>> import requests 52 | >>> req = requests.request('GET', 'https://httpbin.org/get') 53 | >>> req 54 | 55 | """ 56 | 57 | # By using the 'with' statement we are sure the session is closed, thus we 58 | # avoid leaving sockets open which can trigger a ResourceWarning in some 59 | # cases, and look like a memory leak in others. 60 | with sessions.Session() as session: 61 | return session.request(method=method, url=url, **kwargs) 62 | 63 | 64 | def get(url, params=None, **kwargs): 65 | r"""Sends a GET request. 66 | 67 | :param url: URL for the new :class:`Request` object. 68 | :param params: (optional) Dictionary, list of tuples or bytes to send 69 | in the query string for the :class:`Request`. 70 | :param \*\*kwargs: Optional arguments that ``request`` takes. 71 | :return: :class:`Response ` object 72 | :rtype: requests.Response 73 | """ 74 | 75 | kwargs.setdefault('allow_redirects', True) 76 | return request('get', url, params=params, **kwargs) 77 | 78 | 79 | def options(url, **kwargs): 80 | r"""Sends an OPTIONS request. 81 | 82 | :param url: URL for the new :class:`Request` object. 83 | :param \*\*kwargs: Optional arguments that ``request`` takes. 84 | :return: :class:`Response ` object 85 | :rtype: requests.Response 86 | """ 87 | 88 | kwargs.setdefault('allow_redirects', True) 89 | return request('options', url, **kwargs) 90 | 91 | 92 | def head(url, **kwargs): 93 | r"""Sends a HEAD request. 94 | 95 | :param url: URL for the new :class:`Request` object. 96 | :param \*\*kwargs: Optional arguments that ``request`` takes. If 97 | `allow_redirects` is not provided, it will be set to `False` (as 98 | opposed to the default :meth:`request` behavior). 
99 | :return: :class:`Response ` object 100 | :rtype: requests.Response 101 | """ 102 | 103 | kwargs.setdefault('allow_redirects', False) 104 | return request('head', url, **kwargs) 105 | 106 | 107 | def post(url, data=None, json=None, **kwargs): 108 | r"""Sends a POST request. 109 | 110 | :param url: URL for the new :class:`Request` object. 111 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 112 | object to send in the body of the :class:`Request`. 113 | :param json: (optional) json data to send in the body of the :class:`Request`. 114 | :param \*\*kwargs: Optional arguments that ``request`` takes. 115 | :return: :class:`Response ` object 116 | :rtype: requests.Response 117 | """ 118 | 119 | return request('post', url, data=data, json=json, **kwargs) 120 | 121 | 122 | def put(url, data=None, **kwargs): 123 | r"""Sends a PUT request. 124 | 125 | :param url: URL for the new :class:`Request` object. 126 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 127 | object to send in the body of the :class:`Request`. 128 | :param json: (optional) json data to send in the body of the :class:`Request`. 129 | :param \*\*kwargs: Optional arguments that ``request`` takes. 130 | :return: :class:`Response ` object 131 | :rtype: requests.Response 132 | """ 133 | 134 | return request('put', url, data=data, **kwargs) 135 | 136 | 137 | def patch(url, data=None, **kwargs): 138 | r"""Sends a PATCH request. 139 | 140 | :param url: URL for the new :class:`Request` object. 141 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 142 | object to send in the body of the :class:`Request`. 143 | :param json: (optional) json data to send in the body of the :class:`Request`. 144 | :param \*\*kwargs: Optional arguments that ``request`` takes. 
145 | :return: :class:`Response ` object 146 | :rtype: requests.Response 147 | """ 148 | 149 | return request('patch', url, data=data, **kwargs) 150 | 151 | 152 | def delete(url, **kwargs): 153 | r"""Sends a DELETE request. 154 | 155 | :param url: URL for the new :class:`Request` object. 156 | :param \*\*kwargs: Optional arguments that ``request`` takes. 157 | :return: :class:`Response ` object 158 | :rtype: requests.Response 159 | """ 160 | 161 | return request('delete', url, **kwargs) 162 | -------------------------------------------------------------------------------- /requests/auth.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.auth 5 | ~~~~~~~~~~~~~ 6 | 7 | This module contains the authentication handlers for Requests. 8 | """ 9 | 10 | import os 11 | import re 12 | import time 13 | import hashlib 14 | import threading 15 | import warnings 16 | 17 | from base64 import b64encode 18 | 19 | from .compat import urlparse, str, basestring 20 | from .cookies import extract_cookies_to_jar 21 | from ._internal_utils import to_native_string 22 | from .utils import parse_dict_header 23 | 24 | CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' 25 | CONTENT_TYPE_MULTI_PART = 'multipart/form-data' 26 | 27 | 28 | def _basic_auth_str(username, password): 29 | """Returns a Basic Auth string.""" 30 | 31 | # "I want us to put a big-ol' comment on top of it that 32 | # says that this behaviour is dumb but we need to preserve 33 | # it because people are relying on it." 34 | # - Lukasa 35 | # 36 | # These are here solely to maintain backwards compatibility 37 | # for things like ints. This will be removed in 3.0.0. 38 | if not isinstance(username, basestring): 39 | warnings.warn( 40 | "Non-string usernames will no longer be supported in Requests " 41 | "3.0.0. 
Please convert the object you've passed in ({!r}) to " 42 | "a string or bytes object in the near future to avoid " 43 | "problems.".format(username), 44 | category=DeprecationWarning, 45 | ) 46 | username = str(username) 47 | 48 | if not isinstance(password, basestring): 49 | warnings.warn( 50 | "Non-string passwords will no longer be supported in Requests " 51 | "3.0.0. Please convert the object you've passed in ({!r}) to " 52 | "a string or bytes object in the near future to avoid " 53 | "problems.".format(type(password)), 54 | category=DeprecationWarning, 55 | ) 56 | password = str(password) 57 | # -- End Removal -- 58 | 59 | if isinstance(username, str): 60 | username = username.encode('latin1') 61 | 62 | if isinstance(password, str): 63 | password = password.encode('latin1') 64 | 65 | authstr = 'Basic ' + to_native_string( 66 | b64encode(b':'.join((username, password))).strip() 67 | ) 68 | 69 | return authstr 70 | 71 | 72 | class AuthBase(object): 73 | """Base class that all auth implementations derive from""" 74 | 75 | def __call__(self, r): 76 | raise NotImplementedError('Auth hooks must be callable.') 77 | 78 | 79 | class HTTPBasicAuth(AuthBase): 80 | """Attaches HTTP Basic Authentication to the given Request object.""" 81 | 82 | def __init__(self, username, password): 83 | self.username = username 84 | self.password = password 85 | 86 | def __eq__(self, other): 87 | return all([ 88 | self.username == getattr(other, 'username', None), 89 | self.password == getattr(other, 'password', None) 90 | ]) 91 | 92 | def __ne__(self, other): 93 | return not self == other 94 | 95 | def __call__(self, r): 96 | r.headers['Authorization'] = _basic_auth_str(self.username, self.password) 97 | return r 98 | 99 | 100 | class HTTPProxyAuth(HTTPBasicAuth): 101 | """Attaches HTTP Proxy Authentication to a given Request object.""" 102 | 103 | def __call__(self, r): 104 | r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) 105 | return r 106 | 107 | 
108 | class HTTPDigestAuth(AuthBase): 109 | """Attaches HTTP Digest Authentication to the given Request object.""" 110 | 111 | def __init__(self, username, password): 112 | self.username = username 113 | self.password = password 114 | # Keep state in per-thread local storage 115 | self._thread_local = threading.local() 116 | 117 | def init_per_thread_state(self): 118 | # Ensure state is initialized just once per-thread 119 | if not hasattr(self._thread_local, 'init'): 120 | self._thread_local.init = True 121 | self._thread_local.last_nonce = '' 122 | self._thread_local.nonce_count = 0 123 | self._thread_local.chal = {} 124 | self._thread_local.pos = None 125 | self._thread_local.num_401_calls = None 126 | 127 | def build_digest_header(self, method, url): 128 | """ 129 | :rtype: str 130 | """ 131 | 132 | realm = self._thread_local.chal['realm'] 133 | nonce = self._thread_local.chal['nonce'] 134 | qop = self._thread_local.chal.get('qop') 135 | algorithm = self._thread_local.chal.get('algorithm') 136 | opaque = self._thread_local.chal.get('opaque') 137 | hash_utf8 = None 138 | 139 | if algorithm is None: 140 | _algorithm = 'MD5' 141 | else: 142 | _algorithm = algorithm.upper() 143 | # lambdas assume digest modules are imported at the top level 144 | if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': 145 | def md5_utf8(x): 146 | if isinstance(x, str): 147 | x = x.encode('utf-8') 148 | return hashlib.md5(x).hexdigest() 149 | hash_utf8 = md5_utf8 150 | elif _algorithm == 'SHA': 151 | def sha_utf8(x): 152 | if isinstance(x, str): 153 | x = x.encode('utf-8') 154 | return hashlib.sha1(x).hexdigest() 155 | hash_utf8 = sha_utf8 156 | elif _algorithm == 'SHA-256': 157 | def sha256_utf8(x): 158 | if isinstance(x, str): 159 | x = x.encode('utf-8') 160 | return hashlib.sha256(x).hexdigest() 161 | hash_utf8 = sha256_utf8 162 | elif _algorithm == 'SHA-512': 163 | def sha512_utf8(x): 164 | if isinstance(x, str): 165 | x = x.encode('utf-8') 166 | return hashlib.sha512(x).hexdigest() 
167 | hash_utf8 = sha512_utf8 168 | 169 | KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) 170 | 171 | if hash_utf8 is None: 172 | return None 173 | 174 | # XXX not implemented yet 175 | entdig = None 176 | p_parsed = urlparse(url) 177 | #: path is request-uri defined in RFC 2616 which should not be empty 178 | path = p_parsed.path or "/" 179 | if p_parsed.query: 180 | path += '?' + p_parsed.query 181 | 182 | A1 = '%s:%s:%s' % (self.username, realm, self.password) 183 | A2 = '%s:%s' % (method, path) 184 | 185 | HA1 = hash_utf8(A1) 186 | HA2 = hash_utf8(A2) 187 | 188 | if nonce == self._thread_local.last_nonce: 189 | self._thread_local.nonce_count += 1 190 | else: 191 | self._thread_local.nonce_count = 1 192 | ncvalue = '%08x' % self._thread_local.nonce_count 193 | s = str(self._thread_local.nonce_count).encode('utf-8') 194 | s += nonce.encode('utf-8') 195 | s += time.ctime().encode('utf-8') 196 | s += os.urandom(8) 197 | 198 | cnonce = (hashlib.sha1(s).hexdigest()[:16]) 199 | if _algorithm == 'MD5-SESS': 200 | HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) 201 | 202 | if not qop: 203 | respdig = KD(HA1, "%s:%s" % (nonce, HA2)) 204 | elif qop == 'auth' or 'auth' in qop.split(','): 205 | noncebit = "%s:%s:%s:%s:%s" % ( 206 | nonce, ncvalue, cnonce, 'auth', HA2 207 | ) 208 | respdig = KD(HA1, noncebit) 209 | else: 210 | # XXX handle auth-int. 211 | return None 212 | 213 | self._thread_local.last_nonce = nonce 214 | 215 | # XXX should the partial digests be encoded too? 
216 | base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ 217 | 'response="%s"' % (self.username, realm, nonce, path, respdig) 218 | if opaque: 219 | base += ', opaque="%s"' % opaque 220 | if algorithm: 221 | base += ', algorithm="%s"' % algorithm 222 | if entdig: 223 | base += ', digest="%s"' % entdig 224 | if qop: 225 | base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) 226 | 227 | return 'Digest %s' % (base) 228 | 229 | def handle_redirect(self, r, **kwargs): 230 | """Reset num_401_calls counter on redirects.""" 231 | if r.is_redirect: 232 | self._thread_local.num_401_calls = 1 233 | 234 | def handle_401(self, r, **kwargs): 235 | """ 236 | Takes the given response and tries digest-auth, if needed. 237 | 238 | :rtype: requests.Response 239 | """ 240 | 241 | # If response is not 4xx, do not auth 242 | # See https://github.com/psf/requests/issues/3772 243 | if not 400 <= r.status_code < 500: 244 | self._thread_local.num_401_calls = 1 245 | return r 246 | 247 | if self._thread_local.pos is not None: 248 | # Rewind the file position indicator of the body to where 249 | # it was to resend the request. 250 | r.request.body.seek(self._thread_local.pos) 251 | s_auth = r.headers.get('www-authenticate', '') 252 | 253 | if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: 254 | 255 | self._thread_local.num_401_calls += 1 256 | pat = re.compile(r'digest ', flags=re.IGNORECASE) 257 | self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) 258 | 259 | # Consume content and release the original connection 260 | # to allow our new request to reuse the same one. 
261 | r.content 262 | r.close() 263 | prep = r.request.copy() 264 | extract_cookies_to_jar(prep._cookies, r.request, r.raw) 265 | prep.prepare_cookies(prep._cookies) 266 | 267 | prep.headers['Authorization'] = self.build_digest_header( 268 | prep.method, prep.url) 269 | _r = r.connection.send(prep, **kwargs) 270 | _r.history.append(r) 271 | _r.request = prep 272 | 273 | return _r 274 | 275 | self._thread_local.num_401_calls = 1 276 | return r 277 | 278 | def __call__(self, r): 279 | # Initialize per-thread state, if needed 280 | self.init_per_thread_state() 281 | # If we have a saved nonce, skip the 401 282 | if self._thread_local.last_nonce: 283 | r.headers['Authorization'] = self.build_digest_header(r.method, r.url) 284 | try: 285 | self._thread_local.pos = r.body.tell() 286 | except AttributeError: 287 | # In the case of HTTPDigestAuth being reused and the body of 288 | # the previous request was a file-like object, pos has the 289 | # file position of the previous body. Ensure it's set to 290 | # None. 291 | self._thread_local.pos = None 292 | r.register_hook('response', self.handle_401) 293 | r.register_hook('response', self.handle_redirect) 294 | self._thread_local.num_401_calls = 1 295 | 296 | return r 297 | 298 | def __eq__(self, other): 299 | return all([ 300 | self.username == getattr(other, 'username', None), 301 | self.password == getattr(other, 'password', None) 302 | ]) 303 | 304 | def __ne__(self, other): 305 | return not self == other 306 | -------------------------------------------------------------------------------- /requests/cookies.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.cookies 5 | ~~~~~~~~~~~~~~~~ 6 | 7 | Compatibility code to be able to use `cookielib.CookieJar` with requests. 8 | 9 | requests.utils imports from here, so be careful with imports. 
10 | """ 11 | 12 | import copy 13 | import time 14 | import calendar 15 | 16 | from ._internal_utils import to_native_string 17 | from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping 18 | 19 | try: 20 | import threading 21 | except ImportError: 22 | import dummy_threading as threading 23 | 24 | 25 | class MockRequest(object): 26 | """Wraps a `requests.Request` to mimic a `urllib2.Request`. 27 | 28 | The code in `cookielib.CookieJar` expects this interface in order to correctly 29 | manage cookie policies, i.e., determine whether a cookie can be set, given the 30 | domains of the request and the cookie. 31 | 32 | The original request object is read-only. The client is responsible for collecting 33 | the new headers via `get_new_headers()` and interpreting them appropriately. You 34 | probably want `get_cookie_header`, defined below. 35 | """ 36 | 37 | def __init__(self, request): 38 | self._r = request 39 | self._new_headers = {} 40 | self.type = urlparse(self._r.url).scheme 41 | 42 | def get_type(self): 43 | return self.type 44 | 45 | def get_host(self): 46 | return urlparse(self._r.url).netloc 47 | 48 | def get_origin_req_host(self): 49 | return self.get_host() 50 | 51 | def get_full_url(self): 52 | # Only return the response's URL if the user hadn't set the Host 53 | # header 54 | if not self._r.headers.get('Host'): 55 | return self._r.url 56 | # If they did set it, retrieve it and reconstruct the expected domain 57 | host = to_native_string(self._r.headers['Host'], encoding='utf-8') 58 | parsed = urlparse(self._r.url) 59 | # Reconstruct the URL as we expect it 60 | return urlunparse([ 61 | parsed.scheme, host, parsed.path, parsed.params, parsed.query, 62 | parsed.fragment 63 | ]) 64 | 65 | def is_unverifiable(self): 66 | return True 67 | 68 | def has_header(self, name): 69 | return name in self._r.headers or name in self._new_headers 70 | 71 | def get_header(self, name, default=None): 72 | return self._r.headers.get(name, 
self._new_headers.get(name, default)) 73 | 74 | def add_header(self, key, val): 75 | """cookielib has no legitimate use for this method; add it back if you find one.""" 76 | raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") 77 | 78 | def add_unredirected_header(self, name, value): 79 | self._new_headers[name] = value 80 | 81 | def get_new_headers(self): 82 | return self._new_headers 83 | 84 | @property 85 | def unverifiable(self): 86 | return self.is_unverifiable() 87 | 88 | @property 89 | def origin_req_host(self): 90 | return self.get_origin_req_host() 91 | 92 | @property 93 | def host(self): 94 | return self.get_host() 95 | 96 | 97 | class MockResponse(object): 98 | """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. 99 | 100 | ...what? Basically, expose the parsed HTTP headers from the server response 101 | the way `cookielib` expects to see them. 102 | """ 103 | 104 | def __init__(self, headers): 105 | """Make a MockResponse for `cookielib` to read. 106 | 107 | :param headers: a httplib.HTTPMessage or analogous carrying the headers 108 | """ 109 | self._headers = headers 110 | 111 | def info(self): 112 | return self._headers 113 | 114 | def getheaders(self, name): 115 | self._headers.getheaders(name) 116 | 117 | 118 | def extract_cookies_to_jar(jar, request, response): 119 | """Extract the cookies from the response into a CookieJar. 

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # Nothing to extract unless urllib3 kept the wrapped httplib response
    # around (it carries the parsed header message cookielib needs).
    if not (hasattr(response, '_original_response') and
            response._original_response):
        return
    # the _original_response field is the wrapped httplib.HTTPResponse object,
    req = MockRequest(request)
    # pull out the HTTPMessage with the headers and put it in the mock:
    res = MockResponse(response._original_response.msg)
    jar.extract_cookies(res, req)


def get_cookie_header(jar, request):
    """
    Produce an appropriate Cookie header string to be sent with `request`, or None.

    :rtype: str
    """
    # Let cookielib compute the header into the mock, then read it back out.
    r = MockRequest(request)
    jar.add_cookie_header(r)
    return r.get_new_headers().get('Cookie')


def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: we must not mutate the jar while iterating it.
    clearables = []
    for cookie in cookiejar:
        if cookie.name != name:
            continue
        if domain is not None and domain != cookie.domain:
            continue
        if path is not None and path != cookie.path:
            continue
        clearables.append((cookie.domain, cookie.path, cookie.name))

    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)


class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.
    Use .get and .set and include domain and path args in order to be more specific.
    """


class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.

    Unlike a regular CookieJar, this class is pickleable.

    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        .. warning:: operation is O(n), not O(1).
        """
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        """
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return

        # A Morsel already carries its own attributes; anything else is built
        # from the keyword arguments via create_cookie().
        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar.

        .. seealso:: itervalues() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar.

        .. seealso:: values() and items().
        """
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar.

        .. seealso:: iterkeys() and iteritems().
        """
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar.

        .. seealso:: keys() and items().
        """
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar.

        .. seealso:: iterkeys() and itervalues().
        """
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
        vanilla python dict of key value pairs.

        .. seealso:: keys() and values().
        """
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise.

        :rtype: bool
        """
        domains = []
        for cookie in iter(self):
            # Short-circuit on the first repeated domain.
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements.

        :rtype: dict
        """
        dictionary = {}
        for cookie in iter(self):
            if (
                (domain is None or cookie.domain == domain) and
                (path is None or cookie.path == path)
            ):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __contains__(self, name):
        # A conflict means at least two cookies with that name exist, so
        # membership is trivially True.
        try:
            return super(RequestsCookieJar, self).__contains__(name)
        except CookieConflictError:
            return True

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.

        .. warning:: operation is O(n), not O(1).
        """
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead.
        """
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``.
        """
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped quotes from values that arrive wrapped in quotes
        # before delegating to the base class.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            # Copy each cookie so the two jars don't share mutable Cookie objects.
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super(RequestsCookieJar, self).update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values.

        If there are conflicting cookies, _find arbitrarily chooses one.
        See _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :return: cookie.value
        """
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests.

        :param name: a string containing name of cookie
        :param domain: (optional) string containing domain of cookie
        :param path: (optional) string containing path of cookie
        :raises KeyError: if cookie is not found
        :raises CookieConflictError: if there are multiple cookies
            that match name and optionally domain and path
        :return: cookie.value
        """
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict

        # NOTE(review): a matching cookie whose value is falsy (e.g. '')
        # falls through to KeyError here — confirm that is intended.
        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        # Recreate the lock dropped by __getstate__.
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.set_policy(self.get_policy())
        new_cj.update(self)
        return new_cj

    def get_policy(self):
        """Return the CookiePolicy instance used."""
        return self._policy
CookieJar instance 434 | new_jar = copy.copy(jar) 435 | new_jar.clear() 436 | for cookie in jar: 437 | new_jar.set_cookie(copy.copy(cookie)) 438 | return new_jar 439 | 440 | 441 | def create_cookie(name, value, **kwargs): 442 | """Make a cookie from underspecified parameters. 443 | 444 | By default, the pair of `name` and `value` will be set for the domain '' 445 | and sent on every request (this is sometimes called a "supercookie"). 446 | """ 447 | result = { 448 | 'version': 0, 449 | 'name': name, 450 | 'value': value, 451 | 'port': None, 452 | 'domain': '', 453 | 'path': '/', 454 | 'secure': False, 455 | 'expires': None, 456 | 'discard': True, 457 | 'comment': None, 458 | 'comment_url': None, 459 | 'rest': {'HttpOnly': None}, 460 | 'rfc2109': False, 461 | } 462 | 463 | badargs = set(kwargs) - set(result) 464 | if badargs: 465 | err = 'create_cookie() got unexpected keyword arguments: %s' 466 | raise TypeError(err % list(badargs)) 467 | 468 | result.update(kwargs) 469 | result['port_specified'] = bool(result['port']) 470 | result['domain_specified'] = bool(result['domain']) 471 | result['domain_initial_dot'] = result['domain'].startswith('.') 472 | result['path_specified'] = bool(result['path']) 473 | 474 | return cookielib.Cookie(**result) 475 | 476 | 477 | def morsel_to_cookie(morsel): 478 | """Convert a Morsel object into a Cookie containing the one k/v pair.""" 479 | 480 | expires = None 481 | if morsel['max-age']: 482 | try: 483 | expires = int(time.time() + int(morsel['max-age'])) 484 | except ValueError: 485 | raise TypeError('max-age: %s must be integer' % morsel['max-age']) 486 | elif morsel['expires']: 487 | time_template = '%a, %d-%b-%Y %H:%M:%S GMT' 488 | expires = calendar.timegm( 489 | time.strptime(morsel['expires'], time_template) 490 | ) 491 | return create_cookie( 492 | comment=morsel['comment'], 493 | comment_url=bool(morsel['comment']), 494 | discard=False, 495 | domain=morsel['domain'], 496 | expires=expires, 497 | name=morsel.key, 498 | 
path=morsel['path'], 499 | port=None, 500 | rest={'HttpOnly': morsel['httponly']}, 501 | rfc2109=False, 502 | secure=bool(morsel['secure']), 503 | value=morsel.value, 504 | version=morsel['version'] or 0, 505 | ) 506 | 507 | 508 | def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): 509 | """Returns a CookieJar from a key/value dictionary. 510 | 511 | :param cookie_dict: Dict of key/values to insert into CookieJar. 512 | :param cookiejar: (optional) A cookiejar to add the cookies to. 513 | :param overwrite: (optional) If False, will not replace cookies 514 | already in the jar with new ones. 515 | :rtype: CookieJar 516 | """ 517 | if cookiejar is None: 518 | cookiejar = RequestsCookieJar() 519 | 520 | if cookie_dict is not None: 521 | names_from_jar = [cookie.name for cookie in cookiejar] 522 | for name in cookie_dict: 523 | if overwrite or (name not in names_from_jar): 524 | cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) 525 | 526 | return cookiejar 527 | 528 | 529 | def merge_cookies(cookiejar, cookies): 530 | """Add cookies to cookiejar and returns a merged CookieJar. 531 | 532 | :param cookiejar: CookieJar object to add the cookies to. 533 | :param cookies: Dictionary or CookieJar object to be added. 
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    :rtype: CookieJar
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        # Existing cookies win: overwrite=False keeps what is already there.
        return cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False)

    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain cookielib jars have no update(); copy cookie by cookie.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
class BaseAdapter(object):
    """The Base Transport Adapter.

    Defines the interface every transport adapter must implement; both
    methods are abstract and raise until overridden by a subclass.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest ` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) ` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items."""
        raise NotImplementedError
    # Attributes captured by __getstate__ for pickling.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        """Build the adapter and its urllib3 PoolManager.

        :param pool_connections: number of connection pools to cache.
        :param pool_maxsize: maximum connections kept per pool.
        :param max_retries: int or urllib3 ``Retry`` controlling retries.
        :param pool_block: whether the pool blocks when exhausted.
        """
        if max_retries == DEFAULT_RETRIES:
            # Default: no retries, and don't retry on read errors either.
            self.max_retries = Retry(0, read=False)
        else:
            # Accepts either an int or an already-built Retry instance.
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        # Saved so __getstate__/init_poolmanager can rebuild the pool later.
        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        # Pickle only the simple attributes listed in __attrs__; the
        # poolmanager itself is rebuilt in __setstate__.
        return {attr: getattr(self, attr, None) for attr in self.__attrs__}
    def __setstate__(self, state):
        """Restore a pickled adapter; rebuilds the unpickleable pool state."""
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter `.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)
    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter `.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        # Managers are cached per proxy URL so pools are reused across requests.
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            # SOCKS proxies take credentials directly rather than via headers.
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            # HTTP(S) proxy: auth travels in the Proxy-Authorization header.
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager
    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter `.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Either a boolean, in which case it controls whether we verify
            the server's TLS certificate, or a string, in which case it must be a path
            to a CA bundle to use
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            # Fall back to the bundled CA file, extracting it from a zip
            # (e.g. a frozen app) if necessary.
            if not cert_loc:
                cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

            if not cert_loc or not os.path.exists(cert_loc):
                raise IOError("Could not find a suitable TLS CA certificate bundle, "
                              "invalid path: {}".format(cert_loc))

            conn.cert_reqs = 'CERT_REQUIRED'

            # A file becomes ca_certs; a directory becomes ca_cert_dir.
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            # Verification disabled (or plain http): clear any CA settings.
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # cert may be a (cert_file, key_file) pair or a single path.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
                conn.key_file = None
            if conn.cert_file and not os.path.exists(conn.cert_file):
                raise IOError("Could not find the TLS certificate file, "
                              "invalid path: {}".format(conn.cert_file))
            if conn.key_file and not os.path.exists(conn.key_file):
                raise IOError("Could not find the TLS key file, "
                              "invalid path: {}".format(conn.key_file))
274 | response.encoding = get_encoding_from_headers(response.headers) 275 | response.raw = resp 276 | response.reason = response.raw.reason 277 | 278 | if isinstance(req.url, bytes): 279 | response.url = req.url.decode('utf-8') 280 | else: 281 | response.url = req.url 282 | 283 | # Add new cookies from the server. 284 | extract_cookies_to_jar(response.cookies, req, resp) 285 | 286 | # Give the Response some context. 287 | response.request = req 288 | response.connection = self 289 | 290 | return response 291 | 292 | def get_connection(self, url, proxies=None): 293 | """Returns a urllib3 connection for the given URL. This should not be 294 | called from user code, and is only exposed for use when subclassing the 295 | :class:`HTTPAdapter `. 296 | 297 | :param url: The URL to connect to. 298 | :param proxies: (optional) A Requests-style dictionary of proxies used on this request. 299 | :rtype: urllib3.ConnectionPool 300 | """ 301 | proxy = select_proxy(url, proxies) 302 | 303 | if proxy: 304 | proxy = prepend_scheme_if_needed(proxy, 'http') 305 | proxy_url = parse_url(proxy) 306 | if not proxy_url.host: 307 | raise InvalidProxyURL("Please check proxy URL. It is malformed" 308 | " and could be missing the host.") 309 | proxy_manager = self.proxy_manager_for(proxy) 310 | conn = proxy_manager.connection_from_url(url) 311 | else: 312 | # Only scheme should be lower case 313 | parsed = urlparse(url) 314 | url = parsed.geturl() 315 | conn = self.poolmanager.connection_from_url(url) 316 | 317 | return conn 318 | 319 | def close(self): 320 | """Disposes of any internal state. 321 | 322 | Currently, this closes the PoolManager and any active ProxyManager, 323 | which closes any pooled connections. 324 | """ 325 | self.poolmanager.clear() 326 | for proxy in self.proxy_manager.values(): 327 | proxy.clear() 328 | 329 | def request_url(self, request, proxies): 330 | """Obtain the url to use when making the final request. 
331 | 332 | If the message is being sent through a HTTP proxy, the full URL has to 333 | be used. Otherwise, we should only use the path portion of the URL. 334 | 335 | This should not be called from user code, and is only exposed for use 336 | when subclassing the 337 | :class:`HTTPAdapter `. 338 | 339 | :param request: The :class:`PreparedRequest ` being sent. 340 | :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. 341 | :rtype: str 342 | """ 343 | proxy = select_proxy(request.url, proxies) 344 | scheme = urlparse(request.url).scheme 345 | 346 | is_proxied_http_request = (proxy and scheme != 'https') 347 | using_socks_proxy = False 348 | if proxy: 349 | proxy_scheme = urlparse(proxy).scheme.lower() 350 | using_socks_proxy = proxy_scheme.startswith('socks') 351 | 352 | url = request.path_url 353 | if is_proxied_http_request and not using_socks_proxy: 354 | url = urldefragauth(request.url) 355 | 356 | return url 357 | 358 | def add_headers(self, request, **kwargs): 359 | """Add any headers needed by the connection. As of v2.0 this does 360 | nothing by default, but is left for overriding by users that subclass 361 | the :class:`HTTPAdapter `. 362 | 363 | This should not be called from user code, and is only exposed for use 364 | when subclassing the 365 | :class:`HTTPAdapter `. 366 | 367 | :param request: The :class:`PreparedRequest ` to add headers to. 368 | :param kwargs: The keyword arguments from the call to send(). 369 | """ 370 | pass 371 | 372 | def proxy_headers(self, proxy): 373 | """Returns a dictionary of the headers to add to any request sent 374 | through a proxy. This works with urllib3 magic to ensure that they are 375 | correctly sent to the proxy, rather than in a tunnelled request if 376 | CONNECT is being used. 377 | 378 | This should not be called from user code, and is only exposed for use 379 | when subclassing the 380 | :class:`HTTPAdapter `. 381 | 382 | :param proxy: The url of the proxy being used for this request. 
    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest ` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) ` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """

        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)

        # A body without a Content-Length is sent with chunked transfer encoding.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        # Normalize timeout to a urllib3 Timeout object.
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                # Chunked path: drive the low-level httplib connection by hand.
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Emit each body piece as a chunk: hex length, CRLF,
                    # data, CRLF; then the zero-length terminating chunk.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7, use buffering of HTTP responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 3.3+
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            # Map urllib3's retry failure onto the most specific requests
            # exception; order of these isinstance checks matters.
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            if isinstance(e.reason, _SSLError):
                # This branch is for urllib3 v1.22 and later.
                raise SSLError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                # This branch is for urllib3 versions earlier than v1.22
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
9 | """ 10 | import os 11 | import sys 12 | import time 13 | from datetime import timedelta 14 | from collections import OrderedDict 15 | 16 | from .auth import _basic_auth_str 17 | from .compat import cookielib, is_py3, urljoin, urlparse, Mapping 18 | from .cookies import ( 19 | cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) 20 | from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT 21 | from .hooks import default_hooks, dispatch_hook 22 | from ._internal_utils import to_native_string 23 | from .utils import to_key_val_list, default_headers, DEFAULT_PORTS 24 | from .exceptions import ( 25 | TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) 26 | 27 | from .structures import CaseInsensitiveDict 28 | from .adapters import HTTPAdapter 29 | 30 | from .utils import ( 31 | requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, 32 | get_auth_from_url, rewind_body 33 | ) 34 | 35 | from .status_codes import codes 36 | 37 | # formerly defined here, reexposed here for backward compatibility 38 | from .models import REDIRECT_STATI 39 | 40 | # Preferred clock, based on which one is more accurate on a given system. 41 | if sys.platform == 'win32': 42 | try: # Python 3.4+ 43 | preferred_clock = time.perf_counter 44 | except AttributeError: # Earlier than Python 3. 45 | preferred_clock = time.clock 46 | else: 47 | preferred_clock = time.time 48 | 49 | 50 | def merge_setting(request_setting, session_setting, dict_class=OrderedDict): 51 | """Determines appropriate setting for a given request, taking into account 52 | the explicit setting on that request, and the setting in the session. If a 53 | setting is a dictionary, they will be merged together using `dict_class` 54 | """ 55 | 56 | if session_setting is None: 57 | return request_setting 58 | 59 | if request_setting is None: 60 | return session_setting 61 | 62 | # Bypass if not a dictionary (e.g. 
verify) 63 | if not ( 64 | isinstance(session_setting, Mapping) and 65 | isinstance(request_setting, Mapping) 66 | ): 67 | return request_setting 68 | 69 | merged_setting = dict_class(to_key_val_list(session_setting)) 70 | merged_setting.update(to_key_val_list(request_setting)) 71 | 72 | # Remove keys that are set to None. Extract keys first to avoid altering 73 | # the dictionary during iteration. 74 | none_keys = [k for (k, v) in merged_setting.items() if v is None] 75 | for key in none_keys: 76 | del merged_setting[key] 77 | 78 | return merged_setting 79 | 80 | 81 | def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): 82 | """Properly merges both requests and session hooks. 83 | 84 | This is necessary because when request_hooks == {'response': []}, the 85 | merge breaks Session hooks entirely. 86 | """ 87 | if session_hooks is None or session_hooks.get('response') == []: 88 | return request_hooks 89 | 90 | if request_hooks is None or request_hooks.get('response') == []: 91 | return session_hooks 92 | 93 | return merge_setting(request_hooks, session_hooks, dict_class) 94 | 95 | 96 | class SessionRedirectMixin(object): 97 | 98 | def get_redirect_target(self, resp): 99 | """Receives a Response. Returns a redirect URI or ``None``""" 100 | # Due to the nature of how requests processes redirects this method will 101 | # be called at least once upon the original response and at least twice 102 | # on each subsequent redirect response (if any). 103 | # If a custom mixin is used to handle this logic, it may be advantageous 104 | # to cache the redirect location onto the response object as a private 105 | # attribute. 106 | if resp.is_redirect: 107 | location = resp.headers['location'] 108 | # Currently the underlying http module on py3 decode headers 109 | # in latin1, but empirical evidence suggests that latin1 is very 110 | # rarely used with non-ASCII characters in HTTP headers. 111 | # It is more likely to get UTF8 header rather than latin1. 
112 | # This causes incorrect handling of UTF8 encoded location headers. 113 | # To solve this, we re-encode the location in latin1. 114 | if is_py3: 115 | location = location.encode('latin1') 116 | return to_native_string(location, 'utf8') 117 | return None 118 | 119 | def should_strip_auth(self, old_url, new_url): 120 | """Decide whether Authorization header should be removed when redirecting""" 121 | old_parsed = urlparse(old_url) 122 | new_parsed = urlparse(new_url) 123 | if old_parsed.hostname != new_parsed.hostname: 124 | return True 125 | # Special case: allow http -> https redirect when using the standard 126 | # ports. This isn't specified by RFC 7235, but is kept to avoid 127 | # breaking backwards compatibility with older versions of requests 128 | # that allowed any redirects on the same host. 129 | if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) 130 | and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): 131 | return False 132 | 133 | # Handle default port usage corresponding to scheme. 134 | changed_port = old_parsed.port != new_parsed.port 135 | changed_scheme = old_parsed.scheme != new_parsed.scheme 136 | default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) 137 | if (not changed_scheme and old_parsed.port in default_port 138 | and new_parsed.port in default_port): 139 | return False 140 | 141 | # Standard case: root URI must match 142 | return changed_port or changed_scheme 143 | 144 | def resolve_redirects(self, resp, req, stream=False, timeout=None, 145 | verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): 146 | """Receives a Response. Returns a generator of Responses or Requests.""" 147 | 148 | hist = [] # keep track of history 149 | 150 | url = self.get_redirect_target(resp) 151 | previous_fragment = urlparse(req.url).fragment 152 | while url: 153 | prepared_request = req.copy() 154 | 155 | # Update history and keep track of redirects. 
156 | # resp.history must ignore the original request in this loop 157 | hist.append(resp) 158 | resp.history = hist[1:] 159 | 160 | try: 161 | resp.content # Consume socket so it can be released 162 | except (ChunkedEncodingError, ContentDecodingError, RuntimeError): 163 | resp.raw.read(decode_content=False) 164 | 165 | if len(resp.history) >= self.max_redirects: 166 | raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) 167 | 168 | # Release the connection back into the pool. 169 | resp.close() 170 | 171 | # Handle redirection without scheme (see: RFC 1808 Section 4) 172 | if url.startswith('//'): 173 | parsed_rurl = urlparse(resp.url) 174 | url = ':'.join([to_native_string(parsed_rurl.scheme), url]) 175 | 176 | # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) 177 | parsed = urlparse(url) 178 | if parsed.fragment == '' and previous_fragment: 179 | parsed = parsed._replace(fragment=previous_fragment) 180 | elif parsed.fragment: 181 | previous_fragment = parsed.fragment 182 | url = parsed.geturl() 183 | 184 | # Facilitate relative 'location' headers, as allowed by RFC 7231. 185 | # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') 186 | # Compliant with RFC3986, we percent encode the url. 
187 | if not parsed.netloc: 188 | url = urljoin(resp.url, requote_uri(url)) 189 | else: 190 | url = requote_uri(url) 191 | 192 | prepared_request.url = to_native_string(url) 193 | 194 | self.rebuild_method(prepared_request, resp) 195 | 196 | # https://github.com/psf/requests/issues/1084 197 | if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): 198 | # https://github.com/psf/requests/issues/3490 199 | purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') 200 | for header in purged_headers: 201 | prepared_request.headers.pop(header, None) 202 | prepared_request.body = None 203 | 204 | headers = prepared_request.headers 205 | headers.pop('Cookie', None) 206 | 207 | # Extract any cookies sent on the response to the cookiejar 208 | # in the new request. Because we've mutated our copied prepared 209 | # request, use the old one that we haven't yet touched. 210 | extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) 211 | merge_cookies(prepared_request._cookies, self.cookies) 212 | prepared_request.prepare_cookies(prepared_request._cookies) 213 | 214 | # Rebuild auth and proxy information. 215 | proxies = self.rebuild_proxies(prepared_request, proxies) 216 | self.rebuild_auth(prepared_request, resp) 217 | 218 | # A failed tell() sets `_body_position` to `object()`. This non-None 219 | # value ensures `rewindable` will be True, allowing us to raise an 220 | # UnrewindableBodyError, instead of hanging the connection. 221 | rewindable = ( 222 | prepared_request._body_position is not None and 223 | ('Content-Length' in headers or 'Transfer-Encoding' in headers) 224 | ) 225 | 226 | # Attempt to rewind consumed file-like object. 227 | if rewindable: 228 | rewind_body(prepared_request) 229 | 230 | # Override the original request. 
231 | req = prepared_request 232 | 233 | if yield_requests: 234 | yield req 235 | else: 236 | 237 | resp = self.send( 238 | req, 239 | stream=stream, 240 | timeout=timeout, 241 | verify=verify, 242 | cert=cert, 243 | proxies=proxies, 244 | allow_redirects=False, 245 | **adapter_kwargs 246 | ) 247 | 248 | extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) 249 | 250 | # extract redirect url, if any, for the next loop 251 | url = self.get_redirect_target(resp) 252 | yield resp 253 | 254 | def rebuild_auth(self, prepared_request, response): 255 | """When being redirected we may want to strip authentication from the 256 | request to avoid leaking credentials. This method intelligently removes 257 | and reapplies authentication where possible to avoid credential loss. 258 | """ 259 | headers = prepared_request.headers 260 | url = prepared_request.url 261 | 262 | if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): 263 | # If we get redirected to a new host, we should strip out any 264 | # authentication headers. 265 | del headers['Authorization'] 266 | 267 | # .netrc might have more auth for us on our new host. 268 | new_auth = get_netrc_auth(url) if self.trust_env else None 269 | if new_auth is not None: 270 | prepared_request.prepare_auth(new_auth) 271 | 272 | 273 | def rebuild_proxies(self, prepared_request, proxies): 274 | """This method re-evaluates the proxy configuration by considering the 275 | environment variables. If we are redirected to a URL covered by 276 | NO_PROXY, we strip the proxy configuration. Otherwise, we set missing 277 | proxy keys for this URL (in case they were stripped by a previous 278 | redirect). 279 | 280 | This method also replaces the Proxy-Authorization header where 281 | necessary. 
282 | 283 | :rtype: dict 284 | """ 285 | proxies = proxies if proxies is not None else {} 286 | headers = prepared_request.headers 287 | url = prepared_request.url 288 | scheme = urlparse(url).scheme 289 | new_proxies = proxies.copy() 290 | no_proxy = proxies.get('no_proxy') 291 | 292 | bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy) 293 | if self.trust_env and not bypass_proxy: 294 | environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) 295 | 296 | proxy = environ_proxies.get(scheme, environ_proxies.get('all')) 297 | 298 | if proxy: 299 | new_proxies.setdefault(scheme, proxy) 300 | 301 | if 'Proxy-Authorization' in headers: 302 | del headers['Proxy-Authorization'] 303 | 304 | try: 305 | username, password = get_auth_from_url(new_proxies[scheme]) 306 | except KeyError: 307 | username, password = None, None 308 | 309 | if username and password: 310 | headers['Proxy-Authorization'] = _basic_auth_str(username, password) 311 | 312 | return new_proxies 313 | 314 | def rebuild_method(self, prepared_request, response): 315 | """When being redirected we may want to change the method of the request 316 | based on certain specs or browser behavior. 317 | """ 318 | method = prepared_request.method 319 | 320 | # https://tools.ietf.org/html/rfc7231#section-6.4.4 321 | if response.status_code == codes.see_other and method != 'HEAD': 322 | method = 'GET' 323 | 324 | # Do what the browsers do, despite standards... 325 | # First, turn 302s into GETs. 326 | if response.status_code == codes.found and method != 'HEAD': 327 | method = 'GET' 328 | 329 | # Second, if a POST is responded to with a 301, turn it into a GET. 330 | # This bizarre behaviour is explained in Issue 1704. 331 | if response.status_code == codes.moved and method == 'POST': 332 | method = 'GET' 333 | 334 | prepared_request.method = method 335 | 336 | 337 | class Session(SessionRedirectMixin): 338 | """A Requests session. 
339 | 340 | Provides cookie persistence, connection-pooling, and configuration. 341 | 342 | Basic Usage:: 343 | 344 | >>> import requests 345 | >>> s = requests.Session() 346 | >>> s.get('https://httpbin.org/get') 347 | 348 | 349 | Or as a context manager:: 350 | 351 | >>> with requests.Session() as s: 352 | ... s.get('https://httpbin.org/get') 353 | 354 | """ 355 | 356 | __attrs__ = [ 357 | 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', 358 | 'cert', 'adapters', 'stream', 'trust_env', 359 | 'max_redirects', 360 | ] 361 | 362 | def __init__(self): 363 | 364 | #: A case-insensitive dictionary of headers to be sent on each 365 | #: :class:`Request ` sent from this 366 | #: :class:`Session `. 367 | self.headers = default_headers() 368 | 369 | #: Default Authentication tuple or object to attach to 370 | #: :class:`Request `. 371 | self.auth = None 372 | 373 | #: Dictionary mapping protocol or protocol and host to the URL of the proxy 374 | #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to 375 | #: be used on each :class:`Request `. 376 | self.proxies = {} 377 | 378 | #: Event-handling hooks. 379 | self.hooks = default_hooks() 380 | 381 | #: Dictionary of querystring data to attach to each 382 | #: :class:`Request `. The dictionary values may be lists for 383 | #: representing multivalued query parameters. 384 | self.params = {} 385 | 386 | #: Stream response content default. 387 | self.stream = False 388 | 389 | #: SSL Verification default. 390 | #: Defaults to `True`, requiring requests to verify the TLS certificate at the 391 | #: remote end. 392 | #: If verify is set to `False`, requests will accept any TLS certificate 393 | #: presented by the server, and will ignore hostname mismatches and/or 394 | #: expired certificates, which will make your application vulnerable to 395 | #: man-in-the-middle (MitM) attacks. 396 | #: Only set this to `False` for testing. 
397 | self.verify = True 398 | 399 | #: SSL client certificate default, if String, path to ssl client 400 | #: cert file (.pem). If Tuple, ('cert', 'key') pair. 401 | self.cert = None 402 | 403 | #: Maximum number of redirects allowed. If the request exceeds this 404 | #: limit, a :class:`TooManyRedirects` exception is raised. 405 | #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is 406 | #: 30. 407 | self.max_redirects = DEFAULT_REDIRECT_LIMIT 408 | 409 | #: Trust environment settings for proxy configuration, default 410 | #: authentication and similar. 411 | self.trust_env = True 412 | 413 | #: A CookieJar containing all currently outstanding cookies set on this 414 | #: session. By default it is a 415 | #: :class:`RequestsCookieJar `, but 416 | #: may be any other ``cookielib.CookieJar`` compatible object. 417 | self.cookies = cookiejar_from_dict({}) 418 | 419 | # Default connection adapters. 420 | self.adapters = OrderedDict() 421 | self.mount('https://', HTTPAdapter()) 422 | self.mount('http://', HTTPAdapter()) 423 | 424 | def __enter__(self): 425 | return self 426 | 427 | def __exit__(self, *args): 428 | self.close() 429 | 430 | def prepare_request(self, request): 431 | """Constructs a :class:`PreparedRequest ` for 432 | transmission and returns it. The :class:`PreparedRequest` has settings 433 | merged from the :class:`Request ` instance and those of the 434 | :class:`Session`. 435 | 436 | :param request: :class:`Request` instance to prepare with this 437 | session's settings. 438 | :rtype: requests.PreparedRequest 439 | """ 440 | cookies = request.cookies or {} 441 | 442 | # Bootstrap CookieJar. 443 | if not isinstance(cookies, cookielib.CookieJar): 444 | cookies = cookiejar_from_dict(cookies) 445 | 446 | # Merge with session cookies 447 | merged_cookies = merge_cookies( 448 | merge_cookies(RequestsCookieJar(), self.cookies), cookies) 449 | 450 | # Set environment's basic authentication if not explicitly set. 
451 | auth = request.auth 452 | if self.trust_env and not auth and not self.auth: 453 | auth = get_netrc_auth(request.url) 454 | 455 | p = PreparedRequest() 456 | p.prepare( 457 | method=request.method.upper(), 458 | url=request.url, 459 | files=request.files, 460 | data=request.data, 461 | json=request.json, 462 | headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), 463 | params=merge_setting(request.params, self.params), 464 | auth=merge_setting(auth, self.auth), 465 | cookies=merged_cookies, 466 | hooks=merge_hooks(request.hooks, self.hooks), 467 | ) 468 | return p 469 | 470 | def request(self, method, url, 471 | params=None, data=None, headers=None, cookies=None, files=None, 472 | auth=None, timeout=None, allow_redirects=True, proxies=None, 473 | hooks=None, stream=None, verify=None, cert=None, json=None): 474 | """Constructs a :class:`Request `, prepares it and sends it. 475 | Returns :class:`Response ` object. 476 | 477 | :param method: method for the new :class:`Request` object. 478 | :param url: URL for the new :class:`Request` object. 479 | :param params: (optional) Dictionary or bytes to be sent in the query 480 | string for the :class:`Request`. 481 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 482 | object to send in the body of the :class:`Request`. 483 | :param json: (optional) json to send in the body of the 484 | :class:`Request`. 485 | :param headers: (optional) Dictionary of HTTP Headers to send with the 486 | :class:`Request`. 487 | :param cookies: (optional) Dict or CookieJar object to send with the 488 | :class:`Request`. 489 | :param files: (optional) Dictionary of ``'filename': file-like-objects`` 490 | for multipart encoding upload. 491 | :param auth: (optional) Auth tuple or callable to enable 492 | Basic/Digest/Custom HTTP Auth. 
493 | :param timeout: (optional) How long to wait for the server to send 494 | data before giving up, as a float, or a :ref:`(connect timeout, 495 | read timeout) ` tuple. 496 | :type timeout: float or tuple 497 | :param allow_redirects: (optional) Set to True by default. 498 | :type allow_redirects: bool 499 | :param proxies: (optional) Dictionary mapping protocol or protocol and 500 | hostname to the URL of the proxy. 501 | :param stream: (optional) whether to immediately download the response 502 | content. Defaults to ``False``. 503 | :param verify: (optional) Either a boolean, in which case it controls whether we verify 504 | the server's TLS certificate, or a string, in which case it must be a path 505 | to a CA bundle to use. Defaults to ``True``. When set to 506 | ``False``, requests will accept any TLS certificate presented by 507 | the server, and will ignore hostname mismatches and/or expired 508 | certificates, which will make your application vulnerable to 509 | man-in-the-middle (MitM) attacks. Setting verify to ``False`` 510 | may be useful during local development or testing. 511 | :param cert: (optional) if String, path to ssl client cert file (.pem). 512 | If Tuple, ('cert', 'key') pair. 513 | :rtype: requests.Response 514 | """ 515 | # Create the Request. 516 | req = Request( 517 | method=method.upper(), 518 | url=url, 519 | headers=headers, 520 | files=files, 521 | data=data or {}, 522 | json=json, 523 | params=params or {}, 524 | auth=auth, 525 | cookies=cookies, 526 | hooks=hooks, 527 | ) 528 | prep = self.prepare_request(req) 529 | 530 | proxies = proxies or {} 531 | 532 | settings = self.merge_environment_settings( 533 | prep.url, proxies, stream, verify, cert 534 | ) 535 | 536 | # Send the request. 
537 | send_kwargs = { 538 | 'timeout': timeout, 539 | 'allow_redirects': allow_redirects, 540 | } 541 | send_kwargs.update(settings) 542 | resp = self.send(prep, **send_kwargs) 543 | 544 | return resp 545 | 546 | def get(self, url, **kwargs): 547 | r"""Sends a GET request. Returns :class:`Response` object. 548 | 549 | :param url: URL for the new :class:`Request` object. 550 | :param \*\*kwargs: Optional arguments that ``request`` takes. 551 | :rtype: requests.Response 552 | """ 553 | 554 | kwargs.setdefault('allow_redirects', True) 555 | return self.request('GET', url, **kwargs) 556 | 557 | def options(self, url, **kwargs): 558 | r"""Sends a OPTIONS request. Returns :class:`Response` object. 559 | 560 | :param url: URL for the new :class:`Request` object. 561 | :param \*\*kwargs: Optional arguments that ``request`` takes. 562 | :rtype: requests.Response 563 | """ 564 | 565 | kwargs.setdefault('allow_redirects', True) 566 | return self.request('OPTIONS', url, **kwargs) 567 | 568 | def head(self, url, **kwargs): 569 | r"""Sends a HEAD request. Returns :class:`Response` object. 570 | 571 | :param url: URL for the new :class:`Request` object. 572 | :param \*\*kwargs: Optional arguments that ``request`` takes. 573 | :rtype: requests.Response 574 | """ 575 | 576 | kwargs.setdefault('allow_redirects', False) 577 | return self.request('HEAD', url, **kwargs) 578 | 579 | def post(self, url, data=None, json=None, **kwargs): 580 | r"""Sends a POST request. Returns :class:`Response` object. 581 | 582 | :param url: URL for the new :class:`Request` object. 583 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 584 | object to send in the body of the :class:`Request`. 585 | :param json: (optional) json to send in the body of the :class:`Request`. 586 | :param \*\*kwargs: Optional arguments that ``request`` takes. 
587 | :rtype: requests.Response 588 | """ 589 | 590 | return self.request('POST', url, data=data, json=json, **kwargs) 591 | 592 | def put(self, url, data=None, **kwargs): 593 | r"""Sends a PUT request. Returns :class:`Response` object. 594 | 595 | :param url: URL for the new :class:`Request` object. 596 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 597 | object to send in the body of the :class:`Request`. 598 | :param \*\*kwargs: Optional arguments that ``request`` takes. 599 | :rtype: requests.Response 600 | """ 601 | 602 | return self.request('PUT', url, data=data, **kwargs) 603 | 604 | def patch(self, url, data=None, **kwargs): 605 | r"""Sends a PATCH request. Returns :class:`Response` object. 606 | 607 | :param url: URL for the new :class:`Request` object. 608 | :param data: (optional) Dictionary, list of tuples, bytes, or file-like 609 | object to send in the body of the :class:`Request`. 610 | :param \*\*kwargs: Optional arguments that ``request`` takes. 611 | :rtype: requests.Response 612 | """ 613 | 614 | return self.request('PATCH', url, data=data, **kwargs) 615 | 616 | def delete(self, url, **kwargs): 617 | r"""Sends a DELETE request. Returns :class:`Response` object. 618 | 619 | :param url: URL for the new :class:`Request` object. 620 | :param \*\*kwargs: Optional arguments that ``request`` takes. 621 | :rtype: requests.Response 622 | """ 623 | 624 | return self.request('DELETE', url, **kwargs) 625 | 626 | def send(self, request, **kwargs): 627 | """Send a given PreparedRequest. 628 | 629 | :rtype: requests.Response 630 | """ 631 | # Set defaults that the hooks can utilize to ensure they always have 632 | # the correct parameters to reproduce the previous request. 633 | kwargs.setdefault('stream', self.stream) 634 | kwargs.setdefault('verify', self.verify) 635 | kwargs.setdefault('cert', self.cert) 636 | kwargs.setdefault('proxies', self.proxies) 637 | 638 | # It's possible that users might accidentally send a Request object. 
639 | # Guard against that specific failure case. 640 | if isinstance(request, Request): 641 | raise ValueError('You can only send PreparedRequests.') 642 | 643 | # Set up variables needed for resolve_redirects and dispatching of hooks 644 | allow_redirects = kwargs.pop('allow_redirects', True) 645 | stream = kwargs.get('stream') 646 | hooks = request.hooks 647 | 648 | # Get the appropriate adapter to use 649 | adapter = self.get_adapter(url=request.url) 650 | 651 | # Start time (approximately) of the request 652 | start = preferred_clock() 653 | 654 | # Send the request 655 | r = adapter.send(request, **kwargs) 656 | 657 | # Total elapsed time of the request (approximately) 658 | elapsed = preferred_clock() - start 659 | r.elapsed = timedelta(seconds=elapsed) 660 | 661 | # Response manipulation hooks 662 | r = dispatch_hook('response', hooks, r, **kwargs) 663 | 664 | # Persist cookies 665 | if r.history: 666 | 667 | # If the hooks create history then we want those cookies too 668 | for resp in r.history: 669 | extract_cookies_to_jar(self.cookies, resp.request, resp.raw) 670 | 671 | extract_cookies_to_jar(self.cookies, request, r.raw) 672 | 673 | # Resolve redirects if allowed. 674 | if allow_redirects: 675 | # Redirect resolving generator. 676 | gen = self.resolve_redirects(r, request, **kwargs) 677 | history = [resp for resp in gen] 678 | else: 679 | history = [] 680 | 681 | # Shuffle things around if there's history. 682 | if history: 683 | # Insert the first (original) request at the start 684 | history.insert(0, r) 685 | # Get the last request made 686 | r = history.pop() 687 | r.history = history 688 | 689 | # If redirects aren't being followed, store the response on the Request for Response.next(). 
690 | if not allow_redirects: 691 | try: 692 | r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) 693 | except StopIteration: 694 | pass 695 | 696 | if not stream: 697 | r.content 698 | 699 | return r 700 | 701 | def merge_environment_settings(self, url, proxies, stream, verify, cert): 702 | """ 703 | Check the environment and merge it with some settings. 704 | 705 | :rtype: dict 706 | """ 707 | # Gather clues from the surrounding environment. 708 | if self.trust_env: 709 | # Set environment's proxies. 710 | no_proxy = proxies.get('no_proxy') if proxies is not None else None 711 | env_proxies = get_environ_proxies(url, no_proxy=no_proxy) 712 | for (k, v) in env_proxies.items(): 713 | proxies.setdefault(k, v) 714 | 715 | # Look for requests environment configuration and be compatible 716 | # with cURL. 717 | if verify is True or verify is None: 718 | verify = (os.environ.get('REQUESTS_CA_BUNDLE') or 719 | os.environ.get('CURL_CA_BUNDLE')) 720 | 721 | # Merge all the kwargs. 722 | proxies = merge_setting(proxies, self.proxies) 723 | stream = merge_setting(stream, self.stream) 724 | verify = merge_setting(verify, self.verify) 725 | cert = merge_setting(cert, self.cert) 726 | 727 | return {'verify': verify, 'proxies': proxies, 'stream': stream, 728 | 'cert': cert} 729 | 730 | def get_adapter(self, url): 731 | """ 732 | Returns the appropriate connection adapter for the given URL. 733 | 734 | :rtype: requests.adapters.BaseAdapter 735 | """ 736 | for (prefix, adapter) in self.adapters.items(): 737 | 738 | if url.lower().startswith(prefix.lower()): 739 | return adapter 740 | 741 | # Nothing matches :-/ 742 | raise InvalidSchema("No connection adapters were found for {!r}".format(url)) 743 | 744 | def close(self): 745 | """Closes all adapters and as such the session""" 746 | for v in self.adapters.values(): 747 | v.close() 748 | 749 | def mount(self, prefix, adapter): 750 | """Registers a connection adapter to a prefix. 
751 | 752 | Adapters are sorted in descending order by prefix length. 753 | """ 754 | self.adapters[prefix] = adapter 755 | keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] 756 | 757 | for key in keys_to_move: 758 | self.adapters[key] = self.adapters.pop(key) 759 | 760 | def __getstate__(self): 761 | state = {attr: getattr(self, attr, None) for attr in self.__attrs__} 762 | return state 763 | 764 | def __setstate__(self, state): 765 | for attr, value in state.items(): 766 | setattr(self, attr, value) 767 | 768 | 769 | def session(): 770 | """ 771 | Returns a :class:`Session` for context-management. 772 | 773 | .. deprecated:: 1.0.0 774 | 775 | This method has been deprecated since version 1.0.0 and is only kept for 776 | backwards compatibility. New code should use :class:`~requests.sessions.Session` 777 | to create a session. This may be removed at a future date. 778 | 779 | :rtype: Session 780 | """ 781 | return Session() 782 | -------------------------------------------------------------------------------- /requests/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.utils 5 | ~~~~~~~~~~~~~~ 6 | 7 | This module provides utility functions that are used within Requests 8 | that are also useful for external consumption. 9 | """ 10 | 11 | import codecs 12 | import contextlib 13 | import io 14 | import os 15 | import re 16 | import socket 17 | import struct 18 | import sys 19 | import tempfile 20 | import warnings 21 | import zipfile 22 | from collections import OrderedDict 23 | 24 | from .__version__ import __version__ 25 | from . 
from . import certs
# to_native_string is unused here, but imported here for backwards compatibility
from ._internal_utils import to_native_string
from .compat import parse_http_list as _parse_list_header
from .compat import (
    quote, urlparse, bytes, str, unquote, getproxies,
    proxy_bypass, urlunparse, basestring, integer_types, is_py3,
    proxy_bypass_environment, getproxies_environment, Mapping)
from .cookies import cookiejar_from_dict
from .structures import CaseInsensitiveDict
from .exceptions import (
    InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)

NETRC_FILES = ('.netrc', '_netrc')

DEFAULT_CA_BUNDLE_PATH = certs.where()

DEFAULT_PORTS = {'http': 80, 'https': 443}


if sys.platform == 'win32':
    # provide a proxy_bypass version on Windows without DNS lookups

    def proxy_bypass_registry(host):
        """Return True if `host` matches the Windows registry proxy-override
        list (glob patterns separated by ';', with the special '<local>'
        entry matching dotless hostnames)."""
        try:
            if is_py3:
                import winreg
            else:
                import _winreg as winreg
        except ImportError:
            return False

        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
            proxyEnable = int(winreg.QueryValueEx(internetSettings,
                                                  'ProxyEnable')[0])
            # ProxyOverride is almost always a string
            proxyOverride = winreg.QueryValueEx(internetSettings,
                                                'ProxyOverride')[0]
        except OSError:
            return False
        if not proxyEnable or not proxyOverride:
            return False

        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        # NOTE(review): the '<local>' literal below had been stripped to ''
        # by the dump (everything resembling an HTML tag was removed);
        # restored to the canonical upstream value, without which local
        # hosts are never bypassed.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                if '.' not in host:
                    return True
            test = test.replace(".", r"\.")    # mask dots
            test = test.replace("*", r".*")    # change glob sequence
            test = test.replace("?", r".")     # change glob char
            if re.match(test, host, re.I):
                return True
        return False

    def proxy_bypass(host):  # noqa
        """Return True, if the host should be bypassed.

        Checks proxy settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)


def dict_to_sequence(d):
    """Returns an internal sequence dictionary update."""

    if hasattr(d, 'items'):
        d = d.items()

    return d


def super_len(o):
    """Best-effort remaining length of a string/bytes/file-like object,
    accounting for the current read position. Returns 0 when the length
    cannot be determined."""
    total_length = None
    current_position = 0

    if hasattr(o, '__len__'):
        total_length = len(o)

    elif hasattr(o, 'len'):
        total_length = o.len

    elif hasattr(o, 'fileno'):
        try:
            fileno = o.fileno()
        except io.UnsupportedOperation:
            pass
        else:
            total_length = os.fstat(fileno).st_size

            # Having used fstat to determine the file length, we need to
            # confirm that this file was opened up in binary mode.
            if 'b' not in o.mode:
                warnings.warn((
                    "Requests has determined the content-length for this "
                    "request using the binary size of the file: however, the "
                    "file has been opened in text mode (i.e. without the 'b' "
                    "flag in the mode). This may lead to an incorrect "
                    "content-length. In Requests 3.0, support will be removed "
                    "for files in text mode."),
                    FileModeWarning
                )

    if hasattr(o, 'tell'):
        try:
            current_position = o.tell()
        except (OSError, IOError):
            # This can happen in some weird situations, such as when the file
            # is actually a special file descriptor like stdin. In this
            # instance, we don't know what the length is, so set it to zero and
            # let requests chunk it instead.
            if total_length is not None:
                current_position = total_length
        else:
            if hasattr(o, 'seek') and total_length is None:
                # StringIO and BytesIO have seek but no useable fileno
                try:
                    # seek to end of file
                    o.seek(0, 2)
                    total_length = o.tell()

                    # seek back to current position to support
                    # partially read file-like objects
                    o.seek(current_position or 0)
                except (OSError, IOError):
                    total_length = 0

    if total_length is None:
        total_length = 0

    return max(0, total_length - current_position)


def get_netrc_auth(url, raise_errors=False):
    """Returns the Requests tuple auth for a given url from netrc."""

    netrc_file = os.environ.get('NETRC')
    if netrc_file is not None:
        netrc_locations = (netrc_file,)
    else:
        netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)

    try:
        from netrc import netrc, NetrcParseError

        netrc_path = None

        for f in netrc_locations:
            try:
                loc = os.path.expanduser(f)
            except KeyError:
                # os.path.expanduser can fail when $HOME is undefined and
                # getpwuid fails. See https://bugs.python.org/issue20164 &
                # https://github.com/psf/requests/issues/1846
                return

            if os.path.exists(loc):
                netrc_path = loc
                break

        # Abort early if there isn't one.
        if netrc_path is None:
            return

        ri = urlparse(url)

        # Strip port numbers from netloc. This weird `if...encode`` dance is
        # used for Python 3.2, which doesn't support unicode literals.
        splitstr = b':'
        if isinstance(url, str):
            splitstr = splitstr.decode('ascii')
        host = ri.netloc.split(splitstr)[0]

        try:
            _netrc = netrc(netrc_path).authenticators(host)
            if _netrc:
                # Return with login / password
                login_i = (0 if _netrc[0] else 1)
                return (_netrc[login_i], _netrc[2])
        except (NetrcParseError, IOError):
            # If there was a parsing error or a permissions issue reading the file,
            # we'll just skip netrc auth unless explicitly asked to raise errors.
            if raise_errors:
                raise

    # App Engine hackiness.
    except (ImportError, AttributeError):
        pass


def guess_filename(obj):
    """Tries to guess the filename of the given object."""
    name = getattr(obj, 'name', None)
    if (name and isinstance(name, basestring) and name[0] != '<' and
            name[-1] != '>'):
        return os.path.basename(name)


def extract_zipped_paths(path):
    """Replace nonexistent paths that look like they refer to a member of a zip
    archive with the location of an extracted copy of the target, or else
    just return the provided path unchanged.
    """
    if os.path.exists(path):
        # this is already a valid path, no need to do anything further
        return path

    # find the first valid part of the provided path and treat that as a zip archive
    # assume the rest of the path is the name of a member in the archive
    archive, member = os.path.split(path)
    while archive and not os.path.exists(archive):
        archive, prefix = os.path.split(archive)
        member = '/'.join([prefix, member])

    if not zipfile.is_zipfile(archive):
        return path

    zip_file = zipfile.ZipFile(archive)
    if member not in zip_file.namelist():
        return path

    # we have a valid zip archive and a valid member of that archive
    tmp = tempfile.gettempdir()
    extracted_path = os.path.join(tmp, *member.split('/'))
    if not os.path.exists(extracted_path):
        extracted_path = zip_file.extract(member, path=tmp)

    return extracted_path
238 | """ 239 | if os.path.exists(path): 240 | # this is already a valid path, no need to do anything further 241 | return path 242 | 243 | # find the first valid part of the provided path and treat that as a zip archive 244 | # assume the rest of the path is the name of a member in the archive 245 | archive, member = os.path.split(path) 246 | while archive and not os.path.exists(archive): 247 | archive, prefix = os.path.split(archive) 248 | member = '/'.join([prefix, member]) 249 | 250 | if not zipfile.is_zipfile(archive): 251 | return path 252 | 253 | zip_file = zipfile.ZipFile(archive) 254 | if member not in zip_file.namelist(): 255 | return path 256 | 257 | # we have a valid zip archive and a valid member of that archive 258 | tmp = tempfile.gettempdir() 259 | extracted_path = os.path.join(tmp, *member.split('/')) 260 | if not os.path.exists(extracted_path): 261 | extracted_path = zip_file.extract(member, path=tmp) 262 | 263 | return extracted_path 264 | 265 | 266 | def from_key_val_list(value): 267 | """Take an object and test to see if it can be represented as a 268 | dictionary. Unless it can not be represented as such, return an 269 | OrderedDict, e.g., 270 | 271 | :: 272 | 273 | >>> from_key_val_list([('key', 'val')]) 274 | OrderedDict([('key', 'val')]) 275 | >>> from_key_val_list('string') 276 | Traceback (most recent call last): 277 | ... 278 | ValueError: cannot encode objects that are not 2-tuples 279 | >>> from_key_val_list({'key': 'val'}) 280 | OrderedDict([('key', 'val')]) 281 | 282 | :rtype: OrderedDict 283 | """ 284 | if value is None: 285 | return None 286 | 287 | if isinstance(value, (str, bytes, bool, int)): 288 | raise ValueError('cannot encode objects that are not 2-tuples') 289 | 290 | return OrderedDict(value) 291 | 292 | 293 | def to_key_val_list(value): 294 | """Take an object and test to see if it can be represented as a 295 | dictionary. 
If it can be, return a list of tuples, e.g., 296 | 297 | :: 298 | 299 | >>> to_key_val_list([('key', 'val')]) 300 | [('key', 'val')] 301 | >>> to_key_val_list({'key': 'val'}) 302 | [('key', 'val')] 303 | >>> to_key_val_list('string') 304 | Traceback (most recent call last): 305 | ... 306 | ValueError: cannot encode objects that are not 2-tuples 307 | 308 | :rtype: list 309 | """ 310 | if value is None: 311 | return None 312 | 313 | if isinstance(value, (str, bytes, bool, int)): 314 | raise ValueError('cannot encode objects that are not 2-tuples') 315 | 316 | if isinstance(value, Mapping): 317 | value = value.items() 318 | 319 | return list(value) 320 | 321 | 322 | # From mitsuhiko/werkzeug (used with permission). 323 | def parse_list_header(value): 324 | """Parse lists as described by RFC 2068 Section 2. 325 | 326 | In particular, parse comma-separated lists where the elements of 327 | the list may include quoted-strings. A quoted-string could 328 | contain a comma. A non-quoted string could have quotes in the 329 | middle. Quotes are removed automatically after parsing. 330 | 331 | It basically works like :func:`parse_set_header` just that items 332 | may appear multiple times and case sensitivity is preserved. 333 | 334 | The return value is a standard :class:`list`: 335 | 336 | >>> parse_list_header('token, "quoted value"') 337 | ['token', 'quoted value'] 338 | 339 | To create a header from the :class:`list` again, use the 340 | :func:`dump_header` function. 341 | 342 | :param value: a string with a list header. 343 | :return: :class:`list` 344 | :rtype: list 345 | """ 346 | result = [] 347 | for item in _parse_list_header(value): 348 | if item[:1] == item[-1:] == '"': 349 | item = unquote_header_value(item[1:-1]) 350 | result.append(item) 351 | return result 352 | 353 | 354 | # From mitsuhiko/werkzeug (used with permission). 
355 | def parse_dict_header(value): 356 | """Parse lists of key, value pairs as described by RFC 2068 Section 2 and 357 | convert them into a python dict: 358 | 359 | >>> d = parse_dict_header('foo="is a fish", bar="as well"') 360 | >>> type(d) is dict 361 | True 362 | >>> sorted(d.items()) 363 | [('bar', 'as well'), ('foo', 'is a fish')] 364 | 365 | If there is no value for a key it will be `None`: 366 | 367 | >>> parse_dict_header('key_without_value') 368 | {'key_without_value': None} 369 | 370 | To create a header from the :class:`dict` again, use the 371 | :func:`dump_header` function. 372 | 373 | :param value: a string with a dict header. 374 | :return: :class:`dict` 375 | :rtype: dict 376 | """ 377 | result = {} 378 | for item in _parse_list_header(value): 379 | if '=' not in item: 380 | result[item] = None 381 | continue 382 | name, value = item.split('=', 1) 383 | if value[:1] == value[-1:] == '"': 384 | value = unquote_header_value(value[1:-1]) 385 | result[name] = value 386 | return result 387 | 388 | 389 | # From mitsuhiko/werkzeug (used with permission). 390 | def unquote_header_value(value, is_filename=False): 391 | r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). 392 | This does not use the real unquoting but what browsers are actually 393 | using for quoting. 394 | 395 | :param value: the header value to unquote. 396 | :rtype: str 397 | """ 398 | if value and value[0] == value[-1] == '"': 399 | # this is not the real unquoting, but fixing this so that the 400 | # RFC is met will result in bugs with internet explorer and 401 | # probably some other browsers as well. IE for example is 402 | # uploading files with "C:\foo\bar.txt" as filename 403 | value = value[1:-1] 404 | 405 | # if this is a filename and the starting characters look like 406 | # a UNC path, then just return the value without quotes. 
Using the 407 | # replace sequence below on a UNC path has the effect of turning 408 | # the leading double slash into a single slash and then 409 | # _fix_ie_filename() doesn't work correctly. See #458. 410 | if not is_filename or value[:2] != '\\\\': 411 | return value.replace('\\\\', '\\').replace('\\"', '"') 412 | return value 413 | 414 | 415 | def dict_from_cookiejar(cj): 416 | """Returns a key/value dictionary from a CookieJar. 417 | 418 | :param cj: CookieJar object to extract cookies from. 419 | :rtype: dict 420 | """ 421 | 422 | cookie_dict = {} 423 | 424 | for cookie in cj: 425 | cookie_dict[cookie.name] = cookie.value 426 | 427 | return cookie_dict 428 | 429 | 430 | def add_dict_to_cookiejar(cj, cookie_dict): 431 | """Returns a CookieJar from a key/value dictionary. 432 | 433 | :param cj: CookieJar to insert cookies into. 434 | :param cookie_dict: Dict of key/values to insert into CookieJar. 435 | :rtype: CookieJar 436 | """ 437 | 438 | return cookiejar_from_dict(cookie_dict, cj) 439 | 440 | 441 | def get_encodings_from_content(content): 442 | """Returns encodings from given content string. 443 | 444 | :param content: bytestring to extract encodings from. 445 | """ 446 | warnings.warn(( 447 | 'In requests 3.0, get_encodings_from_content will be removed. For ' 448 | 'more information, please see the discussion on issue #2266. 
(This' 449 | ' warning should only appear once.)'), 450 | DeprecationWarning) 451 | 452 | charset_re = re.compile(r']', flags=re.I) 453 | pragma_re = re.compile(r']', flags=re.I) 454 | xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') 455 | 456 | return (charset_re.findall(content) + 457 | pragma_re.findall(content) + 458 | xml_re.findall(content)) 459 | 460 | 461 | def _parse_content_type_header(header): 462 | """Returns content type and parameters from given header 463 | 464 | :param header: string 465 | :return: tuple containing content type and dictionary of 466 | parameters 467 | """ 468 | 469 | tokens = header.split(';') 470 | content_type, params = tokens[0].strip(), tokens[1:] 471 | params_dict = {} 472 | items_to_strip = "\"' " 473 | 474 | for param in params: 475 | param = param.strip() 476 | if param: 477 | key, value = param, True 478 | index_of_equals = param.find("=") 479 | if index_of_equals != -1: 480 | key = param[:index_of_equals].strip(items_to_strip) 481 | value = param[index_of_equals + 1:].strip(items_to_strip) 482 | params_dict[key.lower()] = value 483 | return content_type, params_dict 484 | 485 | 486 | def get_encoding_from_headers(headers): 487 | """Returns encodings from given HTTP Header Dict. 488 | 489 | :param headers: dictionary to extract encoding from. 
def stream_decode_response_unicode(iterator, r):
    """Stream decodes a iterator."""
    if r.encoding is None:
        # No declared encoding: pass the raw chunks straight through.
        for item in iterator:
            yield item
        return

    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
    for raw_chunk in iterator:
        decoded = decoder.decode(raw_chunk)
        if decoded:
            yield decoded
    # Flush whatever the incremental decoder is still buffering.
    tail = decoder.decode(b'', final=True)
    if tail:
        yield tail


def iter_slices(string, slice_length):
    """Iterate over slices of a string."""
    offset = 0
    if slice_length is None or slice_length <= 0:
        # A non-positive length means "one slice with everything".
        slice_length = len(string)
    total = len(string)
    while offset < total:
        yield string[offset:offset + slice_length]
        offset += slice_length


def get_unicode_from_response(r):
    """Returns the requested content back in unicode.

    :param r: Response object to get unicode content from.

    Tried:

    1. charset from content-type
    2. fall back and replace all unicode characters

    :rtype: str
    """
    warnings.warn((
        'In requests 3.0, get_unicode_from_response will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    tried_encodings = []

    # Try charset from content-type
    declared = get_encoding_from_headers(r.headers)

    if declared:
        try:
            return str(r.content, declared)
        except UnicodeError:
            tried_encodings.append(declared)

    # Fall back:
    try:
        return str(r.content, declared, errors='replace')
    except TypeError:
        return r.content


# The unreserved URI characters (RFC 3986)
UNRESERVED_SET = frozenset(
    "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    "abcdefghijklmnopqrstuvwxyz"
    "0123456789-._~")


def unquote_unreserved(uri):
    """Un-escape any percent-escape sequences in a URI that are unreserved
    characters. This leaves all reserved, illegal and non-ASCII bytes encoded.

    :rtype: str
    """
    segments = uri.split('%')
    for idx in range(1, len(segments)):
        hex_pair = segments[idx][0:2]
        if len(hex_pair) == 2 and hex_pair.isalnum():
            try:
                decoded = chr(int(hex_pair, 16))
            except ValueError:
                raise InvalidURL("Invalid percent-escape sequence: '%s'" % hex_pair)

            if decoded in UNRESERVED_SET:
                # Safe to un-escape; keep the rest of the segment.
                segments[idx] = decoded + segments[idx][2:]
            else:
                segments[idx] = '%' + segments[idx]
        else:
            segments[idx] = '%' + segments[idx]
    return ''.join(segments)


def requote_uri(uri):
    """Re-quote the given URI.

    This function passes the given URI through an unquote/quote cycle to
    ensure that it is fully and consistently quoted.

    :rtype: str
    """
    safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
    safe_without_percent = "!#$&'()*+,/:;=?@[]~"
    try:
        # Unquote only the unreserved characters
        # Then quote only illegal characters (do not quote reserved,
        # unreserved, or '%')
        return quote(unquote_unreserved(uri), safe=safe_with_percent)
    except InvalidURL:
        # We couldn't unquote the given URI, so let's try quoting it, but
        # there may be unquoted '%'s in the URI. We need to make sure they're
        # properly quoted so they do not cause issues elsewhere.
        return quote(uri, safe=safe_without_percent)


def address_in_network(ip, net):
    """This function allows you to check if an IP belongs to a network subnet

    Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24
             returns False if ip = 192.168.1.1 and net = 192.168.100.0/24

    :rtype: bool
    """
    ip_int = struct.unpack('=L', socket.inet_aton(ip))[0]
    net_addr, bits = net.split('/')
    mask_int = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0]
    network_int = struct.unpack('=L', socket.inet_aton(net_addr))[0] & mask_int
    return (ip_int & mask_int) == (network_int & mask_int)


def dotted_netmask(mask):
    """Converts mask from /xx format to xxx.xxx.xxx.xxx

    Example: if mask is 24 function returns 255.255.255.0

    :rtype: str
    """
    # Keep the top `mask` bits set, clear the rest.
    host_bits = 32 - mask
    netmask_int = (0xffffffff >> host_bits) << host_bits
    return socket.inet_ntoa(struct.pack('>I', netmask_int))


def is_ipv4_address(string_ip):
    """
    :rtype: bool
    """
    try:
        socket.inet_aton(string_ip)
        return True
    except socket.error:
        return False


def is_valid_cidr(string_network):
    """
    Very simple check of the cidr format in no_proxy variable.

    :rtype: bool
    """
    if string_network.count('/') != 1:
        return False
    addr, _, mask_str = string_network.partition('/')
    try:
        mask = int(mask_str)
    except ValueError:
        return False
    if not 1 <= mask <= 32:
        return False
    try:
        socket.inet_aton(addr)
    except socket.error:
        return False
    return True


@contextlib.contextmanager
def set_environ(env_name, value):
    """Set the environment variable 'env_name' to 'value'

    Save previous value, yield, and then restore the previous value stored in
    the environment variable 'env_name'.

    If 'value' is None, do nothing"""
    should_set = value is not None
    if should_set:
        previous = os.environ.get(env_name)
        os.environ[env_name] = value
    try:
        yield
    finally:
        if should_set:
            if previous is None:
                del os.environ[env_name]
            else:
                os.environ[env_name] = previous


def should_bypass_proxies(url, no_proxy):
    """
    Returns whether we should bypass proxies or not.

    :rtype: bool
    """
    # Prioritize lowercase environment variables over uppercase
    # to keep a consistent behaviour with other http projects (curl, wget).
    def read_env(key):
        return os.environ.get(key) or os.environ.get(key.upper())

    # First check whether no_proxy is defined. If it is, check that the URL
    # we're getting isn't in the no_proxy list.
    no_proxy_arg = no_proxy
    if no_proxy is None:
        no_proxy = read_env('no_proxy')
    parsed = urlparse(url)

    if parsed.hostname is None:
        # URLs don't always have hostnames, e.g. file:/// urls.
        return True

    if no_proxy:
        # We need to check whether we match here. We need to see if we match
        # the end of the hostname, both with and without the port.
        bypass_hosts = (
            entry for entry in no_proxy.replace(' ', '').split(',') if entry
        )

        if is_ipv4_address(parsed.hostname):
            for bypass_entry in bypass_hosts:
                if is_valid_cidr(bypass_entry):
                    if address_in_network(parsed.hostname, bypass_entry):
                        return True
                elif parsed.hostname == bypass_entry:
                    # If no_proxy ip was defined in plain IP notation instead of cidr notation &
                    # matches the IP of the index
                    return True
        else:
            host_with_port = parsed.hostname
            if parsed.port:
                host_with_port += ':{}'.format(parsed.port)

            for bypass_entry in bypass_hosts:
                if parsed.hostname.endswith(bypass_entry) or host_with_port.endswith(bypass_entry):
                    # The URL does match something in no_proxy, so we don't want
                    # to apply the proxies on this URL.
                    return True

    with set_environ('no_proxy', no_proxy_arg):
        # parsed.hostname can be `None` in cases such as a file URI.
        try:
            bypass = proxy_bypass(parsed.hostname)
        except (TypeError, socket.gaierror):
            bypass = False

    if bypass:
        return True

    return False


def get_environ_proxies(url, no_proxy=None):
    """
    Return a dict of environment proxies.

    :rtype: dict
    """
    if should_bypass_proxies(url, no_proxy=no_proxy):
        return {}
    return getproxies()


def select_proxy(url, proxies):
    """Select a proxy for the url, if applicable.

    :param url: The url being for the request
    :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
    """
    proxies = proxies or {}
    urlparts = urlparse(url)
    if urlparts.hostname is None:
        return proxies.get(urlparts.scheme, proxies.get('all'))

    # Most specific key first: scheme+host, scheme, all+host, all.
    proxy_keys = [
        urlparts.scheme + '://' + urlparts.hostname,
        urlparts.scheme,
        'all://' + urlparts.hostname,
        'all',
    ]
    return next((proxies[key] for key in proxy_keys if key in proxies), None)
def default_user_agent(name="python-requests"):
    """
    Return a string representing the default user agent.

    :rtype: str
    """
    return '{}/{}'.format(name, __version__)


def default_headers():
    """
    :rtype: requests.structures.CaseInsensitiveDict
    """
    # Insertion order mirrors what requests has always sent by default.
    headers = CaseInsensitiveDict()
    headers['User-Agent'] = default_user_agent()
    headers['Accept-Encoding'] = ', '.join(('gzip', 'deflate'))
    headers['Accept'] = '*/*'
    headers['Connection'] = 'keep-alive'
    return headers


def parse_header_links(value):
    """Return a list of parsed link headers proxies.

    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"

    :rtype: list
    """

    links = []

    strip_chars = ' \'"'

    value = value.strip(strip_chars)
    if not value:
        return links

    for segment in re.split(', *<', value):
        # Split off the URL part; everything after the first ';' is params.
        url, _, raw_params = segment.partition(';')

        link = {'url': url.strip('<> \'"')}

        for raw_param in raw_params.split(';'):
            try:
                key, val = raw_param.split('=')
            except ValueError:
                # Malformed or empty parameter: stop parsing this link.
                break

            link[key.strip(strip_chars)] = val.strip(strip_chars)

        links.append(link)

    return links


# Null bytes; no need to recreate these on each call to guess_json_utf
_null = '\x00'.encode('ascii')  # encoding to ASCII for Python 3
_null2 = _null * 2
_null3 = _null * 3


def guess_json_utf(data):
    """
    :rtype: str
    """
    # JSON always starts with two ASCII characters, so detection is as
    # easy as counting the nulls and from their location and count
    # determine the encoding. Also detect a BOM, if present.
    head = data[:4]
    if head in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
        return 'utf-32'      # BOM included
    if head[:3] == codecs.BOM_UTF8:
        return 'utf-8-sig'   # BOM included, MS style (discouraged)
    if head[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
        return 'utf-16'      # BOM included
    nulls = head.count(_null)
    if nulls == 0:
        return 'utf-8'
    if nulls == 2:
        if head[::2] == _null2:    # 1st and 3rd are null
            return 'utf-16-be'
        if head[1::2] == _null2:   # 2nd and 4th are null
            return 'utf-16-le'
        # Did not detect 2 valid UTF-16 ascii-range characters
    if nulls == 3:
        if head[:3] == _null3:
            return 'utf-32-be'
        if head[1:] == _null3:
            return 'utf-32-le'
        # Did not detect a valid UTF-32 ascii-range character
    return None


def prepend_scheme_if_needed(url, new_scheme):
    """Given a URL that may or may not have a scheme, prepend the given scheme.
    Does not replace a present scheme with the one provided as an argument.

    :rtype: str
    """
    parsed = urlparse(url, new_scheme)

    netloc, path = parsed.netloc, parsed.path
    # urlparse is a finicky beast, and sometimes decides that there isn't a
    # netloc present. Assume that it's being over-cautious, and switch netloc
    # and path if urlparse decided there was no netloc.
    if not netloc:
        netloc, path = path, netloc

    return urlunparse((parsed.scheme, netloc, path, parsed.params,
                       parsed.query, parsed.fragment))


def get_auth_from_url(url):
    """Given a url with authentication components, extract them into a tuple of
    username,password.

    :rtype: (str,str)
    """
    parsed = urlparse(url)

    try:
        return (unquote(parsed.username), unquote(parsed.password))
    except (AttributeError, TypeError):
        # No auth component present (username/password are None).
        return ('', '')


# Moved outside of function to avoid recompile every call
_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')


def check_header_validity(header):
    """Verifies that header value is a string which doesn't contain
    leading whitespace or return characters. This prevents unintended
    header injection.

    :param header: tuple, in the format (name, value).
    """
    name, value = header

    pat = _CLEAN_HEADER_REGEX_BYTE if isinstance(value, bytes) else _CLEAN_HEADER_REGEX_STR
    try:
        if not pat.match(value):
            raise InvalidHeader("Invalid return character or leading space in header: %s" % name)
    except TypeError:
        # Non-string, non-bytes value cannot be matched at all.
        raise InvalidHeader("Value for header {%s: %s} must be of type str or "
                            "bytes, not %s" % (name, value, type(value)))


def urldefragauth(url):
    """
    Given a url remove the fragment and the authentication part.

    :rtype: str
    """
    parsed = urlparse(url)

    netloc, path = parsed.netloc, parsed.path
    # see func:`prepend_scheme_if_needed`
    if not netloc:
        netloc, path = path, netloc

    # Drop any user:pass@ prefix from the network location.
    netloc = netloc.rsplit('@', 1)[-1]

    return urlunparse((parsed.scheme, netloc, path, parsed.params,
                       parsed.query, ''))


def rewind_body(prepared_request):
    """Move file pointer back to its recorded starting position
    so it can be read again on redirect.
    """
    seek = getattr(prepared_request.body, 'seek', None)
    if seek is None or not isinstance(prepared_request._body_position, integer_types):
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
    try:
        seek(prepared_request._body_position)
    except (IOError, OSError):
        raise UnrewindableBodyError("An error occurred when rewinding request "
                                    "body for redirect.")
983 | """ 984 | body_seek = getattr(prepared_request.body, 'seek', None) 985 | if body_seek is not None and isinstance(prepared_request._body_position, integer_types): 986 | try: 987 | body_seek(prepared_request._body_position) 988 | except (IOError, OSError): 989 | raise UnrewindableBodyError("An error occurred when rewinding request " 990 | "body for redirect.") 991 | else: 992 | raise UnrewindableBodyError("Unable to rewind request body for redirect.") 993 | -------------------------------------------------------------------------------- /requests/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.models 5 | ~~~~~~~~~~~~~~~ 6 | 7 | This module contains the primary objects that power Requests. 8 | """ 9 | 10 | import datetime 11 | import sys 12 | 13 | # Import encoding now, to avoid implicit import later. 14 | # Implicit import within threads may cause LookupError when standard library is in a ZIP, 15 | # such as in Embedded Python. See https://github.com/psf/requests/issues/3578. 
16 | import encodings.idna 17 | 18 | from urllib3.fields import RequestField 19 | from urllib3.filepost import encode_multipart_formdata 20 | from urllib3.util import parse_url 21 | from urllib3.exceptions import ( 22 | DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) 23 | 24 | from io import UnsupportedOperation 25 | from .hooks import default_hooks 26 | from .structures import CaseInsensitiveDict 27 | 28 | from .auth import HTTPBasicAuth 29 | from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar 30 | from .exceptions import ( 31 | HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, 32 | ContentDecodingError, ConnectionError, StreamConsumedError) 33 | from ._internal_utils import to_native_string, unicode_is_ascii 34 | from .utils import ( 35 | guess_filename, get_auth_from_url, requote_uri, 36 | stream_decode_response_unicode, to_key_val_list, parse_header_links, 37 | iter_slices, guess_json_utf, super_len, check_header_validity) 38 | from .compat import ( 39 | Callable, Mapping, 40 | cookielib, urlunparse, urlsplit, urlencode, str, bytes, 41 | is_py2, chardet, builtin_str, basestring) 42 | from .compat import json as complexjson 43 | from .status_codes import codes 44 | 45 | #: The set of HTTP status codes that indicate an automatically 46 | #: processable redirect. 
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,               # 301
    codes.found,               # 302
    codes.other,               # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

#: Maximum number of redirects followed before giving up.
DEFAULT_REDIRECT_LIMIT = 30
#: Default chunk size (bytes) used when reading response content.
CONTENT_CHUNK_SIZE = 10 * 1024
#: Default chunk size (bytes) used by ``Response.iter_lines()``.
ITER_CHUNK_SIZE = 512


class RequestEncodingMixin(object):
    """Mixin supplying URL-path and body encoding helpers for request objects."""

    @property
    def path_url(self):
        """Build the path URL to use (path plus query string, no scheme/host)."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            # A bare host ("http://example.com") still needs a request path.
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            # Already a string/bytes body; pass through untouched.
            return data
        elif hasattr(data, 'read'):
            # File-like objects are streamed as-is; no form-encoding.
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    # Promote scalars so single- and multi-valued keys share
                    # one code path below.
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        # UTF-8-encode text; leave bytes untouched.
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).

        :raises ValueError: if ``files`` is empty or ``data`` is a string.
        :returns: ``(body, content_type)`` tuple ready to send.
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        # Regular form fields come first, then the file parts.
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename: value may be a bare file object
            # or a (filename, fileobj[, content_type[, headers]]) tuple.
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, 'read'):
                fdata = fp.read()
            elif fp is None:
                # A None file value is silently skipped.
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type


class RequestHooksMixin(object):
    """Mixin supplying hook (callback) registration for request objects."""

    def register_hook(self, event, hook):
        """Properly register a hook.

        ``hook`` may be a single callable or an iterable of callables;
        non-callable entries in an iterable are silently dropped.
        """

        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        if isinstance(hook, Callable):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
event, hook): 187 | """Deregister a previously registered hook. 188 | Returns True if the hook existed, False if not. 189 | """ 190 | 191 | try: 192 | self.hooks[event].remove(hook) 193 | return True 194 | except ValueError: 195 | return False 196 | 197 | 198 | class Request(RequestHooksMixin): 199 | """A user-created :class:`Request ` object. 200 | 201 | Used to prepare a :class:`PreparedRequest `, which is sent to the server. 202 | 203 | :param method: HTTP method to use. 204 | :param url: URL to send. 205 | :param headers: dictionary of headers to send. 206 | :param files: dictionary of {filename: fileobject} files to multipart upload. 207 | :param data: the body to attach to the request. If a dictionary or 208 | list of tuples ``[(key, value)]`` is provided, form-encoding will 209 | take place. 210 | :param json: json for the body to attach to the request (if files or data is not specified). 211 | :param params: URL parameters to append to the URL. If a dictionary or 212 | list of tuples ``[(key, value)]`` is provided, form-encoding will 213 | take place. 214 | :param auth: Auth handler or (user, pass) tuple. 215 | :param cookies: dictionary or CookieJar of cookies to attach to this request. 216 | :param hooks: dictionary of callback hooks, for internal usage. 217 | 218 | Usage:: 219 | 220 | >>> import requests 221 | >>> req = requests.Request('GET', 'https://httpbin.org/get') 222 | >>> req.prepare() 223 | 224 | """ 225 | 226 | def __init__(self, 227 | method=None, url=None, headers=None, files=None, data=None, 228 | params=None, auth=None, cookies=None, hooks=None, json=None): 229 | 230 | # Default empty dicts for dict params. 
231 | data = [] if data is None else data 232 | files = [] if files is None else files 233 | headers = {} if headers is None else headers 234 | params = {} if params is None else params 235 | hooks = {} if hooks is None else hooks 236 | 237 | self.hooks = default_hooks() 238 | for (k, v) in list(hooks.items()): 239 | self.register_hook(event=k, hook=v) 240 | 241 | self.method = method 242 | self.url = url 243 | self.headers = headers 244 | self.files = files 245 | self.data = data 246 | self.json = json 247 | self.params = params 248 | self.auth = auth 249 | self.cookies = cookies 250 | 251 | def __repr__(self): 252 | return '' % (self.method) 253 | 254 | def prepare(self): 255 | """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" 256 | p = PreparedRequest() 257 | p.prepare( 258 | method=self.method, 259 | url=self.url, 260 | headers=self.headers, 261 | files=self.files, 262 | data=self.data, 263 | json=self.json, 264 | params=self.params, 265 | auth=self.auth, 266 | cookies=self.cookies, 267 | hooks=self.hooks, 268 | ) 269 | return p 270 | 271 | 272 | class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): 273 | """The fully mutable :class:`PreparedRequest ` object, 274 | containing the exact bytes that will be sent to the server. 275 | 276 | Instances are generated from a :class:`Request ` object, and 277 | should not be instantiated manually; doing so may produce undesirable 278 | effects. 279 | 280 | Usage:: 281 | 282 | >>> import requests 283 | >>> req = requests.Request('GET', 'https://httpbin.org/get') 284 | >>> r = req.prepare() 285 | >>> r 286 | 287 | 288 | >>> s = requests.Session() 289 | >>> s.send(r) 290 | 291 | """ 292 | 293 | def __init__(self): 294 | #: HTTP verb to send to the server. 295 | self.method = None 296 | #: HTTP URL to send the request to. 297 | self.url = None 298 | #: dictionary of HTTP headers. 
299 | self.headers = None 300 | # The `CookieJar` used to create the Cookie header will be stored here 301 | # after prepare_cookies is called 302 | self._cookies = None 303 | #: request body to send to the server. 304 | self.body = None 305 | #: dictionary of callback hooks, for internal usage. 306 | self.hooks = default_hooks() 307 | #: integer denoting starting position of a readable file-like body. 308 | self._body_position = None 309 | 310 | def prepare(self, 311 | method=None, url=None, headers=None, files=None, data=None, 312 | params=None, auth=None, cookies=None, hooks=None, json=None): 313 | """Prepares the entire request with the given parameters.""" 314 | 315 | self.prepare_method(method) 316 | self.prepare_url(url, params) 317 | self.prepare_headers(headers) 318 | self.prepare_cookies(cookies) 319 | self.prepare_body(data, files, json) 320 | self.prepare_auth(auth, url) 321 | 322 | # Note that prepare_auth must be last to enable authentication schemes 323 | # such as OAuth to work on a fully prepared request. 324 | 325 | # This MUST go after prepare_auth. 
Authenticators could add a hook 326 | self.prepare_hooks(hooks) 327 | 328 | def __repr__(self): 329 | return '' % (self.method) 330 | 331 | def copy(self): 332 | p = PreparedRequest() 333 | p.method = self.method 334 | p.url = self.url 335 | p.headers = self.headers.copy() if self.headers is not None else None 336 | p._cookies = _copy_cookie_jar(self._cookies) 337 | p.body = self.body 338 | p.hooks = self.hooks 339 | p._body_position = self._body_position 340 | return p 341 | 342 | def prepare_method(self, method): 343 | """Prepares the given HTTP method.""" 344 | self.method = method 345 | if self.method is not None: 346 | self.method = to_native_string(self.method.upper()) 347 | 348 | @staticmethod 349 | def _get_idna_encoded_host(host): 350 | import idna 351 | 352 | try: 353 | host = idna.encode(host, uts46=True).decode('utf-8') 354 | except idna.IDNAError: 355 | raise UnicodeError 356 | return host 357 | 358 | def prepare_url(self, url, params): 359 | """Prepares the given HTTP URL.""" 360 | #: Accept objects that have string representations. 361 | #: We're unable to blindly call unicode/str functions 362 | #: as this will include the bytestring indicator (b'') 363 | #: on python 3.x. 364 | #: https://github.com/psf/requests/pull/2238 365 | if isinstance(url, bytes): 366 | url = url.decode('utf8') 367 | else: 368 | url = unicode(url) if is_py2 else str(url) 369 | 370 | # Remove leading whitespaces from url 371 | url = url.lstrip() 372 | 373 | # Don't do any URL preparation for non-HTTP schemes like `mailto`, 374 | # `data` etc to work around exceptions from `url_parse`, which 375 | # handles RFC 3986 only. 376 | if ':' in url and not url.lower().startswith('http'): 377 | self.url = url 378 | return 379 | 380 | # Support for unicode domain names and paths. 
        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
            error = error.format(to_native_string(url, 'utf8'))

            raise MissingSchema(error)

        if not host:
            raise InvalidURL("Invalid URL %r: No host supplied" % url)

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL('URL has an invalid label.')
        elif host.startswith(u'*'):
            raise InvalidURL('URL has an invalid label.')

        # Carefully reconstruct the network location
        netloc = auth or ''
        if netloc:
            netloc += '@'
        netloc += host
        if port:
            netloc += ':' + str(port)

        # Bare domains aren't valid URLs.
        if not path:
            path = '/'

        if is_py2:
            # On Python 2, every component must be a byte string before
            # urlunparse/requote_uri see it.
            if isinstance(scheme, str):
                scheme = scheme.encode('utf-8')
            if isinstance(netloc, str):
                netloc = netloc.encode('utf-8')
            if isinstance(path, str):
                path = path.encode('utf-8')
            if isinstance(query, str):
                query = query.encode('utf-8')
            if isinstance(fragment, str):
                fragment = fragment.encode('utf-8')

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        # Merge explicit params into any query string already on the URL.
        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = '%s&%s' % (query, enc_params)
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers (case-insensitive, native-string keys)."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data.

        Chooses between a JSON body, a streamed body (file/generator), a
        multipart body (``files``), and a form-/string-encoded body, and sets
        Content-Length / Transfer-Encoding / Content-Type accordingly.
        """

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = 'application/json'
            body = complexjson.dumps(json)
            if not isinstance(body, bytes):
                body = body.encode('utf-8')

        # "Stream" means: iterable, but not a plain container or string.
        is_stream = all([
            hasattr(data, '__iter__'),
            not isinstance(data, (basestring, list, tuple, Mapping))
        ])

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, 'tell', None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except (IOError, OSError):
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError('Streamed bodies and files are mutually exclusive.')

            if length:
                self.headers['Content-Length'] = builtin_str(length)
            else:
                self.headers['Transfer-Encoding'] = 'chunked'
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, 'read'):
                        content_type = None
                    else:
                        content_type = 'application/x-www-form-urlencoded'

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ('content-type' not in self.headers):
                self.headers['Content-Type'] = content_type

        self.body = body
    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body."""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers['Content-Length'] = builtin_str(length)
        elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers['Content-Length'] = '0'

    def prepare_auth(self, auth, url=''):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length (auth may have altered the body).
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers['Cookie'] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])


class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    # Attributes captured by __getstate__ for pickling.
    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]

    def __init__(self):
        # False (not None) marks "content not read yet"; see the `content`
        # property, which distinguishes the two.
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        #: This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None
The list is sorted from the oldest to the most recent request. 626 | self.history = [] 627 | 628 | #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". 629 | self.reason = None 630 | 631 | #: A CookieJar of Cookies the server sent back. 632 | self.cookies = cookiejar_from_dict({}) 633 | 634 | #: The amount of time elapsed between sending the request 635 | #: and the arrival of the response (as a timedelta). 636 | #: This property specifically measures the time taken between sending 637 | #: the first byte of the request and finishing parsing the headers. It 638 | #: is therefore unaffected by consuming the response content or the 639 | #: value of the ``stream`` keyword argument. 640 | self.elapsed = datetime.timedelta(0) 641 | 642 | #: The :class:`PreparedRequest ` object to which this 643 | #: is a response. 644 | self.request = None 645 | 646 | def __enter__(self): 647 | return self 648 | 649 | def __exit__(self, *args): 650 | self.close() 651 | 652 | def __getstate__(self): 653 | # Consume everything; accessing the content attribute makes 654 | # sure the content has been fully read. 655 | if not self._content_consumed: 656 | self.content 657 | 658 | return {attr: getattr(self, attr, None) for attr in self.__attrs__} 659 | 660 | def __setstate__(self, state): 661 | for name, value in state.items(): 662 | setattr(self, name, value) 663 | 664 | # pickled objects do not have .raw 665 | setattr(self, '_content_consumed', True) 666 | setattr(self, 'raw', None) 667 | 668 | def __repr__(self): 669 | return '' % (self.status_code) 670 | 671 | def __bool__(self): 672 | """Returns True if :attr:`status_code` is less than 400. 673 | 674 | This attribute checks if the status code of the response is between 675 | 400 and 600 to see if there was a client error or a server error. If 676 | the status code, is between 200 and 400, this will return True. This 677 | is **not** a check to see if the response code is ``200 OK``. 
678 | """ 679 | return self.ok 680 | 681 | def __nonzero__(self): 682 | """Returns True if :attr:`status_code` is less than 400. 683 | 684 | This attribute checks if the status code of the response is between 685 | 400 and 600 to see if there was a client error or a server error. If 686 | the status code, is between 200 and 400, this will return True. This 687 | is **not** a check to see if the response code is ``200 OK``. 688 | """ 689 | return self.ok 690 | 691 | def __iter__(self): 692 | """Allows you to use a response as an iterator.""" 693 | return self.iter_content(128) 694 | 695 | @property 696 | def ok(self): 697 | """Returns True if :attr:`status_code` is less than 400, False if not. 698 | 699 | This attribute checks if the status code of the response is between 700 | 400 and 600 to see if there was a client error or a server error. If 701 | the status code is between 200 and 400, this will return True. This 702 | is **not** a check to see if the response code is ``200 OK``. 703 | """ 704 | try: 705 | self.raise_for_status() 706 | except HTTPError: 707 | return False 708 | return True 709 | 710 | @property 711 | def is_redirect(self): 712 | """True if this Response is a well-formed HTTP redirect that could have 713 | been processed automatically (by :meth:`Session.resolve_redirects`). 
714 | """ 715 | return ('location' in self.headers and self.status_code in REDIRECT_STATI) 716 | 717 | @property 718 | def is_permanent_redirect(self): 719 | """True if this Response one of the permanent versions of redirect.""" 720 | return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) 721 | 722 | @property 723 | def next(self): 724 | """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" 725 | return self._next 726 | 727 | @property 728 | def apparent_encoding(self): 729 | """The apparent encoding, provided by the chardet library.""" 730 | return chardet.detect(self.content)['encoding'] 731 | 732 | def iter_content(self, chunk_size=1, decode_unicode=False): 733 | """Iterates over the response data. When stream=True is set on the 734 | request, this avoids reading the content at once into memory for 735 | large responses. The chunk size is the number of bytes it should 736 | read into memory. This is not necessarily the length of each item 737 | returned as decoding can take place. 738 | 739 | chunk_size must be of type int or None. A value of None will 740 | function differently depending on the value of `stream`. 741 | stream=True will read data as it arrives in whatever size the 742 | chunks are received. If stream=False, data is returned as 743 | a single chunk. 744 | 745 | If decode_unicode is True, content will be decoded using the best 746 | available encoding based on the response. 747 | """ 748 | 749 | def generate(): 750 | # Special case for urllib3. 751 | if hasattr(self.raw, 'stream'): 752 | try: 753 | for chunk in self.raw.stream(chunk_size, decode_content=True): 754 | yield chunk 755 | except ProtocolError as e: 756 | raise ChunkedEncodingError(e) 757 | except DecodeError as e: 758 | raise ContentDecodingError(e) 759 | except ReadTimeoutError as e: 760 | raise ConnectionError(e) 761 | else: 762 | # Standard file-like object. 
763 | while True: 764 | chunk = self.raw.read(chunk_size) 765 | if not chunk: 766 | break 767 | yield chunk 768 | 769 | self._content_consumed = True 770 | 771 | if self._content_consumed and isinstance(self._content, bool): 772 | raise StreamConsumedError() 773 | elif chunk_size is not None and not isinstance(chunk_size, int): 774 | raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) 775 | # simulate reading small chunks of the content 776 | reused_chunks = iter_slices(self._content, chunk_size) 777 | 778 | stream_chunks = generate() 779 | 780 | chunks = reused_chunks if self._content_consumed else stream_chunks 781 | 782 | if decode_unicode: 783 | chunks = stream_decode_response_unicode(chunks, self) 784 | 785 | return chunks 786 | 787 | def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): 788 | """Iterates over the response data, one line at a time. When 789 | stream=True is set on the request, this avoids reading the 790 | content at once into memory for large responses. 791 | 792 | .. note:: This method is not reentrant safe. 793 | """ 794 | 795 | pending = None 796 | 797 | for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): 798 | 799 | if pending is not None: 800 | chunk = pending + chunk 801 | 802 | if delimiter: 803 | lines = chunk.split(delimiter) 804 | else: 805 | lines = chunk.splitlines() 806 | 807 | if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: 808 | pending = lines.pop() 809 | else: 810 | pending = None 811 | 812 | for line in lines: 813 | yield line 814 | 815 | if pending is not None: 816 | yield pending 817 | 818 | @property 819 | def content(self): 820 | """Content of the response, in bytes.""" 821 | 822 | if self._content is False: 823 | # Read the contents. 
    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(
                        self.content.decode(encoding), **kwargs
                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec.  This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return complexjson.loads(self.text, **kwargs)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any.

        Keyed by each link's ``rel`` (falling back to its URL).
        """

        header = self.headers.get('link')

        # l = MultiDict()
        l = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l

    def raise_for_status(self):
        """Raises :class:`HTTPError`, if one occurred."""

        http_error_msg = ''
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = self.reason.decode('iso-8859-1')
        else:
            reason = self.reason

        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)

        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()

        release_conn = getattr(self.raw, 'release_conn', None)
        if release_conn is not None:
            release_conn()