├── README.md
├── _http.py
├── async_http.py
├── down.py
├── setup.py
└── simple_http.py

/README.md:
--------------------------------------------------------------------------------
## A simple, easy-to-use synchronous/asynchronous HTTP library
### Installation
```shell
sudo python setup.py install
```
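Installation puts three modules on the path: `simple_http` (the blocking client), `async_http` (the epoll-based client) and their shared helpers in `_http`. A quick smoke test (hypothetical session; any reachable URL will do):

```python
import simple_http

res = simple_http.get("http://www.baidu.com")
print res["status"], res["message"]   # e.g. 302 Moved Temporarily
```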
### Asynchronous usage
```shell
In [21]: def print_it(x):
   ....:     import pprint
   ....:     pprint.pprint(x)
   ....:

In [22]: async_http.repeat_tasks([{"url": "http://www.baidu.com", "parser": print_it}])
{'chain': None,
 'chain_idx': 0,
 'con': <socket._socketobject object at 0x...>,
 'fd': 5,
 'header_only': False,
 'parser': <function print_it at 0x...>,
 'proxy': '',
 'random': '60804c2a0b053fbd',
 'recv': <cStringIO.StringO object at 0x...>,
 'redirect': 0,
 'res_cookie': {'BAIDUID': {'domain': '.baidu.com',
                            'expires': 'Thu, 31-Dec-37 23:55:55 GMT',
                            'max-age': '2147483647',
                            'path': '/',
                            'value': 'BCB0BBBB4312D00C88BCDC9EEAAE3726:FG=1'},
                'BD_LAST_QID': {'Max-Age': '1',
                                'path': '/',
                                'value': '16069052107084303783'},
                'BIDUPSID': {'domain': '.baidu.com',
                             'expires': 'Thu, 31-Dec-37 23:55:55 GMT',
                             'max-age': '2147483647',
                             'path': '/',
                             'value': 'BCB0BBBB4312D00C88BCDC9EEAAE3726'}},
 'res_header': {'Connection': 'Keep-Alive',
                'Content-Length': '215',
                'Content-Type': 'text/html',
                'Date': 'Thu, 21 May 2015 15:50:43 GMT',
                'Location': 'https://www.baidu.com/',
                'P3P': 'CP=" OTI DSP COR IVA OUR IND COM "',
                'Server': 'BWS/1.1',
                'Set-Cookie': 'BAIDUID=BCB0BBBB4312D00C88BCDC9EEAAE3726:FG=1; expires=Thu, 31-Dec-37 23:55:55 GMT; max-age=2147483647; path=/; domain=.baidu.com\r\nBIDUPSID=BCB0BBBB4312D00C88BCDC9EEAAE3726; expires=Thu, 31-Dec-37 23:55:55 GMT; max-age=2147483647; path=/; domain=.baidu.com\r\nBD_LAST_QID=16069052107084303783; path=/; Max-Age=1',
                'X-UA-Compatible': 'IE=Edge,chrome=1'},
 'res_status': {'message': 'Moved Temporarily',
                'protocol': 'HTTP/1.1',
                'status': 302},
 'retry': 0,
 'send': <cStringIO.StringO object at 0x...>,
 'ssl': False,
 'start': 1432223278.489937,
 'status': 512,
 'text': '\r\n302 Found\r\n\r\n302 Found\r\npr-nginx_1-0-221_BRANCH Branch\nTime : Wed May 20 10:35:46 CST 2015\r\n\r\n\r\n',
 'url': 'http://www.baidu.com'}
async_http Thu May 21 23:47:58 2015: 'acnt: 1, fcnt: 0, time: 0'
```
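The parser callback receives the whole task dict shown above, with `task["text"]` already de-chunked and decompressed. For many URLs sharing one parser there is the `batch_request` shortcut; a minimal sketch (the output file name is made up):

```python
import async_http

def save_body(task):
    # task["text"] holds the decoded response body
    with open("page-%s.html" % task["random"], "w") as f:
        f.write(task["text"])

async_http.batch_request(["http://www.baidu.com", "http://baidu.com"],
                         save_body)
```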

### Synchronous usage

```shell
In [1]: import simple_http

In [2]: res = simple_http.get("https://github.com")

In [4]: res["status"]
Out[4]: 200

In [5]: res["message"]
Out[5]: 'OK'

In [6]: res["protocol"]
Out[6]: 'HTTP/1.1'

In [7]: res["header"]
Out[7]:
{'Cache-Control': 'no-cache',
 'Content-Security-Policy': "default-src *; script-src assets-cdn.github.com collector-cdn.github.com; object-src assets-cdn.github.com; style-src 'self' 'unsafe-inline' 'unsafe-eval' assets-cdn.github.com; img-src 'self' data:assets-cdn.github.com identicons.github.com www.google-analytics.com collector.githubapp.com *.githubusercontent.com *.gravatar.com *.wp.com; media-src 'none'; frame-src 'self' render.githubusercontent.com gist.github.com www.youtube.com player.vimeo.com checkout.paypal.com; font-src assets-cdn.github.com; connect-src 'self' live.github.com wss://live.github.com uploads.github.com status.github.com api.github.com www.google-analytics.com github-cloud.s3.amazonaws.com",
 'Content-Type': 'text/html; charset=utf-8',
 'Date': 'Thu, 21 May 2015 15:38:29 GMT',
 'Server': 'GitHub.com',
 'Set-Cookie': 'logged_in=no; domain=.github.com; path=/; expires=Mon, 21 May 2035 15:38:29 -0000; secure; HttpOnly\r\n_gh_sess=eyJzZXNzaW9uX2lkIjoiNzk3MWNkZDEzZDJhOTA2NzZjYTEzYjExZDYxN2VhMjMiLCJfY3NyZl90b2tlbiI6IjZ0OENRUllFWjQ4NVlud2VGaC96aGVRbTBsZSs2K1FCVTJxcTdNSjlIM0E9In0%3D--1a2444a9b86de98df0ea2556dbc5644b239aa7b0; path=/; secure; HttpOnly',
 'Status': '200 OK',
 'Strict-Transport-Security': 'max-age=31536000; includeSubdomains; preload',
 'Transfer-Encoding': 'chunked',
 'Vary': 'Accept-Encoding',
 'X-Content-Type-Options': 'nosniff',
 'X-Frame-Options': 'deny',
 'X-GitHub-Request-Id': '774ED627:7040:139FB8B:555DFBF4',
 'X-Request-Id': 'c23d860f8a94048323f1185a15176c13',
 'X-Runtime': '0.007538',
 'X-Served-By': '63914e33d55e1647962cf498030a7c16',
 'X-UA-Compatible': 'IE=Edge,chrome=1',
 'X-XSS-Protection': '1; mode=block'}
```
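On failure the synchronous client raises `socket.error` (connection trouble, endless redirects, malformed responses) rather than returning a partial result, so a defensive call looks like this (a minimal sketch):

```python
import socket
import simple_http

try:
    res = simple_http.get("https://github.com", timeout=10)
except socket.error as e:
    print "request failed:", e
else:
    print res["status"], len(res["text"])
```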
```shell
In [8]: res["text"][:100]
Out[8]: '...'
```

Fetching a URL that answers with a redirect returns the 302 page itself unless redirects are enabled (output abridged; compare the example below):

```shell
{...,
 'text': '\r\n302 Found\r\n\r\n302 Found\r\nbfe/1.0.8.2\r\n\r\n\r\n',
 'total_length': 160,
 'url': 'http://baidu.com'}
```
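`redirect` defaults to 1, meaning a single request and no following; the target of a 3xx answer is still available in its Location header (a minimal sketch):

```python
import simple_http

res = simple_http.get("http://baidu.com")
if res["status"] in (301, 302, 303, 307):
    print "points to", res["header"].get("Location")
```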
```shell
In [6]: res = simple_http.get("http://www.baidu.com", redirect=10)
redirect to https://www.baidu.com/

In [7]: res["status"]
Out[7]: 200

In [8]: res["header"]
Out[8]:
{'BDPAGETYPE': '1',
 'BDQID': '0x857e6d5d0000bf0d',
 'BDUSERID': '0',
 'Cache-Control': 'private',
 'Connection': 'keep-alive',
 'Content-Encoding': 'gzip',
 'Content-Type': 'text/html; charset=utf-8',
 'Cxy_all': 'baidu+f132f05584d0062745fea455fbb7d59f',
 'Date': 'Thu, 21 May 2015 15:54:44 GMT',
 'Expires': 'Thu, 21 May 2015 15:54:44 GMT',
 'Server': 'bfe/1.0.8.2',
 'Set-Cookie': 'BDSVRTM=11; path=/\r\nBD_HOME=0; path=/\r\nH_PS_PSSID=13782_1426_13519_13075_12868_14166_14297_10562_12722_14155_14172_13203_14244_11518_13932_14309_14321_14182_8498_14195; path=/; domain=.baidu.com\r\n__bsi=11945547936248309498_00_34_N_N_17_0303_C02F_N_N_N; expires=Thu, 21-May-15 15:54:49 GMT; domain=www.baidu.com; path=/',
 'Transfer-Encoding': 'chunked',
 'Vary': 'Accept-Encoding',
 'X-Powered-By': 'HPHP',
 'X-UA-Compatible': 'IE=Edge,chrome=1'}
```
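Note the `Content-Encoding: gzip` and `Transfer-Encoding: chunked` fields above: `send_http` reassembles chunked bodies and inflates gzip/deflate automatically, so `res["text"]` is plain HTML by the time it is returned. For example:

```python
import simple_http

res = simple_http.get("http://www.baidu.com", redirect=10)
print res["header"].get("Content-Encoding")   # 'gzip' on the wire
print res["text"][:15]                        # body is already inflated
```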
### Using a custom header
By default simple_http sends the Chrome User-Agent defined in `default_header`; pass your own dict to replace it:
```python
myheader = {
    "Accept": ...
}
simple_http.get("https://google.com", header=myheader)
```
### Using cookies
```python
cookie = {
    "name": "value"
}
simple_http.get("https://github.com", cookie=cookie)
```
### Reusing cookies from a response
`get_cookie` flattens the parsed Set-Cookie structure in `res["cookie"]` into a plain name-to-value dict that can be passed back via the `cookie` argument:
```python
simple_http.get_cookie(res["cookie"])
```
### GET with query parameters
```python
query = {
    "params": "value"
}
simple_http.get("https://google.com", query=query)
```
### POST with parameters
```python
payload = {
    "params": "value"
}
simple_http.post("https://google.com", payload=payload)
```
### POST with a file
```python
payload = {
    "name": open("test", "r")
}
simple_http.post("https://google.com", payload=payload)
```
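These options compose in a single call; a hypothetical login form mixing a custom header, a preset cookie, a query string and form fields (the URL and all field names are made up):

```python
import simple_http

header = simple_http.default_header.copy()
header["Referer"] = "https://example.com/login"

res = simple_http.post("https://example.com/login",
                       header=header,
                       cookie={"session": "abc123"},
                       query={"lang": "en"},
                       payload={"user": "alice", "password": "secret"})
print res["status"]
```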

### Using a proxy
#### Socks5
```shell
In [3]: simple_http.get("https://google.com", proxy='socks5://127.0.0.1:9988')
redirect to https://www.google.co.jp/?gfe_rd=cr&ei=phuEVfPKEYuT8QfC4YCgBA
Out[3]:
{'cookie': {},
 'header': {'Alternate-Protocol': '443:quic,p=1',
            'Cache-Control': 'private',
            'Content-Length': '262',
            'Content-Type': 'text/html; charset=UTF-8',
            'Date': 'Fri, 19 Jun 2015 13:39:50 GMT',
            'Location': 'https://www.google.co.jp/?gfe_rd=cr&ei=phuEVfPKEYuT8QfC4YCgBA',
            'Server': 'GFE/2.0'},
 'message': 'Found',
 'protocol': 'HTTP/1.1',
 'status': 302,
 'text': '\n302 Moved\n302 Moved\nThe document has moved\nhere.\r\n\r\n',
 'total_length': 262,
 'url': 'https://google.com'}
```
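With a SOCKS5 proxy the hostname is sent to the proxy for remote resolution (the request uses the domain-name address type), and proxying composes with the other options, e.g. following the bounce automatically (assumes a local proxy on port 9988, as above):

```python
import simple_http

res = simple_http.get("https://google.com",
                      proxy="socks5://127.0.0.1:9988",
                      redirect=5)
print res["url"], res["status"]
```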
#### HTTP proxy
```shell
In [3]: simple_http.get("https://google.com", proxy='http://127.0.0.1:9988')
redirect to https://www.google.co.jp/?gfe_rd=cr&ei=phuEVfPKEYuT8QfC4YCgBA
Out[3]:
{'cookie': {},
 'header': {'Alternate-Protocol': '443:quic,p=1',
            'Cache-Control': 'private',
            'Content-Length': '262',
            'Content-Type': 'text/html; charset=UTF-8',
            'Date': 'Fri, 19 Jun 2015 13:39:50 GMT',
            'Location': 'https://www.google.co.jp/?gfe_rd=cr&ei=phuEVfPKEYuT8QfC4YCgBA',
            'Server': 'GFE/2.0'},
 'message': 'Found',
 'protocol': 'HTTP/1.1',
 'status': 302,
 'text': '\n302 Moved\n302 Moved
\nThe document has moved\nhere.\r\n\r\n', 230 | 'total_length': 262, 231 | 'url': 'https://google.com'} 232 | ``` 233 | 234 | 235 | 236 | -------------------------------------------------------------------------------- /_http.py: -------------------------------------------------------------------------------- 1 | # -*-encoding=utf-8-*- 2 | import os.path 3 | import base64 4 | import string 5 | 6 | from uuid import uuid4 7 | 8 | try: 9 | import ssl 10 | 11 | has_ssl = True 12 | except: 13 | has_ssl = False 14 | 15 | convert_table = [0 for x in range(256)] 16 | 17 | # url reversed characters 18 | reversed_table = { 19 | "\x21": "%21", # ! 20 | "\x23": "%23", ## 21 | "\x24": "%24", # $ 22 | "\x26": "%26", # & 23 | "\x27": "%27", # ' 24 | "\x28": "%28", # ( 25 | "\x29": "%29", # ) 26 | "\x2A": "%2A", # * 27 | "\x2B": "%2B", # + 28 | "\x2C": "%2C", # , 29 | "\x2F": "%2F", # / 30 | "\x3A": "%3A", #: 31 | "\x3B": "%3B", # ; 32 | "\x3D": "%3D", # = 33 | "\x3F": "%3F", # ? 34 | "\x40": "%40", # @ 35 | "\x5B": "%5B", # [ 36 | "\x5D": "%5D" # ] 37 | } 38 | 39 | # 使用0x0 - xff数组优化查找 40 | for k, v in reversed_table.items(): 41 | convert_table[ord(k)] = v 42 | 43 | # url common characters 44 | common_chars_table = { 45 | "\x20": "%20", # space 46 | "\x22": "%22", # " 47 | "\x25": "%25", # % 48 | # "\x2D": "%2D", #- 49 | # 0x2E: "%2E", #. 50 | "\x3C": "%3C", # < 51 | "\x3E": "%3E", # > 52 | "\x5C": "%5C", # \ 53 | "\x5E": "%5E", # ^ 54 | # 0x5F: "%5F", #_ 55 | "\x60": "%60", # ` 56 | "\x7B": "%7B", # { 57 | "\x7C": "%7C", # | 58 | "\x7D": "%7D", # } 59 | "\x7E": "%7E" # ~ 60 | } 61 | 62 | # 使用0x0 - 0xff数组优化查找 63 | for k, v in common_chars_table.items(): 64 | convert_table[ord(k)] = v 65 | 66 | # 是否是字符 67 | letters = [0 for x in range(256)] 68 | for x in string.letters: 69 | letters[ord(x)] = 1 70 | 71 | hex_digits_set = set(string.hexdigits) 72 | 73 | default_header = { 74 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 75 | # "Accept-Encoding": "gzip, deflate", 76 | "Accept-Language": "zh,zh-cn;q=0.8,en-us;q=0.5,en;q=0.3", 77 | "Connection": "keep-alive", 78 | "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36" 79 | } 80 | 81 | download_header = { 82 | "Accept": "*/*", 83 | "Connection": "Keep-Alive" 84 | } 85 | 86 | image_header = { 87 | "Accept": "*/*", 88 | "Accept-Encoding": "gzip, deflate", 89 | "Connection": "Keep-Alive", 90 | } 91 | 92 | 93 | html_header = { 94 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 95 | "Accept-Encoding": "gzip, deflate", 96 | "Accept-Language": "zh,zh-cn;q=0.8,en-us;q=0.5,en;q=0.3", 97 | "Connection": "keep-alive", 98 | "Cache-Control": "no-cache", 99 | "User-Agent": "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36", 100 | } 101 | 102 | json_header = { 103 | "Accept": "application/json,text/javascript,*/*;q=0.01", 104 | "Accept-Language": "zh,zh-cn;q=0.8,en-us;q=0.5,en;q=0.3", 105 | "Connection": "keep-alive", 106 | "Content-Type": "application/x-www-form-urlencoded", 107 | "Cache-Control": "no-cache", 108 | "User-Agent": "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36", 109 | "X-Requested-With": "XMLHttpRequest" 110 | } 111 | 112 | 113 | 114 | # common mimetypes 115 | common_types = { 116 | "pdf": "application/pdf", 117 | "zip": "application/zip", 118 | "gz": "application/x-gzip", 119 | "doc": "application/msword", 120 | "ogg": 
"application/ogg", 121 | "default": "application/octet-stream", 122 | "json": "application/json", 123 | "xml": "application/xml", 124 | "js": "application/x-javascript", 125 | "7z": "application/x-7z-compressed", 126 | "deb": "application/x-deb", 127 | "tar": "application/x-tar", 128 | "swf": "application/x-shockwave-flash", 129 | "torrent": "application/x-bittorrent", 130 | "bmp": "image/bmp", 131 | "gif": "image/gif", 132 | "jpg": "image/jpeg", 133 | "jpeg": "image/jpeg", 134 | "png": "image/png", 135 | "svg": "image/svg+xml", 136 | "tiff": "image/tiff", 137 | "mp3": "audio/mpeg", 138 | "wav": "audio/x-wav", 139 | "css": "text/css", 140 | "text": "text/plain", 141 | "html": "text/html", 142 | "vcard": "text/vcard", 143 | "md": "text/x-markdown", 144 | "mov": "video/quicktime", 145 | "mp4": "video/mp4", 146 | "mkv": "video/x-matroska", 147 | "wmv": "video/x-ms-wmv", 148 | "flv": "video/x-flv", 149 | "mpg": "video/mpeg", 150 | "mpeg": "video/mpeg" 151 | } 152 | 153 | 154 | # http consts 155 | resp_codes = { 156 | 100: "100 Continue", 157 | 101: "101 Switching Protocols", 158 | 102: "102 Processing", 159 | 200: "200 OK", 160 | 201: "201 Created", 161 | 202: "202 Accepted", 162 | 203: "203 Non-Authoritative Information", 163 | 204: "204 No Content", 164 | 205: "205 Reset Content", 165 | 206: "206 Partial Content", 166 | 300: "300 Multiple Choices", 167 | 301: "301 Moved Permanently", 168 | 302: "302 Found", 169 | 303: "303 See Other", 170 | 304: "304 Not Modified", 171 | 305: "305 Use Proxy", 172 | 306: "306 Switch Proxy", 173 | 307: "307 Temporary Redirect", 174 | 400: "400 Bad Request", 175 | 401: "401 Unauthorized", 176 | 402: "402 Payment Required", 177 | 403: "403 Forbidden", 178 | 404: "404 Not Found", 179 | 405: "405 Method Not Allowed", 180 | 406: "406 Not Acceptable", 181 | 407: "407 Proxy Authentication Required", 182 | 408: "408 Request Timeout", 183 | 409: "409 Conflict", 184 | 410: "410 Gone", 185 | 411: "411 Length Required", 186 | 412: "412 Precondition Failed", 187 | 413: "413 Request Entity Too Large", 188 | 414: "414 Request-URI Too Long", 189 | 415: "415 Unsupportd Media Type", 190 | 416: "416 Requested Range Not Satisfiable", 191 | 417: "417 Expectation Failed", 192 | 418: "418 I'm a teapot", 193 | 421: "421 There are too many remotes from your Internet Address", 194 | 422: "422 Unprocessable Entity", 195 | 423: "423 Locked", 196 | 424: "424 Failed Dependency", 197 | 425: "425 Unordered Collection", 198 | 426: "426 Upgrade Required", 199 | 449: "449 Retry With", 200 | 500: "500 Internal Server Error", 201 | 501: "501 Not implemented", 202 | 502: "502 Bad Gateway", 203 | 503: "503 Service Unavailable", 204 | 504: "504 Gateway Timeout", 205 | 505: "505 HTTP Version Not Supported", 206 | 506: "506 Variant Also Negotiates", 207 | 507: "507 Insufficient Storage", 208 | 509: "509 Bandwidth Limit Exceeded", 209 | 510: "Not Extended" 210 | } 211 | 212 | default_timeout = 30 213 | 214 | HTTP_VERSION = "HTTP/1.1" 215 | HEADER_END = "\x0d\x0a\x0d\x0a" 216 | HEADER_END2 = "\n\n" 217 | METHOD_GET = "GET" 218 | METHOD_POST = "POST" 219 | METHOD_DELETE = "DELETE" 220 | METHOD_PUT = "PUT" 221 | METHOD_OPTIONS = "OPTIONS" 222 | METHOD_TRACE = "TRACE" 223 | METHOD_HEAD = "HEAD" 224 | 225 | 226 | def set_boundary(uid): 227 | g = globals() 228 | g["BOUNDARY"] = uid 229 | g["BOUNDARY_STRING"] = "--%s\r\n" % BOUNDARY 230 | g["BOUNDARY_END"] = "--%s--" % BOUNDARY 231 | g["FORM_FILE"] = 'Content-Disposition: form-data; name="%s"; filename="%s"\r\nContent-Type: %s\r\n\r\n' 232 | g["FORM_STRING"] 
= 'Content-Disposition: form-data; name="%s"\r\n\r\n%s\r\n' 233 | g["FORM_SIMPLE_TYPE"] = "application/x-www-form-urlencoded" 234 | g["FORM_COMPLEX_TYPE"] = "multipart/form-data; boundary=%s" % BOUNDARY 235 | 236 | 237 | set_boundary(uuid4().hex) 238 | 239 | 240 | def basic_auth_msg(user, password): 241 | if user and password: 242 | return "Basic %s" % base64.b64encode("%s:%s" % (user, password)) 243 | if user: 244 | return "Basic %s" % base64.b64encode(user) 245 | 246 | 247 | def proxy_auth_msg(proxy): 248 | proxyd = urlparse(proxy) 249 | if proxyd["schema"] == "http": 250 | return basic_auth_msg(proxyd.get("user"), proxyd.get("password")) 251 | 252 | 253 | def generate_query(query): 254 | ql = [] 255 | for k, v in query.items(): 256 | ql.append("%s=%s" % (quote_plus(k), quote_plus(v))) 257 | return "&".join(ql) 258 | 259 | 260 | def parse_query(query): 261 | qd = {} 262 | for q in query.split("&"): 263 | i = q.find("=") 264 | if i > -1: 265 | qd[unquote_plus(q[:i])] = unquote_plus(q[i + 1:]) 266 | else: 267 | qd[unquote_plus(q)] = None 268 | return qd 269 | 270 | 271 | def generate_simple_post(payload): 272 | pl = [] 273 | for k, v in payload.items(): 274 | pl.append("%s=%s" % (quote_plus(k), quote_plus(v))) 275 | return "&".join(pl) 276 | 277 | 278 | def generate_complex_post(payload): 279 | cl = [] 280 | for k, v in payload.items(): 281 | if isinstance(v, str): 282 | cl.append(BOUNDARY_STRING) 283 | cl.append(FORM_STRING % (k, v)) 284 | if isinstance(v, file): 285 | filename = os.path.basename(v.name) 286 | if not filename: 287 | filename = "unknown" 288 | cl.append(BOUNDARY_STRING) 289 | cl.append(FORM_FILE % (k, filename, 290 | auto_content_type(filename))) 291 | cl.append(v.read()) 292 | cl.append("\r\n") 293 | cl.append(BOUNDARY_END) 294 | return cl 295 | 296 | 297 | def generate_post(header, payload): 298 | if isinstance(payload, str): 299 | return payload 300 | elif isinstance(payload, unicode): 301 | return payload.encode("utf-8") 302 | elif isinstance(payload, dict): 303 | has_file = False 304 | # use multipart/form-data or not 305 | for k, v in payload.items(): 306 | if isinstance(v, unicode): 307 | payload[k] = v.encode("utf-8") 308 | elif isinstance(v, file): 309 | has_file = True 310 | elif isinstance(v, str): 311 | continue 312 | else: 313 | raise Exception("payload value: str or unicode or fileobject") 314 | if has_file: 315 | header["Content-Type"] = FORM_COMPLEX_TYPE 316 | return "".join(generate_complex_post(payload)) 317 | else: 318 | header["Content-Type"] = FORM_SIMPLE_TYPE 319 | cl = generate_simple_post(payload) 320 | return "".join(cl) 321 | else: 322 | raise ValueError("unknown payload type: %s" % type(payload)) 323 | 324 | 325 | def quote(url): 326 | result = [] 327 | for char in url.decode("utf-8"): 328 | x = ord(char) 329 | if x < 256: 330 | if convert_table[x]: 331 | result.append(convert_table[x]) 332 | else: 333 | result.append(char) 334 | else: 335 | result.extend(["%" + i.encode("hex").upper() for i in char.encode("utf-8")]) 336 | return "".join(result) 337 | 338 | 339 | # fix bug 340 | def unquote(url): 341 | ret = [] 342 | i = 0 343 | ulen = len(url) 344 | if not ulen: 345 | return url 346 | while i < ulen: 347 | char = url[i] 348 | if char == "%": 349 | ret.append(url[i + 1:i + 3].decode("hex")) 350 | i = i + 3 351 | else: 352 | ret.append(char) 353 | i += 1 354 | return "".join(ret) 355 | 356 | 357 | def quote_plus(url): 358 | if ' ' in url: 359 | # dirty hack 360 | convert_table[0x20] = '+' 361 | ret = quote(url) 362 | convert_table[0x20] = '%20' 
363 | return ret 364 | return quote(url) 365 | 366 | 367 | def unquote_plus(url): 368 | url = url.replace("+", " ") 369 | return unquote(url) 370 | 371 | 372 | def auto_content_type(name): 373 | dot_offset = name.rfind(".") 374 | if dot_offset < 0: 375 | return common_types["default"] 376 | else: 377 | return common_types.get(name[dot_offset + 1:], common_types["default"]) 378 | 379 | 380 | def generate_url(d): 381 | ret = [] 382 | if "schema" in d: 383 | ret.append(d["schema"] + "://") 384 | if "user" in d: 385 | ret.append(d["user"]) 386 | if "password" in d: 387 | ret.append(":" + d["password"]) 388 | ret.append("@") 389 | if "host" in d: 390 | ret.append(d["host"]) 391 | if "port" in d: 392 | ret.append(":" + str(d["port"])) 393 | if "path" in d: 394 | if not d["path"].startswith("/"): 395 | ret.append("/") 396 | ret.append(d["path"]) 397 | if "query" in d: 398 | ret.append("?" + d["query"]) 399 | if "params" in d: 400 | if isinstance(d["params"], list): 401 | ret.append(";" + ";".join(d["params"])) 402 | else: 403 | ret.append(";") 404 | ret.append(d["params"]) 405 | if "fragment" in d: 406 | ret.append("#") 407 | ret.append(d["fragment"]) 408 | return "".join(ret) 409 | 410 | 411 | def urlparse(url): 412 | d = {} 413 | order = (("fragment", url.rfind("#")), 414 | ("params", url.rfind(";")), 415 | ("query", url.rfind("?"))) 416 | order = sorted(order, key=lambda x: x[1], reverse=True) 417 | pv = len(url) 418 | for n, i in order: 419 | if i > 0: 420 | if n == "query" and url[i - 1] == "?": 421 | continue 422 | d[n] = url[i + 1:pv] 423 | pv = i 424 | else: 425 | break 426 | ne = url[:pv] 427 | hps = ne.split("://") 428 | host = hps[0] 429 | if len(hps) > 1: 430 | d["schema"] = hps[0] 431 | host = hps[1] 432 | # http://v2ex.com/static/img/qbar_light@2x.png' 433 | us = host.find("@") 434 | p = host.find("/") 435 | if 0 < us < p: 436 | user = host[:us] 437 | host = host[us + 1:] 438 | ac = user.split(":") 439 | if len(ac) > 1: 440 | user = ac[0] 441 | d["password"] = ac[1] 442 | d["user"] = user 443 | p = host.find("/") 444 | if p > 0: 445 | d["path"] = host[p + 1:] 446 | host = host[:p] 447 | ph = host.split(":") 448 | port = None 449 | if len(ph) > 1: 450 | host = ph[0] 451 | d["port"] = ph[1] 452 | d["host"] = host 453 | if "path" not in d: 454 | d["path"] = "/" 455 | return d 456 | 457 | 458 | def generate_cookie(cookie): 459 | ret = [] 460 | has_unicode = False 461 | for k, v in cookie.items(): 462 | if isinstance(k, unicode) or isinstance(v, unicode): 463 | has_unicode = True 464 | ret.append("%s=%s; " % (k, v)) 465 | if has_unicode: 466 | return "".join(ret)[:-2].encode("utf-8") 467 | else: 468 | return "".join(ret)[:-2] 469 | 470 | 471 | def parse_cookie(cookie): 472 | cd = {} 473 | for cookie in cookie.split(";"): 474 | kv = cookie.split("=") 475 | cd[kv[0].strip()] = kv[1].strip() 476 | return cd 477 | 478 | 479 | def generate_setcookie(cl): 480 | ret = [] 481 | for cd in cl: 482 | items_list = [] 483 | for k, v in cd.items(): 484 | if k == "cookie": 485 | items_list.append('%s; ' % v) 486 | else: 487 | items_list.append('%s=%s; ' % (k, v)) 488 | ret.append("".join(items_list)[:-2]) 489 | ret.append("\r\n") 490 | return "".join(ret)[:-2] 491 | 492 | 493 | def parse_setcookie(data): 494 | cl = {} 495 | for line in data.split("\r\n"): 496 | lines = line.split(";") 497 | cookie = {} 498 | vl = lines[0] 499 | idx = vl.find("=") 500 | name = vl[:idx] 501 | value = vl[idx + 1:] 502 | cookie["value"] = value 503 | for part in lines[1:]: 504 | kv = part.split("=") 505 | # path=/ or 
httponly 506 | key = kv[0].strip() 507 | if len(kv) == 2: 508 | cookie[key] = kv[1] 509 | else: 510 | cookie[key] = True 511 | cl[name] = cookie 512 | return cl 513 | 514 | 515 | def get_cookie(cookies): 516 | c = {} 517 | for key, value in cookies.items(): 518 | c[key] = value["value"] 519 | return c 520 | 521 | 522 | def parse_simple_post(data): 523 | post_dict = {} 524 | for i in data.split("&"): 525 | k, v = i.replace("+", " ").split("=") 526 | post_dict[unquote_plus(k)] = unquote_plus(v) 527 | return post_dict 528 | 529 | 530 | def generate_request_header(header, method, path): 531 | sl = "%s %s %s\r\n" % (method, path, HTTP_VERSION) 532 | b = [sl] 533 | for k, v in header.items(): 534 | b.append("%s: %s\r\n" % (k, v)) 535 | return "".join(b) 536 | 537 | 538 | def generate_response_header(header, status): 539 | sl = "%s %s\r\n" % (HTTP_VERSION, resp_codes["status"]) 540 | b = [sl] 541 | for k, v in header.items(): 542 | b.append("%s: %s\r\n" % (k, v)) 543 | return "".join(b) 544 | 545 | 546 | def parse_request_header(text): 547 | parts = text.split("\r\n") 548 | status_parts = parts[0].split(" ") 549 | status = { 550 | "method": status_parts[0], 551 | "path": status_parts[1], 552 | "protocol": status_parts[-1] 553 | } 554 | header = {} 555 | last = None 556 | for line in parts[1:]: 557 | kv = [x.strip() for x in line.split(":")] 558 | # maybe : in value 559 | if len(kv) > 2: 560 | kv[1] = ":".join(kv[1:]) 561 | # maybe multiple lines 562 | if len(kv) == 1: 563 | header[last[0]] += "\r\n" + "".join(kv) 564 | continue 565 | last = kv 566 | header[kv[0]] = kv[1] 567 | cookie = header.get("Cookie", {}) 568 | if cookie: 569 | del header["Cookie"] 570 | cookie = parse_cookie(cookie) 571 | return status, cookie, header 572 | 573 | 574 | def parse_response_header(text): 575 | parts = text.split("\r\n") 576 | status_parts = parts[0].split(" ") 577 | status = { 578 | "protocol": status_parts[0], 579 | "status": int(status_parts[1]), 580 | "message": " ".join(status_parts[2:]) 581 | } 582 | header = {} 583 | last = None 584 | for line in parts[1:]: 585 | kv = [x.strip() for x in line.split(":")] 586 | # maybe : in value 587 | if len(kv) > 2: 588 | kv[1] = ":".join(kv[1:]) 589 | # maybe multiple lines 590 | if len(kv) == 1: 591 | header[last[0]] += "\r\n" + kv[0] 592 | continue 593 | last = kv 594 | key = kv[0] 595 | if key in header and "set-cookie" in key.lower(): 596 | header[key] += "\r\n" + kv[1] 597 | else: 598 | header[kv[0]] = kv[1] 599 | cookie = {} 600 | cookie1 = header.get("Set-Cookie") 601 | if cookie1: 602 | cookie.update(parse_setcookie(cookie1)) 603 | cookie2 = header.get("Set-Cookie2") 604 | if cookie2: 605 | cookie.update(parse_setcookie(cookie2)) 606 | return status, cookie, header 607 | -------------------------------------------------------------------------------- /async_http.py: -------------------------------------------------------------------------------- 1 | # -*-encoding=utf-8-*- 2 | # ! 
/usr/bin/env python 3 | import ctypes 4 | import socket 5 | import os 6 | import pdb 7 | import errno 8 | import pprint 9 | import io 10 | import re 11 | import subprocess 12 | import random 13 | import struct 14 | import traceback 15 | 16 | import time 17 | import zlib 18 | from cStringIO import StringIO 19 | from select import * 20 | from _http import * 21 | 22 | 23 | def get_random(): 24 | return os.urandom(16).encode("hex") 25 | 26 | 27 | def random_header(): 28 | h = default_header.copy() 29 | h["isp-tracker"] = get_random() 30 | return h 31 | 32 | 33 | def genua_firefox(): 34 | v = random.choice(b_ver["firefox"]) 35 | os = random.choice(os_str["firefox"]) 36 | return base_agents["firefox"] % (os % v, v) 37 | 38 | 39 | def genua_mac(): 40 | return base_agents["safari"] % random.choice(b_ver["safari"]) 41 | 42 | 43 | def genua_opera(): 44 | return base_agents["opera"] % random.choice(b_ver["opera"]) 45 | 46 | 47 | ua_tps = { 48 | "firefox": genua_firefox, 49 | "safari": genua_mac, 50 | "opera": genua_opera 51 | } 52 | 53 | base_agents = { 54 | "firefox": "Mozilla/5.0 (%s) Gecko/20100101 Firefox/%s.0", 55 | "opera": "Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.12.388 Version/11.%s", 56 | "safari": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_1; zh-cn) AppleWebKit/537.22.1 (KHTML, like Gecko) Version/7.0.3 Safari/534.%s.1" 57 | } 58 | 59 | os_str = { 60 | "firefox": ( 61 | "Windows NT 6.3; rv: %s.0", 62 | "Macintosh; Intel Mac OS X 10_10; rv: %s.0", 63 | "X11; Linux x86_64; rv: %s.0", 64 | ) 65 | } 66 | 67 | b_ver = { 68 | "firefox": range(9, 3300), 69 | "opera": range(1, 6200), 70 | "safari": range(1, 5300), 71 | } 72 | 73 | 74 | def random_useragent(): 75 | ua = random.choice(ua_tps.keys()) 76 | return ua_tps[ua]() 77 | 78 | 79 | def nope_parser(body): 80 | pass 81 | 82 | 83 | fd_task = {} 84 | 85 | tasks = {} 86 | 87 | g = globals() 88 | 89 | debug = False 90 | 91 | config = { 92 | "limit": 20, 93 | "timeout": 30, 94 | "interval": 1, 95 | "retry": True, 96 | "retry_limit": 10, 97 | } 98 | 99 | failed_tasks = {} 100 | 101 | on_failed = None 102 | on_timeout = None 103 | on_socket_error = None 104 | 105 | internal_keys = set(("con", 106 | "recv", 107 | "send", 108 | "status", 109 | "fd", 110 | "random", 111 | "start", 112 | "res_status", 113 | "res_cookie", 114 | "res_header", 115 | "text", 116 | "why", 117 | "ssl", 118 | "reason", 119 | "header_only" 120 | "chunked_idx", 121 | "chunked_b", 122 | "socks5_request_message", 123 | "socks5_proxy", 124 | "socks5_request_content", 125 | )) 126 | 127 | possible_methods = set(("GET", 128 | "POST", 129 | "HEAD", 130 | "PUT", 131 | "DELETE")) 132 | 133 | 134 | def default_copy_func(task): 135 | t = {} 136 | for i in task: 137 | if i not in internal_keys: 138 | t[i] = task[i] 139 | return t 140 | 141 | 142 | def generate_request(task): 143 | assert task["url"] and task["method"].upper() in possible_methods 144 | url = task["url"] 145 | rl = [] 146 | url_parts = urlparse(url) 147 | if task.get("query"): 148 | url_parts["query"] = generate_query(task["query"]) 149 | if "header" in task: 150 | if not task["header"]: 151 | header = default_header.copy() 152 | else: 153 | header = task["header"] 154 | else: 155 | header = default_header.copy() 156 | host = url_parts["host"] 157 | if "port" in url_parts: 158 | port = int(url_parts["port"]) 159 | header["Host"] = "%s:%d" % (host, port) 160 | else: 161 | port = 80 162 | header["Host"] = host 163 | if url_parts.get("schema") == "https": 164 | task["ssl"] = True 165 | port = 443 166 | if 
task.get("proxy", "").startswith("socks5"): 167 | task["ssl_mark"] = True 168 | else: 169 | task["ssl"] = False 170 | if "port" in url_parts: 171 | port = int(url_parts["port"]) 172 | # 没代理 173 | if not task.get("proxy"): 174 | del url_parts["schema"] 175 | del url_parts["host"] 176 | if "port" in url_parts: 177 | del url_parts["port"] 178 | else: 179 | # 有代理更新, 连接点换成代理 180 | pd = urlparse(task["proxy"]) 181 | if pd["schema"] in "https": 182 | pass 183 | elif pd["schema"].lower() == "socks5": 184 | if "ssl" in task: 185 | del task["ssl"] 186 | task["socks5_proxy"] = True 187 | task["socks5_request_message"] = "\x05\x01\x00\x03%s%s%s" % (struct.pack("B", len(host)), host, struct.pack(">H", port)) 188 | else: 189 | raise Exception("不支持的代理格式") 190 | host = pd["host"] 191 | port = int(pd["port"]) 192 | # 不处理fragment 193 | if "fragment" in url_parts: 194 | del url_parts["fragment"] 195 | path = generate_url(url_parts) 196 | method = task.get("method", METHOD_GET).upper() 197 | if method not in possible_methods: 198 | raise ValueError("unsupported method: %s" % method) 199 | if method in ("POST", "PUT"): 200 | content = generate_post(header, task["payload"]) 201 | header["Content-Length"] = str(len(content)) 202 | rl.append(generate_request_header(header, method, path)) 203 | if task.get("cookie"): 204 | rl.append("Cookie: ") 205 | rl.append(generate_cookie(task["cookie"])) 206 | rl.append(HEADER_END) 207 | else: 208 | rl.append("\r\n") 209 | if method in ("POST", "PUT"): 210 | rl.append(content) 211 | body = "".join(rl) 212 | if task.get("socks5_proxy"): 213 | task["socks5_request_content"] = body 214 | return (host, port), body 215 | 216 | 217 | # dns缓存 218 | 219 | dns_preset = { } 220 | 221 | dns_buffer = dns_preset.copy() 222 | 223 | dns_list = [ 224 | #alidns 225 | "223.5.5.5", 226 | #dnspod 227 | "119.29.29.29", 228 | #114, 229 | "114.114.114.114", 230 | ] 231 | 232 | 233 | def safe_dns_request(host, port): 234 | while True: 235 | try: 236 | addrs = socket.getaddrinfo(host, port) 237 | return addrs[0][-1] 238 | except: 239 | for i in dns_list: 240 | try: 241 | out = subprocess.check_output("dig @%s %s" % (i, host), shell=True) 242 | except subprocess.CalledProcessError: 243 | raise OSError("public dns: %s, dig exception, dns domain %s" % (i, host)) 244 | addrs = re.findall("ANSWER SECTION.*?([0-9.]{7,15}).*?;;", out, re.S) 245 | if addrs: 246 | return (addrs[0], port) 247 | else: 248 | if "SOA" in out: 249 | raise OSError("public dns: %s, unknown domain %s" % (i, host)) 250 | print "lookup dns server %s for %s failed" % (i, host) 251 | print out 252 | time.sleep(1) 253 | 254 | 255 | def set_dns_buffer(hosts): 256 | for i in hosts: 257 | d = urlparse(i) 258 | if "port" in d: 259 | port = int(d["port"]) 260 | else: 261 | port = 80 262 | host = d["host"] 263 | start = time.time() 264 | ret = safe_dns_request(host, port) 265 | cost = time.time() - start 266 | if cost > 1: 267 | print "dns slow query: %s, %s" % (cost, host) 268 | if not len(ret): 269 | raise socket.error("dns query failed: %s" % host) 270 | dns_buffer["%s:%s" % (host, port)] = ret[0][-1] 271 | 272 | 273 | def auto_redirect(task): 274 | task["redirect"] = task["redirect"] - 1 275 | l1 = task["res_header"].get("Location") 276 | if not l1: 277 | l1 = task["res_header"].get("location") 278 | if not l1: 279 | log_with_time("redirect without a location: %s" % str(task["url"])) 280 | return 281 | if not l1.startswith("http"): 282 | d = urlparse(task["url"]) 283 | if l1.startswith("//"): 284 | l1 = "%s:%s" % (d["schema"], l1) 285 | 
else: 286 | if l1.startswith("/"): 287 | l1 = l1[1:] 288 | if not d.get("port"): 289 | l1 = "%s://%s/%s" % (d["schema"], d["host"], l1) 290 | else: 291 | l1 = "%s://%s:%d/%s" % (d["schema"], d["host"], d["port"], l1) 292 | log_with_time("redirect to: %s" % l1) 293 | urlsset = task["urlsset"] 294 | if l1 in urlsset: 295 | urlsset[l1] += 1 296 | else: 297 | urlsset[l1] = 1 298 | if urlsset[l1] > 3 or len(urlsset) > 10: 299 | log_with_time("endless redirect: %s" % l1) 300 | remove_task(task, why="endless redirect") 301 | return 302 | if not "cookie" in task: 303 | task["cookie"] = {} 304 | task["cookie"].update(get_cookie(task["res_cookie"])) 305 | task["url"] = l1 306 | if task.get("patch_redirect"): 307 | task = task["patch_redirect"](task) 308 | insert_task(task) 309 | 310 | 311 | def call_chain_filter(task): 312 | flt = chain_next(task) 313 | if not flt: 314 | return 315 | next = flt(task) 316 | insert_task(next) 317 | 318 | 319 | def assign_key(d1, d2, *keys): 320 | for key in keys: 321 | if key in d2: 322 | d1[key] = d2[key] 323 | 324 | 325 | def call_parser(task): 326 | status = task["res_status"]["status"] 327 | res_header = task["res_header"] 328 | if task["redirect"] > 0 and ( 329 | res_header.get("Location") or 330 | res_header.get("location")): 331 | auto_redirect(task) 332 | return 333 | if task.get("chain"): 334 | if call_chain_filter(task): 335 | return 336 | prev = task["prev"] 337 | assign_key(prev, task, 338 | "res_status", "res_cookie", 339 | "res_header", "recv") 340 | task = prev 341 | enc = task["res_header"].get("Content-Encoding") 342 | text = task["recv"].getvalue() 343 | task["recv"].truncate(0) 344 | task["text"] = text 345 | if text and enc == "gzip": 346 | task["text"] = zlib.decompress(text, 16 + zlib.MAX_WBITS) 347 | elif text and enc == "deflate": 348 | task["text"] = zlib.decompress(text, -zlib.MAX_WBITS) 349 | try: 350 | task["parser"](task) 351 | except: 352 | traceback.print_exc() 353 | exit(1) 354 | 355 | 356 | def decode_chunked(task): 357 | normal = StringIO() 358 | try: 359 | convert_chunked(task["recv"], normal) 360 | except Exception as e: 361 | remove_task(task, why="chunked: %s" % e) 362 | return 363 | task["recv"].close() 364 | task["recv"] = normal 365 | 366 | 367 | def convert_chunked(cbuf, normal_stream): 368 | end = cbuf.tell() 369 | cbuf.seek(0) 370 | goout = 0 371 | while True: 372 | num = "" 373 | while True: 374 | char = cbuf.read(1) 375 | if not char: 376 | goout = True 377 | break 378 | if char == "\r": 379 | break 380 | num += char 381 | if goout: 382 | break 383 | cbuf.seek(1, io.SEEK_CUR) 384 | x = int(num, 16) 385 | if not x: 386 | break 387 | chunk = cbuf.read(x) 388 | if len(chunk) != x: 389 | break 390 | cbuf.seek(2, io.SEEK_CUR) 391 | normal_stream.write(chunk) 392 | 393 | 394 | def decode_chunk_stream(task): 395 | goout = False 396 | b = task["chunked_b"] 397 | recv = task["recv"] 398 | done = False 399 | recv_end = recv.tell() 400 | recv.seek(task["chunked_idx"], io.SEEK_SET) 401 | while True: 402 | back_idx = recv.tell() 403 | num = "" 404 | while True: 405 | char = recv.read(1) 406 | if not char: 407 | goout = True 408 | break 409 | if char == "\r": 410 | break 411 | num += char 412 | if goout: 413 | recv.seek(back_idx, io.SEEK_SET) 414 | break 415 | recv.seek(1, io.SEEK_CUR) 416 | try: 417 | x = int(num, 16) 418 | except: 419 | f = open("chunk_bug." 
+ str(time.time()), "w+") 420 | log_with_time("chunk_bug") 421 | f.write(task["recv"].getvalue()) 422 | f.close() 423 | exit(1) 424 | if not x: 425 | done = True 426 | break 427 | chunk = recv.read(x) 428 | if len(chunk) != x: 429 | recv.seek(back_idx, io.SEEK_SET) 430 | break 431 | recv.seek(2, io.SEEK_CUR) 432 | b.write(chunk) 433 | task["chunked_idx"] = recv.tell() 434 | recv.seek(recv_end, io.SEEK_SET) 435 | if done: 436 | task["recv"] = task["chunked_b"] 437 | del task["chunked_b"] 438 | del task["chunked_idx"] 439 | call_parser(task) 440 | remove_task(task) 441 | 442 | 443 | def parse_http_buffer(task): 444 | header = task.get("res_header") 445 | if not header: 446 | parse_header(task) 447 | header = task.get("res_header") 448 | if not header: 449 | remove_task(task) 450 | return 451 | if header.get("Transfer-Encoding") == "chunked": 452 | decode_chunked(task) 453 | call_parser(task) 454 | remove_task(task) 455 | 456 | 457 | def parse_header(task): 458 | recv = task["recv"] 459 | content = recv.getvalue() 460 | body_pos = content.find("\r\n\r\n") 461 | if body_pos < 0: 462 | return 463 | recv.truncate(0) 464 | recv.write(content[body_pos + 4:]) 465 | try: 466 | status, cookie, header = parse_response_header(content[:body_pos]) 467 | task["res_cookie"] = cookie 468 | task["res_header"] = header 469 | task["res_status"] = status 470 | except (IndexError, TypeError) as e: 471 | remove_task(task, why="解析http头失败: %s" % e) 472 | return 473 | task["status"] = STATUS_HEADER 474 | 475 | 476 | def parse_response(header, task): 477 | # 检测请求是否完成,并调用paser 478 | total_length = 0xffffffff 479 | if "Content-Length" in header: 480 | total_length = int(header["Content-Length"]) 481 | if task["recv"].tell() >= total_length: 482 | parse_http_buffer(task) 483 | return 484 | if header.get("Transfer-Encoding") == "chunked": 485 | if not "chunked_b" in task: 486 | task["chunked_b"] = StringIO() 487 | task["chunked_idx"] = 0 488 | decode_chunk_stream(task) 489 | 490 | 491 | STATUS_CONNECT = 0x1 << 3 492 | STATUS_CONNECTED = 0x1 << 4 493 | STATUS_SSL_HANDSHAKE = 0x1 << 5 494 | STATUS_FAILED = 0x1 << 6 495 | STATUS_SEND = 0x1 << 7 496 | STATUS_RECV = 0x1 << 8 497 | STATUS_HEADER = 0x1 << 9 498 | STATUS_DONE = 0x1 << 10 499 | STATUS_SOCKS5_HANDSHAKE = 0x1 << 11 500 | STATUS_SOCKS5_REQUEST = 0x1 << 12 501 | 502 | 503 | def remove_task(task, why=None): 504 | try: 505 | catch_bug(task, why=why) 506 | except: 507 | traceback.print_exc() 508 | 509 | 510 | def catch_bug(task, why=None): 511 | random = task["random"] 512 | if why and config["retry"]: 513 | task["reason"] = why 514 | failed_tasks[random] = task 515 | con = task["con"] 516 | fd = task["fd"] 517 | if fd in fd_task: 518 | del fd_task[fd] 519 | if random in tasks: 520 | del tasks[random] 521 | task["send"].close() 522 | task["recv"].close() 523 | if task.get("ssl"): 524 | con = task["ssl_con"] 525 | else: 526 | con = task["con"] 527 | if not con: 528 | return 529 | try: 530 | con.close() 531 | except (AttributeError, OSError): 532 | pass 533 | try: 534 | os.close(fd) 535 | except OSError: 536 | pass 537 | 538 | 539 | def connect(task, remote): 540 | # 开启异步连接 541 | try: 542 | reqsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 543 | reqsock.setblocking(0) 544 | reqfd = reqsock.fileno() 545 | ep.register(reqfd, EPOLLIN | EPOLLOUT | EPOLLERR) 546 | except Exception as e: 547 | raise e 548 | # fd -> task 549 | fd_task[reqfd] = task 550 | task["con"] = reqsock 551 | task["fd"] = reqfd 552 | # 设定连接初始化时间 553 | task["start"] = time.time() 554 | # 有dns缓存则使用 
555 | remote_str = "%s:%s" % remote 556 | if remote_str in dns_buffer: 557 | remote = dns_buffer[remote_str] 558 | try: 559 | reqsock.connect(remote) 560 | except socket.error as e: 561 | if e.errno == errno.EINPROGRESS: 562 | return 563 | reqsock.close() 564 | raise e 565 | 566 | 567 | def connect_remote(task): 568 | # 生成请求,暂时写到发送缓冲 569 | try: 570 | remote, content = generate_request(task) 571 | except KeyError as e: 572 | log_with_time("generate_request error: %s" % e) 573 | pprint.pprint(task) 574 | return 575 | if "remote" in task: 576 | remote = task["remote"] 577 | if task.get("method", METHOD_GET).lower() == "head": 578 | task["header_only"] = True 579 | else: 580 | task["header_only"] = False 581 | count = 0 582 | while True: 583 | try: 584 | connect(task, remote) 585 | break 586 | except Exception as e: 587 | count += 1 588 | if count > 3: 589 | raise e 590 | traceback.print_exc() 591 | time.sleep(1) 592 | if not task.get("socks5_proxy"): 593 | task["send"].write(content) 594 | task["status"] = STATUS_SEND 595 | else: 596 | task["send"].write("\x05\x01\x00") 597 | task["status"] = STATUS_SOCKS5_HANDSHAKE 598 | 599 | 600 | def send_remain(task): 601 | # 网络阻塞, 重发 602 | buf = task["send"] 603 | con = task["con"] 604 | data = buf.getvalue() 605 | count = len(data) 606 | try: 607 | sent = con.send(data) 608 | except socket.error as e: 609 | if e.errno != errno.EAGAIN: 610 | remove_task(task, why="write_later send: %s" % e) 611 | return 612 | buf.truncate(0) 613 | # 避免busy loop 614 | if sent != count: 615 | buf.write(data[sent:]) 616 | ep.modify(task["fd"], EPOLLIN | EPOLLOUT | EPOLLERR) 617 | else: 618 | ep.modify(task["fd"], EPOLLIN | EPOLLERR) 619 | 620 | 621 | def send_remain_ssl(task): 622 | # 网络阻塞, 重发 623 | buf = task["send"] 624 | con = task["ssl_con"] 625 | data = buf.getvalue() 626 | count = len(data) 627 | try: 628 | sent = con.send(data) 629 | except ssl.SSLError as e: 630 | if handle_ssl_exception(task, e) < 0: 631 | remove_task(task, why="send_remain_ssl: %s" % e) 632 | return 633 | buf.truncate(0) 634 | # 避免busy loop 635 | if sent != count: 636 | buf.write(data[sent:]) 637 | ep.modify(task["fd"], EPOLLIN | EPOLLOUT | EPOLLERR) 638 | else: 639 | ep.modify(task["fd"], EPOLLIN | EPOLLERR) 640 | 641 | 642 | def read_to_buffer(task): 643 | # 一次把可用数据读出 644 | con = task["con"] 645 | buf = task["recv"] 646 | status = 0 647 | while True: 648 | try: 649 | mark = buf.tell() 650 | buf.write(con.recv(409600)) 651 | if buf.tell() == mark: 652 | parse_http_buffer(task) 653 | break 654 | except socket.error as e: 655 | if e.errno == errno.EAGAIN: 656 | status = 1 657 | else: 658 | status = -1 659 | break 660 | return status 661 | 662 | 663 | def handle_read(task): 664 | con = task["con"] 665 | status = read_to_buffer(task) 666 | if not status: 667 | return 668 | elif status < 0: 669 | remove_task(task, why="read_to_buffer error") 670 | return 671 | if task["status"] & STATUS_SOCKS5_HANDSHAKE: 672 | val = task["recv"].getvalue() 673 | if val.startswith("\x05\x00"): 674 | task["send"].write(task["socks5_request_message"]) 675 | del task["socks5_request_message"] 676 | task["status"] = STATUS_SOCKS5_REQUEST 677 | task["recv"].truncate(0) 678 | send_remain(task) 679 | else: 680 | remove_task(task, why="socks5握手失败") 681 | return 682 | elif task["status"] & STATUS_SOCKS5_REQUEST: 683 | val = task["recv"].getvalue() 684 | if val.startswith("\x05\x00"): 685 | task["ssl"] = task.get("ssl_mark", False) 686 | if task.get("ssl"): 687 | remote_connected(task) 688 | event_write(task) 689 | else: 690 | 
task["send"].write(task["socks5_request_content"]) 691 | del task["socks5_request_content"] 692 | task["status"] = STATUS_RECV 693 | task["recv"].truncate(0) 694 | send_remain(task) 695 | else: 696 | remove_task(task, why="socks5请求失败") 697 | return 698 | # 找http头并解析 699 | if task["status"] & STATUS_RECV: 700 | parse_header(task) 701 | if task["header_only"] and task.get("res_header"): 702 | call_parser(task) 703 | remove_task(task) 704 | return 705 | if task["status"] & STATUS_HEADER: 706 | try: 707 | parse_response(task["res_header"], task) 708 | except KeyError as e: 709 | remove_task(task, why="解析http响应主体失败: %s" % e) 710 | return 711 | 712 | 713 | def handle_ssl_exception(task, e): 714 | status = 1 715 | errno = e.errno 716 | # need read 717 | if errno == ssl.SSL_ERROR_WANT_READ: 718 | ep.modify(task["fd"], EPOLLIN | EPOLLERR) 719 | # need write 720 | elif errno == ssl.SSL_ERROR_WANT_WRITE: 721 | ep.modify(task["fd"], EPOLLIN | EPOLLERR | EPOLLOUT) 722 | # other 723 | else: 724 | status = -1 725 | return status 726 | 727 | 728 | def read_to_buffer_ssl(task): 729 | # 一次把可用数据读出 730 | con = task["ssl_con"] 731 | buf = task["recv"] 732 | status = 0 733 | while True: 734 | try: 735 | mark = buf.tell() 736 | buf.write(con.recv(409600)) 737 | if buf.tell() == mark: 738 | parse_http_buffer(task) 739 | break 740 | except ssl.SSLError as e: 741 | if e.errno == ssl.SSL_ERROR_ZERO_RETURN: 742 | parse_http_buffer(task) 743 | else: 744 | status = handle_ssl_exception(task, e) 745 | break 746 | return status 747 | 748 | 749 | def handle_read_ssl(task): 750 | # ssl handshake packet 751 | if task["status"] & STATUS_SSL_HANDSHAKE: 752 | ssl_do_handshake(task) 753 | return 754 | status = read_to_buffer_ssl(task) 755 | if not status: 756 | return 757 | elif status < 0: 758 | remove_task(task, why="read_to_buffer_ssl") 759 | return 760 | # 找http头并解析 761 | if task["status"] & STATUS_RECV: 762 | parse_header(task) 763 | if task["status"] & STATUS_HEADER: 764 | try: 765 | parse_response(task["res_header"], task) 766 | except KeyError as e: 767 | remove_task(task, why="解析http响应主体失败: %s" % e) 768 | return 769 | 770 | 771 | def remote_connected(task): 772 | if task.get("ssl"): 773 | task["status"] = STATUS_SSL_HANDSHAKE 774 | task["ssl_con"] = ssl.wrap_socket(task["con"], do_handshake_on_connect=False) 775 | else: 776 | task["status"] = STATUS_RECV 777 | 778 | 779 | def ssl_do_handshake(task): 780 | con = task["ssl_con"] 781 | try: 782 | con.do_handshake() 783 | task["status"] = STATUS_RECV 784 | ep.modify(task["fd"], EPOLLERR | EPOLLOUT | EPOLLIN) 785 | if task.get("socks5_proxy"): 786 | task["send"].write(task["socks5_request_content"]) 787 | del task["socks5_request_content"] 788 | task["recv"].truncate(0) 789 | send_remain_ssl(task) 790 | except ssl.SSLError as e: 791 | if handle_ssl_exception(task, e) < 0: 792 | remove_task(task, why="ssl handshake: %s" % e) 793 | 794 | 795 | def event_write(task): 796 | if task["status"] & STATUS_SEND: 797 | remote_connected(task) 798 | if task["status"] & STATUS_SSL_HANDSHAKE: 799 | ssl_do_handshake(task) 800 | return 801 | if task["send"].tell(): 802 | if task.get("ssl"): 803 | send_remain_ssl(task) 804 | else: 805 | send_remain(task) 806 | 807 | 808 | def event_read(task): 809 | if task.get("ssl"): 810 | handle_read_ssl(task) 811 | else: 812 | handle_read(task) 813 | 814 | 815 | def get_socket_error(con): 816 | d = con.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR, 4) 817 | code = struct.unpack("i", d)[0] 818 | return errno.errorcode.get(code, "none") 819 | 820 | 821 | 
822 | 823 | def handle_event(ep): 824 | time_now = time.time() 825 | for fd, event in ep.poll(2): 826 | if fd == g["timerfd"]: 827 | do_timer() 828 | continue 829 | task = fd_task.get(fd) 830 | if not task: 831 | continue 832 | # not likely more than 1024 event. 833 | task["event"] += 1 834 | if task["event"] > 2048: 835 | remove_task(task) 836 | continue 837 | if event & EPOLLERR: 838 | if on_socket_error: 839 | on_socket_error(task) 840 | else: 841 | print get_socket_error(task["con"]) 842 | remove_task(task, why="epoll err") 843 | continue 844 | if event & EPOLLOUT: 845 | task["start"] = time_now 846 | event_write(task) 847 | if event & EPOLLIN: 848 | task["start"] = time_now 849 | event_read(task) 850 | 851 | 852 | def run_debug(): 853 | print "======================" 854 | print "tasks", len(tasks) 855 | print "failed", len(failed_tasks) 856 | print "======================" 857 | 858 | 859 | def bisect_left(array, item, less): 860 | l = 0 861 | hi = len(array) 862 | while l < hi: 863 | m = l + (hi - l) / 2 864 | v = array[m] 865 | if less(v, item) > 0: 866 | l = m + 1 867 | else: 868 | hi = m - 1 869 | return l 870 | 871 | 872 | def find_new_task(item, zero): 873 | if item[1]["start"] == 0: 874 | return -1 875 | else: 876 | return 1 877 | 878 | 879 | def find_timeout(item, cur): 880 | start = item[1]["start"] 881 | if start == 0: 882 | return 0 883 | return cur - start - config["timeout"] 884 | 885 | 886 | def clean_tasks(now): 887 | sorted_tasks = sorted(tasks.items(), 888 | key=lambda x: x[1]["start"], 889 | reverse=True) 890 | mark = bisect_left(sorted_tasks, 891 | now, 892 | find_timeout) 893 | 894 | print "mark: %s, tasks: %s" % (mark, len(sorted_tasks)) 895 | if not mark and len(sorted_tasks): 896 | mark += 1 897 | for i in range(mark): 898 | task = sorted_tasks[i][1] 899 | if on_timeout: 900 | on_timeout(task) 901 | remove_task(task, why="连接超时被清理") 902 | 903 | 904 | def connect_more(now): 905 | sorted_tasks = sorted(tasks.items(), 906 | key=lambda x: x[1]["start"], 907 | reverse=True) 908 | mark = bisect_left(sorted_tasks, 909 | 0, 910 | find_new_task) 911 | space = config["limit"] - len(fd_task) 912 | print "space: %s mark: %s tasks %s" % (space, mark, len(sorted_tasks)) 913 | for _, v in sorted_tasks[mark:]: 914 | if space <= 0: 915 | break 916 | if v["con"]: 917 | continue 918 | connect_remote(v) 919 | space -= 1 920 | 921 | 922 | def do_timer(): 923 | assert os.read(g["timerfd"], 8) 924 | current = time.time() 925 | g["timer_signal"] = False 926 | if current - http_time > config["timeout"]: 927 | clean_tasks(current) 928 | g["http_time"] = current 929 | dns_buffer = dns_preset.copy() 930 | if current - task_time > config["interval"]: 931 | connect_more(current) 932 | g["task_time"] = current 933 | if current - expire_timeout > 600: 934 | g["expire_timeout"] = current 935 | pprint.pprint(fd_task) 936 | pprint.pprint(tasks) 937 | for i, task in fd_task.items(): 938 | remove_task(task, why="任务无响应超时") 939 | print "warning: tasks expire...." 
940 | 941 | 942 | def run_async(ep): 943 | g["http_time"] = time.time() 944 | g["task_time"] = time.time() 945 | g["expire_timeout"] = time.time() 946 | g["timer_signal"] = False 947 | while True: 948 | handle_event(ep) 949 | if not len(tasks): 950 | break 951 | 952 | 953 | def fill_task(task): 954 | if task.get("chain") and not task.get("chain_idx"): 955 | task["chain_idx"] = 0 956 | flt = chain_next(task) 957 | prev = task 958 | task = flt(prev) 959 | assert id(task) != id(prev) 960 | task["prev"] = prev 961 | task["send"] = StringIO() 962 | task["recv"] = StringIO() 963 | copy = { 964 | "proxy": "", 965 | "url": "", 966 | "random": "", 967 | "fd": -1, 968 | "parser": nope_parser, 969 | "start": 0, 970 | "retry": 0, 971 | "status": STATUS_SEND, 972 | "redirect": 0, 973 | "con": None, 974 | "chain": None, 975 | "chain_idx": 0, 976 | "ssl": False, 977 | "event": 0, 978 | } 979 | for k, v in copy.items(): 980 | if k not in task: 981 | task[k] = v 982 | if task["redirect"] and "urlsset" not in task: 983 | task["urlsset"] = {task["url"]: 1} 984 | if task.get("chain") and not task.get("chain_idx"): 985 | task["chain_idx"] = 1 986 | return task 987 | 988 | 989 | def preset_dns(task_list): 990 | dns_exception_list = [] 991 | for i in task_list: 992 | if i.get("remote"): 993 | continue 994 | d = urlparse(i["url"]) 995 | if "host" not in d: 996 | continue 997 | host = d["host"] 998 | if "port" not in d: 999 | port = 80 1000 | else: 1001 | port = d["port"] 1002 | remote = "%s:%s" % (host, port) 1003 | if remote in dns_buffer: 1004 | continue 1005 | start = time.time() 1006 | try: 1007 | addr = safe_dns_request(host, port) 1008 | except OSError as e: 1009 | print e.message 1010 | dns_exception_list.append(i) 1011 | continue 1012 | cost = time.time() - start 1013 | if cost > 1: 1014 | print "dns slow query: %s, %s" % (cost, host) 1015 | dns_buffer[remote] = addr 1016 | for i in dns_exception_list: 1017 | task_list.remove(i) 1018 | 1019 | 1020 | def log_with_time(msg): 1021 | print "async_http %s: %s" % (time.ctime(), repr(msg)) 1022 | 1023 | 1024 | def dispatch_tasks(task_list): 1025 | g["ep"] = epoll() 1026 | # 补全任务缺少的 1027 | for i, v in enumerate(task_list): 1028 | task_list[i] = fill_task(v) 1029 | # 初始化任务管理 1030 | preset_dns(task_list) 1031 | space = config["limit"] 1032 | start_time = time.time() 1033 | acnt = len(task_list) 1034 | for i in task_list: 1035 | while True: 1036 | random = get_random() 1037 | if not random in tasks: 1038 | tasks[random] = i 1039 | break 1040 | i["random"] = random 1041 | if space > 0: 1042 | connect_remote(i) 1043 | space -= 1 1044 | g["timerfd"] = open_timerfd() 1045 | ep.register(timerfd, EPOLLIN | EPOLLERR) 1046 | run_async(ep) 1047 | os.close(timerfd) 1048 | ep.close() 1049 | del g["timerfd"] 1050 | del g["ep"] 1051 | fcnt = len(failed_tasks) 1052 | log_with_time("acnt: %d, fcnt: %d, time: %d" % (acnt, 1053 | fcnt, time.time() - start_time)) 1054 | for k, v in failed_tasks.iteritems(): 1055 | log_with_time("failed: %s" % v["url"]) 1056 | 1057 | 1058 | def repeat_tasks(task_list): 1059 | global failed_tasks 1060 | dispatch_tasks(task_list) 1061 | while len(failed_tasks): 1062 | ret = [] 1063 | items = failed_tasks.items() 1064 | failed_tasks = {} 1065 | for key, v in items: 1066 | if v["retry"] > config["retry_limit"]: 1067 | continue 1068 | v["retry"] += 1 1069 | t = default_copy_func(v) 1070 | ret.append(t) 1071 | if on_failed: 1072 | on_failed(ret) 1073 | dispatch_tasks(ret) 1074 | 1075 | 1076 | def batch_request(urls, parser): 1077 | tasks = [] 1078 | for i 
in urls: 1079 | tasks.append({ 1080 | "url": i, 1081 | "parser": parser, 1082 | }) 1083 | repeat_tasks(tasks) 1084 | 1085 | 1086 | def insert_task(task): 1087 | task = default_copy_func(task) 1088 | task = fill_task(task) 1089 | while True: 1090 | random = get_random() 1091 | if random not in tasks: 1092 | tasks[random] = task 1093 | break 1094 | task["random"] = random 1095 | 1096 | 1097 | def debug_parser(task): 1098 | pprint.pprint(task["res_header"]) 1099 | 1100 | 1101 | def chain_next(task): 1102 | chain = task["chain"] 1103 | idx = task["chain_idx"] 1104 | if idx < len(chain): 1105 | task["chain_idx"] += 1 1106 | return chain[idx] 1107 | 1108 | 1109 | class TIMESPEC(ctypes.Structure): 1110 | """ 1111 | struct timespec { 1112 | time_t tv_sec; /* Seconds */ 1113 | long tv_nsec; /* Nanoseconds */ 1114 | }; 1115 | 1116 | struct itimerspec { 1117 | struct timespec it_interval; /* Interval for periodic timer */ 1118 | struct timespec it_value; /* Initial expiration */ 1119 | }; 1120 | """ 1121 | _fields_ = [("interval_sec", ctypes.c_long), 1122 | ("interval_nsec", ctypes.c_long), 1123 | ("expire_sec", ctypes.c_long), 1124 | ("expire_nsec", ctypes.c_long), 1125 | ] 1126 | 1127 | 1128 | def open_timerfd(): 1129 | """ 1130 | int timerfd_create(int clockid, int flags); 1131 | 1132 | int timerfd_settime(int fd, int flags, 1133 | const struct itimerspec *new_value, 1134 | struct itimerspec *old_value); 1135 | """ 1136 | libc = ctypes.cdll.LoadLibrary("libc.so.6") 1137 | TFD_NONBLOCK = 00004000 1138 | TFD_CLOEXEC = 02000000 1139 | CLOCK_MONOTONIC = 1 1140 | fd = libc.timerfd_create(CLOCK_MONOTONIC, TFD_NONBLOCK | TFD_CLOEXEC) 1141 | assert fd != -1 1142 | ts = TIMESPEC(1, 0, 1, 0) 1143 | assert libc.timerfd_settime(fd, 0, ctypes.pointer(ts), 0) != -1 1144 | return fd 1145 | -------------------------------------------------------------------------------- /down.py: -------------------------------------------------------------------------------- 1 | import simple_http 2 | import pprint 3 | 4 | default = "down.output" 5 | 6 | def down(url, output, proxy=""): 7 | res = simple_http.get(url, proxy=proxy, header=simple_http.download_header) 8 | if res["status"] != 200: 9 | pprint.pprint(res) 10 | exit(1) 11 | f = open(output, "wb+") 12 | f.write(res["text"]) 13 | f.close() 14 | 15 | if __name__ == "__main__": 16 | import argparse 17 | parser = argparse.ArgumentParser( 18 | description="a simple tool for downloading") 19 | parser.add_argument("-u", type=str, help="target url") 20 | parser.add_argument("-p", type=str, default="", help="proxy") 21 | parser.add_argument("-o", type=str, default=default, help="file name") 22 | args = parser.parse_args() 23 | if args.u: 24 | down(args.u, args.o, args.p) 25 | 26 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from distutils.core import setup 4 | 5 | setup(name='simplehttp', 6 | version='0.1', 7 | description='sync and async http client with http, https, proxy, auto redirect support', 8 | author='maliubiao', 9 | author_email='maliubiao@gmail.com', 10 | url='http://github.com/maliubiao/simple_http', 11 | py_modules = ["simple_http", "_http", "async_http"] 12 | ) 13 | -------------------------------------------------------------------------------- /simple_http.py: -------------------------------------------------------------------------------- 1 | # -*-encoding=utf-8-*- 2 | import socket 3 | import io 4 | import pdb 5 | 
from struct import pack 6 | 7 | import zlib 8 | import cStringIO 9 | from _http import * 10 | 11 | 12 | def get(url, **kwargs): 13 | return request(url, method=METHOD_GET, **kwargs) 14 | 15 | 16 | def head(url, **kwargs): 17 | return request(url, method=METHOD_HEAD, header_only=True, **kwargs) 18 | 19 | 20 | def delete(url, **kwargs): 21 | return request(url, method=METHOD_DELETE, **kwargs) 22 | 23 | 24 | def trace(url, **kwargs): 25 | return request(url, method=METHOD_TRACE, **kwargs) 26 | 27 | 28 | def options(url, **kwargs): 29 | return request(url, method=METHOD_OPTIONS, **kwargs) 30 | 31 | 32 | def put(url, **kwargs): 33 | return request(url, method=METHOD_PUT, **kwargs) 34 | 35 | 36 | def post(url, **kwargs): 37 | return request(url, method=METHOD_POST, **kwargs) 38 | 39 | 40 | def request(url, **kwargs): 41 | redirect = kwargs.get("redirect", 1) 42 | assert redirect > 0 43 | new_url = url 44 | urlset = {new_url: 1} 45 | while redirect: 46 | redirect = redirect - 1 47 | if urlset[new_url] > 2: 48 | raise socket.error("endless redirect") 49 | res = do_request(new_url, **kwargs) 50 | res["url"] = new_url 51 | cookie = res.get("cookie") 52 | if "cookie" not in kwargs: 53 | kwargs["cookie"] = {} 54 | if cookie: 55 | kwargs["cookie"].update(get_cookie(cookie)) 56 | res_header = res["header"] 57 | status = res["status"] 58 | if res_header.get("Location") or res_header.get("location"): 59 | new_url = res_header.get("Location", res_header.get("location")) 60 | if not new_url.startswith("http"): 61 | new_url = fix_relative_url(url, new_url) 62 | else: 63 | break 64 | print "redirect to", new_url 65 | if new_url in urlset: 66 | urlset[new_url] += 1 67 | else: 68 | urlset[new_url] = 1 69 | return res 70 | 71 | 72 | def fix_relative_url(url, new_url): 73 | d = urlparse(url) 74 | host = "%s://%s" % (d.get("schema", "http"), d["host"]) 75 | if d.get("port"): 76 | host = "%s:%s" % d['port'] 77 | if not new_url.startswith("/"): 78 | new_url = "/" + new_url 79 | return host + new_url 80 | 81 | 82 | def do_request(url, **kwargs): 83 | request = generate_request(url, **kwargs) 84 | return send_http(request) 85 | 86 | 87 | def generate_request(url, **kwargs): 88 | rl = [] 89 | url_parts = urlparse(url) 90 | # http basic authorization 91 | basicauth = basic_auth_msg(url_parts.get("user"), url_parts.get("password")) 92 | if "user" in url_parts: 93 | del url_parts["user"] 94 | if "password" in url_parts: 95 | del url_parts["password"] 96 | proxy = kwargs.get("proxy", "") 97 | if proxy: 98 | pauth = proxy_auth_msg(proxy) 99 | else: 100 | pauth = None 101 | # maybe ssl 102 | port = int(url_parts.get("port", 80)) 103 | use_ssl = False 104 | if url_parts.get("schema") == "https": 105 | use_ssl = True 106 | port = 443 107 | if not has_ssl: 108 | raise socket.error("Unsupported schema") 109 | # handle query string 110 | if kwargs.get("query"): 111 | url_parts["query"] = generate_query(kwargs["query"]) 112 | host = url_parts["host"] 113 | # http proxy: remove schema://host:port 114 | if proxy.startswith("http"): 115 | url_parts["schema"] = "http" 116 | else: 117 | del url_parts["host"] 118 | if "schema" in url_parts: 119 | del url_parts["schema"] 120 | if "port" in url_parts: 121 | del url_parts["port"] 122 | if not kwargs.get("header"): 123 | header = default_header.copy() 124 | else: 125 | header = kwargs["header"] 126 | if not port in (80, 443): 127 | header["Host"] = "%s:%d" % (host, port) 128 | else: 129 | header["Host"] = host 130 | if kwargs.get("method") in (METHOD_PUT, METHOD_POST): 131 | content = 
132 |         header["Content-Length"] = str(len(content))
133 |     # request path
134 |     path = generate_url(url_parts)
135 |     method = kwargs.get("method", METHOD_GET)
136 |     # for basic authorization
137 |     if basicauth: header["Authorization"] = basicauth
138 |     # for basic proxy authorization
139 |     if pauth: header["Proxy-Authorization"] = pauth
140 |     rl.append(generate_request_header(header, method, path))
141 |     # generate cookie and HEADER_END
142 |     if kwargs.get("cookie"):
143 |         rl.append("Cookie: ")
144 |         rl.append(generate_cookie(kwargs["cookie"]))
145 |         rl.append(HEADER_END)
146 |     else:
147 |         rl.append("\r\n")
148 |     if kwargs.get("method") in (METHOD_PUT, METHOD_POST):
149 |         rl.append(content)
150 |     # args for send_http
151 |     body = "".join(rl)
152 |     remote = kwargs.get("remote", (host, port))
153 |     return {
154 |         "body": body,
155 |         "remote": remote,
156 |         "ssl": use_ssl,
157 |         "timeout": kwargs.get("timeout", default_timeout),
158 |         "proxy": proxy,
159 |         "header_only": kwargs.get("header_only", False),
160 |     }
161 | 
162 | 
163 | def decode_chunk_stream(response):
164 |     goout = False
165 |     b = response["chunked_b"]
166 |     recv = response["recv"]
167 |     done = False
168 |     recv_end = recv.tell()
169 |     recv.seek(response["chunked_idx"], io.SEEK_SET)
170 |     while True:
171 |         back_idx = recv.tell()
172 |         num = ""
173 |         while True:
174 |             char = recv.read(1)
175 |             if not char:
176 |                 goout = True
177 |                 break
178 |             if char == "\r":
179 |                 break
180 |             num += char
181 |         if goout:
182 |             recv.seek(back_idx, io.SEEK_SET)
183 |             break
184 |         recv.seek(1, io.SEEK_CUR)  # skip the "\n" after the chunk-size line
185 |         x = int(num, 16)
186 |         if not x:
187 |             done = True
188 |             break
189 |         chunk = recv.read(x)
190 |         if len(chunk) != x:
191 |             recv.seek(back_idx, io.SEEK_SET)
192 |             break
193 |         recv.seek(2, io.SEEK_CUR)  # skip the CRLF that ends the chunk
194 |         b.write(chunk)
195 |     response["chunked_idx"] = recv.tell()
196 |     recv.seek(recv_end, io.SEEK_SET)
197 |     if done:
198 |         response["recv"] = response["chunked_b"]
199 |         del response["chunked_b"]
200 |         del response["chunked_idx"]
201 |     return done
202 | 
203 | 
204 | def parse_header(response):
205 |     recv = response["recv"]
206 |     data = recv.getvalue()
207 |     idx = data.find(HEADER_END)
208 |     skip = 4
209 |     if idx < 0:
210 |         # some servers end the header with bare LFs instead of CRLFs
211 |         idx = data.find(HEADER_END2)
212 |         skip = 2
213 |     if idx < 0:
214 |         return
215 |     recv.truncate(0)
216 |     recv.write(data[idx + skip:])
217 |     status, cookie, header = parse_response_header(data[:idx])
218 |     response.update(status)
219 |     response["cookie"] = cookie
220 |     response["header"] = header
221 |     if "Content-Length" in header:
222 |         try:
223 |             length = int(header["Content-Length"])
224 |         except ValueError:
225 |             length = int(header["Content-Length"].split("\n")[0])
226 |         if length >= 0:
227 |             response["total_length"] = length
228 |     if header.get("Transfer-Encoding") == "chunked":
229 |         response["chunked"] = True
230 |         response["chunked_b"] = cStringIO.StringIO()
231 |         response["chunked_idx"] = 0
232 |     return header
233 | 
234 | 
235 | def wait_response(request):
236 |     has_header = False
237 |     recv = cStringIO.StringIO()
238 |     remote = request["sock"]
239 |     header = None
240 |     response = {"recv": recv}
241 |     while True:
242 |         data = remote.recv(40960)
243 |         # remote closed
244 |         if not data:
245 |             break
246 |         recv.write(data)
247 |         if not has_header:
248 |             header = parse_header(response)
249 |             if not header:
250 |                 continue
251 |             if request["header_only"]:
252 |                 break
253 |             has_header = True
254 |         if response.get("chunked") and decode_chunk_stream(response):
255 |             break
256 |         if recv.tell() >= response.get("total_length", 0xffffffff):
257 |             break
258 |     if not header:
259 |         raise socket.error("remote error: %s:%d" % remote.getpeername())
260 |     return response
261 | 
262 | 
263 | def connect_sock5(sock, remote, server):
264 |     sock.connect(server)
265 |     # socks5 handshake
266 |     sock.send("\x05\x01\x00")
267 |     if not sock.recv(4).startswith("\x05\x00"):
268 |         sock.close()
269 |         raise socket.error("connect proxy failed")
270 |     # use remote dns by default
271 |     hdr = "\x05\x01\x00\x03%s%s%s" % (pack("B",
272 |             len(remote[0])), remote[0],
273 |             pack(">H", remote[1]))
274 |     sock.send(hdr)
275 |     # if request failed
276 |     if not sock.recv(12).startswith("\x05\x00"):
277 |         sock.close()
278 |         raise socket.error("unexpected response packet")
279 | 
280 | 
281 | def connect_proxy(sock, remote, proxy):
282 |     proxy_type = None
283 |     url_parts = urlparse(proxy)
284 |     schema = url_parts["schema"]
285 |     if schema in ("http", "https"):
286 |         proxy_type = "http"
287 |         sock.connect((url_parts["host"], int(url_parts["port"])))
288 |     elif schema == "socks5":
289 |         proxy_type = "socks5"
290 |         connect_sock5(sock, remote,
291 |                 (url_parts["host"], int(url_parts["port"])))
292 |     else:
293 |         raise socket.error("unknown proxy type")
294 |     return proxy_type
295 | 
296 | 
297 | def send_tcp(sock, message):
298 |     i = 0
299 |     while True:
300 |         count = sock.send(message[i:])
301 |         i += count
302 |         if i == len(message):
303 |             break
304 | 
305 | 
306 | 
307 | def send_http(request):
308 |     # if there is a proxy, connect to the proxy server instead
309 |     proxy_type = None
310 |     remote = request["remote"]
311 |     sock = socket.socket(socket.AF_INET,
312 |             socket.SOCK_STREAM)
313 |     if request["proxy"]:
314 |         # proxy handshake (plain http or socks5)
315 |         proxy_type = connect_proxy(sock, remote, request["proxy"])
316 |     else:
317 |         sock.connect(remote)
318 |     sock.settimeout(request["timeout"])
319 |     # an http proxy gets the plain request; don't wrap the socket in ssl
320 |     if request["ssl"] and proxy_type != "http":
321 |         sock = ssl.wrap_socket(sock)
322 |     request["sock"] = sock
323 |     # a bit crude, but send_tcp loops until the whole request is written
324 |     send_tcp(sock, request["body"])
325 |     response = wait_response(request)
326 |     # no connection caching yet, so close the socket once the response is in
327 |     sock.close()
328 |     header = response["header"]
329 |     text = response["recv"].getvalue()
330 |     del response["recv"]
331 |     if not request.get("header_only") and header:
332 |         # maybe gzip stream
333 |         if header.get("Content-Encoding") == "gzip":
334 |             text = zlib.decompress(text, 16 + zlib.MAX_WBITS)
335 |         elif header.get("Content-Encoding") == "deflate":
336 |             text = zlib.decompress(text, -zlib.MAX_WBITS)
337 |     response["text"] = text
338 |     return response
339 | 
--------------------------------------------------------------------------------
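
For quick reference, here is a minimal usage sketch of the synchronous API defined in `simple_http.py` above. It is illustrative only: `example.com` and the socks5 address are placeholders, and the keyword arguments (`proxy`, `query`, `redirect`, `timeout`) are the ones `request()` and `generate_request()` actually read.

```python
import simple_http

# plain GET; redirect=3 follows up to three Location hops (see request())
res = simple_http.get("http://example.com", redirect=3)
print res["status"], res["protocol"]

# GET through a socks5 proxy with an extra query string;
# generate_request() serializes the "query" dict via generate_query()
res = simple_http.get("http://example.com",
                      proxy="socks5://127.0.0.1:1080",  # placeholder address
                      query={"q": "test"},
                      timeout=10)
print res["text"][:100]
```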
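
A sketch of chaining asynchronous tasks with the helpers from `async_http.py` (`chain_next()`, `insert_task()`, `repeat_tasks()`). The `step_one`/`step_two` names are invented for illustration, and it assumes `fill_task()` initializes `chain_idx` to 0, which is what `chain_next()` expects.

```python
import async_http

def step_two(task):
    # last link in the chain: dump the response header
    async_http.debug_parser(task)

def step_one(task):
    # queue the next task of this task's chain, if any
    nxt = async_http.chain_next(task)
    if nxt:
        async_http.insert_task(nxt)

async_http.repeat_tasks([{
    "url": "http://example.com",
    "parser": step_one,
    # one follow-up request, queued after the first response is parsed
    "chain": [{"url": "http://example.com/next", "parser": step_two}],
}])
```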
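
Finally, the timer fd returned by `open_timerfd()` reports expirations as a native uint64 on each read; below is a sketch of draining it inside an event loop. This is standard timerfd usage, not code from this repository.

```python
import os
import struct

def read_timer_ticks(fd):
    # a read on a TFD_NONBLOCK timerfd returns 8 bytes: the number of
    # expirations since the last read; it raises EAGAIN when no tick is due
    try:
        data = os.read(fd, 8)
    except OSError:
        return 0
    return struct.unpack("Q", data)[0]
```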