├── changelog.txt
├── lib
│   ├── __init__.py
│   └── comm.py
├── resources
│   ├── __init__.py
│   ├── settings.xml
│   └── language
│       └── English
│           └── strings.po
├── icon.png
├── .gitattributes
├── .gitignore
├── README.md
├── addon.xml
├── www
│   ├── index.html
│   └── css
│       └── styles.css
├── LICENSE.txt
├── helpers.py
├── pyhtml.py
├── socks.py
└── service.py
/changelog.txt:
--------------------------------------------------------------------------------
1 | v0.0.0.1
--------------------------------------------------------------------------------
/lib/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/resources/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/feelfar/115proxy-for-kodi/HEAD/icon.png
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Windows:
2 | Thumbs.db
3 | ehthumbs.db
4 | Desktop.ini
5 |
6 | # Python:
7 | *.py[cod]
8 | *.so
9 | *.egg
10 | *.egg-info
11 | dist
12 | build
13 | /.vs
14 | service - 副本.py
15 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 115proxy-for-kodi
2 | A Kodi service add-on for playing the original files stored on 115 cloud storage. Requires Kodi 18 or later and is meant to be used together with https://github.com/feelfar/115-for-kodi
3 | # Installation
4 | Since no release package has been published yet, you can download and install the source zip directly:
5 | https://github.com/feelfar/115proxy-for-kodi/archive/master.zip
6 | # Thanks
7 | Download-link generation is based on https://github.com/kkHAIKE/fake115 . Thanks to kkHAIKE.
8 |
--------------------------------------------------------------------------------
/resources/settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/addon.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | File115Proxy
9 | File115Proxy
10 | en
11 | all
12 |
13 |
14 |
--------------------------------------------------------------------------------
/www/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | WEB115
7 |
8 |
9 |
10 |
11 |
35 |
36 |
--------------------------------------------------------------------------------
/resources/language/English/strings.po:
--------------------------------------------------------------------------------
1 | # NHL HLS Proxy language file
2 | # Addon Name: NHL HLS Proxy
3 | # Addon id: service.nhl-hls-proxy
4 | # Addon provider: Timewasted
5 | msgid ""
6 | msgstr ""
7 | "Project-Id-Version: NHL-HLS-Proxy\n"
8 | "Report-Msgid-Bugs-To: https://github.com/timewasted/service.nhl-hls-proxy\n"
9 | "POT-Creation-Date: YEAR-MO-DA HO:MI+ZONE\n"
10 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
11 | "Last-Translator: Timewasted\n"
12 | "Language-Team: English (http://www.transifex.com/projects/p/xbmc-addons/language/en/)\n"
13 | "MIME-Version: 1.0\n"
14 | "Content-Type: text/plain; charset=UTF-8\n"
15 | "Content-Transfer-Encoding: 8bit\n"
16 | "Language: en\n"
17 | "Plural-Forms: nplurals=2; plural=(n != 1);\n"
18 |
19 | msgctxt "#30001"
20 | msgid "HLS Proxy"
21 | msgstr ""
22 |
23 | msgctxt "#30002"
24 | msgid "Listen on address"
25 | msgstr ""
26 |
27 | msgctxt "#30003"
28 | msgid "Listen on port"
29 | msgstr ""
30 |
31 | msgctxt "#30004"
32 | msgid "Listen port must be between 1024 and 65535 inclusive"
33 | msgstr ""
34 |
35 | msgctxt "#30005"
36 | msgid "You shouldn't need to change these settings"
37 | msgstr ""
38 |
39 | msgctxt "#30006"
40 | msgid "Changes require a restart of Kodi to take effect"
41 | msgstr ""
42 |
--------------------------------------------------------------------------------
/lib/comm.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # comm.py
3 | from __future__ import unicode_literals
4 | import sys
5 | import xbmc,xbmcvfs,json,gzip,time,os,re
6 |
7 | def with_metaclass(meta, *bases):
8 | """Create a base class with a metaclass."""
9 | # This requires a bit of explanation: the basic idea is to make a dummy
10 | # metaclass for one level of class instantiation that replaces itself with
11 | # the actual metaclass.
12 | class metaclass(type):
13 |
14 | def __new__(cls, name, this_bases, d):
15 | return meta(name, bases, d)
16 |
17 | @classmethod
18 | def __prepare__(cls, name, this_bases):
19 | return meta.__prepare__(name, bases)
20 | return type.__new__(metaclass, 'temporary_class', (), {})
21 |
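# A minimal usage sketch (illustrative; "MyMeta" and "Base" are not part of this
# add-on): with_metaclass lets the same class body work under a custom metaclass
# on both Python 2 and Python 3.
#
#   class MyMeta(type):
#       pass
#
#   class Base(with_metaclass(MyMeta, object)):
#       pass
#
#   assert type(Base) is MyMeta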
22 | def ensure_text(s, encoding='utf-8', errors='strict'):
23 | if isinstance(s, bytes):
24 | return s.decode(encoding, errors)
25 | elif isinstance(s, str):
26 | return s
27 | else:
28 | raise TypeError("not expecting type '%s'" % type(s))
29 |
30 | def ensure_binary(s, encoding='utf-8', errors='strict'):
31 | if isinstance(s, str):
32 | return s.encode(encoding, errors)
33 | elif isinstance(s, bytes):
34 | return s
35 | else:
36 | raise TypeError("not expecting type '%s'" % type(s))
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2014, Ryan Rogers
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | 1. Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 | 2. Redistributions in binary form must reproduce the above copyright notice,
10 | this list of conditions and the following disclaimer in the documentation
11 | and/or other materials provided with the distribution.
12 |
13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
14 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
15 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
16 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
17 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
18 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
20 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
22 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 |
--------------------------------------------------------------------------------
/www/css/styles.css:
--------------------------------------------------------------------------------
1 | body {
2 | background-color: #CCCCFF;
3 | font-size:2vw;
4 | }
5 | ul,table {
6 | list-style-type: none;
7 | margin: 0;
8 | padding: 0;
9 | width:100%;
10 | }
11 |
12 | li a {
13 | display: block;
14 | color: #000;
15 | padding: 8px 16px;
16 | overflow:hidden;
17 | text-decoration: none;
18 | text-overflow:ellipsis;
19 | white-space:nowrap;
20 | }
21 |
22 | /* Change the background color when hovering over an item */
23 | li a:hover {
24 | background-color: #666633;
25 | color: white;
26 | }
27 |
28 |
29 | td a {
30 | display: block;
31 | color: #000;
32 | padding: 8px 16px;
33 | text-decoration: none;
34 | background-color: #99CC66;
35 | font-size:1.8vw
36 | }
37 |
38 | textarea {
39 | width : 100%;
40 | height : 80%;
41 | }
42 |
43 | /* Change the background color when hovering over an item */
44 | td a:hover {
45 | background-color: #555;
46 | color: white;
47 | }
48 |
49 | .path,.typefilter{
50 | color: #ffffff;
51 | background-color: #669966;
52 | font-size:1.5vw;
53 | }
54 |
55 | .curpath{
56 | color: #000;
57 | background-color: #CCFFCC;
58 | font-size:1.5vw;
59 | word-wrap: break-word;
60 | white-space: normal;
61 | word-break:break-all;
62 | }
63 |
64 | .sort{
65 | color: #000;
66 | background-color: #99ffcc;
67 | }
68 |
69 | .sha1{
70 | color: #000;
71 | background-color: #ccff99;
72 | font-size:1.2vw
73 | }
74 |
75 | .pagesel{
76 | color: #000;
77 | background-color: #CCFFCC;
78 | font-size:1.5vw;
79 | }
80 |
81 | .loctd{
82 | max-width: 5%;
83 | width: 5%;
84 | }
85 |
86 | .sha1td{
87 | max-width: 10%;
88 | width: 10%;
89 | }
90 |
91 | .loc{
92 | color: #000;
93 | background-color: #FFCC99;
94 | padding: 8px 8px;
95 | font-size:1.5vw;
96 |
97 | }
98 |
99 | .videotd{
100 | max-width: 45%;
101 | width: 45%;
102 | }
103 |
104 | .video{
105 | color: #000;
106 | background-color: #FFCCCC;
107 | word-wrap: break-word;
108 | white-space: normal;
109 | word-break:break-all;
110 | }
111 |
112 | .vid2{
113 | color: #000;
114 | background-color: #FFCC99;
115 | }
116 |
117 | .bigfont {
118 | font-size:2.5vw;
119 | width:100%;
120 | }
121 |
122 | .video-container {
123 | position: absolute;
124 | top: 0;
125 | bottom: 0;
126 | width: 95%;
127 | height: 95%;
128 | overflow: hidden;
129 | }
130 | .video-container video {
131 | /* Make video to at least 100% wide and tall */
132 | min-width: 95%;
133 | min-height: 95%;
134 |
135 | /* Setting width & height to auto prevents the browser from stretching or squishing the video */
136 | width: auto;
137 | height: auto;
138 |
139 | /* Center the video
140 | position: absolute;
141 | top: 50%;
142 | left: 50%;
143 | transform: translate(-50%,-50%);
144 | */
145 | }
--------------------------------------------------------------------------------
/helpers.py:
--------------------------------------------------------------------------------
1 | #VERSION: 1.40
2 |
3 | # Author:
4 | # Christophe DUMEZ (chris@qbittorrent.org)
5 |
6 | # Redistribution and use in source and binary forms, with or without
7 | # modification, are permitted provided that the following conditions are met:
8 | #
9 | # * Redistributions of source code must retain the above copyright notice,
10 | # this list of conditions and the following disclaimer.
11 | # * Redistributions in binary form must reproduce the above copyright
12 | # notice, this list of conditions and the following disclaimer in the
13 | # documentation and/or other materials provided with the distribution.
14 | # * Neither the name of the author nor the names of its contributors may be
15 | # used to endorse or promote products derived from this software without
16 | # specific prior written permission.
17 | #
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
22 | # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | # POSSIBILITY OF SUCH DAMAGE.
29 | # -*- coding: utf-8 -*-
30 | import re, htmlentitydefs
31 | import tempfile
32 | import os
33 | import StringIO, gzip, urllib,urllib2
34 | import socket
35 | import socks
36 | import re
37 | import xbmc
38 |
39 | class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
40 | def http_error_301(self, req, fp, code, msg, headers):
41 | result = urllib2.HTTPRedirectHandler.http_error_301(
42 | self, req, fp, code, msg, headers)
43 | result.status = code
44 | return result
45 |
46 | def http_error_302(self, req, fp, code, msg, headers):
47 | result = urllib2.HTTPRedirectHandler.http_error_302(
48 | self, req, fp, code, msg, headers)
49 | result.status = code
50 | return result
51 |
52 | class PassRedirectHandler(urllib2.HTTPRedirectHandler):
53 | def http_error_301(self, req, fp, code, msg, headers):
54 | infourl = urllib.addinfourl(fp, headers, req.get_full_url())
55 | infourl.status = code
56 | infourl.code = code
57 | return infourl
58 |
59 | def http_error_302(self, req, fp, code, msg, headers):
60 | infourl = urllib.addinfourl(fp, headers, req.get_full_url())
61 | infourl.status = code
62 | infourl.code = code
63 | return infourl
64 |
65 |
66 | # Some sites blocks default python User-agent
67 | user_agent = 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
68 | #user_agent = 'User-Agent: Mozilla/5.0 (Linux; U; Android 6.0.1;)'
69 | headers = {'User-Agent': user_agent,'Accept-encoding': 'gzip,deflate','Accept-Language':'zh-cn','X-Requested-With': 'XMLHttpRequest'}
70 | # SOCKS5 Proxy support
71 | if os.environ.has_key("sock_proxy") and len(os.environ["sock_proxy"].strip()) > 0:
72 | proxy_str = os.environ["sock_proxy"].strip()
73 | m=re.match(r"^(?:(?P[^:]+):(?P[^@]+)@)?(?P[^:]+):(?P\w+)$", proxy_str)
74 | if m is not None:
75 | socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, m.group('host'), int(m.group('port')), True, m.group('username'), m.group('password'))
76 | socket.socket = socks.socksocket
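# Illustrative examples (not set by this add-on) of the "sock_proxy" value the
# block above expects, with and without credentials:
#   os.environ["sock_proxy"] = "127.0.0.1:1080"
#   os.environ["sock_proxy"] = "user:secret@127.0.0.1:1080"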
77 |
78 | def htmlentitydecode(s):
79 | # First convert alpha entities (such as &eacute;)
80 | # (Inspired from http://mail.python.org/pipermail/python-list/2007-June/443813.html)
81 | def entity2char(m):
82 | entity = m.group(1)
83 | if entity in htmlentitydefs.name2codepoint:
84 | return unichr(htmlentitydefs.name2codepoint[entity])
85 | return u" " # Unknown entity: We replace with a space.
86 | t = re.sub(u'&(%s);' % u'|'.join(htmlentitydefs.name2codepoint), entity2char, s)
87 |
88 | # Then convert numerical entities (such as &#233;)
89 | t = re.sub(u'&#(\d+);', lambda x: unichr(int(x.group(1))), t)
90 |
91 | # Then convert hexa entities (such as &#x00E9;)
92 | return re.sub(u'&#x(\w+);', lambda x: unichr(int(x.group(1),16)), t)
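# Illustrative example of the decoding above (Python 2 semantics, since this
# module relies on unichr/htmlentitydefs):
#   htmlentitydecode(u"Tom &amp; Jerry &#233; &#xE9;") == u"Tom & Jerry \xe9 \xe9"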
93 |
94 | def gethead(url, data=None,referer=None,h=None):
95 | if h:
96 | headers.update(h)
97 | req = urllib2.Request(url, headers = headers)
98 | if referer:
99 | req.add_header('Referer', referer)
100 | req.get_method = lambda : 'HEAD'
101 | opener = urllib2.build_opener(PassRedirectHandler)
102 | return opener.open(req,timeout=30)
103 |
104 | def retrieve_url(url, data=None,referer=None,h=None,redirect=True,charset='auto',savecookie=False):
105 | """ Return the content of the url page as a string """
106 | if h:
107 | headers.update(h)
108 | req = urllib2.Request(url, headers = headers)
109 | if referer:
110 | req.add_header('Referer', referer)
111 | if redirect:
112 | opener = urllib2.build_opener(SmartRedirectHandler)
113 | else:
114 | opener = urllib2.build_opener(PassRedirectHandler)
115 | try:
116 | if data:
117 | response = opener.open(req, data=data,timeout=30)
118 | else:
119 | response = opener.open(req,timeout=30)
120 | if response.code==302:
121 | return response.info()['Location']
122 | except urllib2.URLError as errno:
123 | print(" ".join(("Connection error:", str(errno.reason))))
124 | return ""
125 | dat = response.read()
126 |
127 | # Check if it is gzipped
128 | if dat[:2] == '\037\213':
129 | # Data is gzip encoded, decode it
130 | compressedstream = StringIO.StringIO(dat)
131 | gzipper = gzip.GzipFile(fileobj=compressedstream)
132 | extracted_data = gzipper.read()
133 | dat = extracted_data
134 | info = response.info()
135 | if savecookie:
136 | cookie=info.getheader('Set-Cookie')
137 | if cookie:
138 | dat='feelfarcookie:'+cookie+dat
139 | if charset=='auto':
140 | charset = 'utf-8'
141 | try:
142 | ignore, charset = info['Content-Type'].split('charset=')
143 | except:
144 | pass
145 | dat = dat.decode(charset, 'replace')
146 | dat = htmlentitydecode(dat)
147 | return dat.encode('utf-8', 'replace')
148 |
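# Illustrative usage (the URL is a placeholder): fetch a page with the module's
# default headers and an optional Referer; returns UTF-8 encoded bytes.
#   page = retrieve_url("http://example.com/", referer="http://example.com/")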
149 | def download_file(url, referer=None):
150 | """ Download file at url and write it to a file, return the path to the file and the url """
151 | file, path = tempfile.mkstemp()
152 | file = os.fdopen(file, "w")
153 | # Download url
154 | req = urllib2.Request(url, headers = headers)
155 | if referer is not None:
156 | req.add_header('referer', referer)
157 | response = urllib2.urlopen(req)
158 | dat = response.read()
159 | # Check if it is gzipped
160 | if dat[:2] == '\037\213':
161 | # Data is gzip encoded, decode it
162 | compressedstream = StringIO.StringIO(dat)
163 | gzipper = gzip.GzipFile(fileobj=compressedstream)
164 | extracted_data = gzipper.read()
165 | dat = extracted_data
166 |
167 | # Write it to a file
168 | file.write(dat)
169 | file.close()
170 | # return file path
171 | return path+" "+url
172 |
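# Illustrative usage: the return value packs "<temp file path> <url>" into one
# string separated by a single space (assuming the temp path contains no spaces).
#   result = download_file("http://example.com/file.bin")
#   path, url = result.split(" ", 1)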
--------------------------------------------------------------------------------
/pyhtml.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 |
4 | PyHTML
5 | ======
6 |
7 | Simple HTML generator for Python.
8 |
9 |
10 | Usage:
11 |
12 | Lets create a tag.
13 |
14 | >>> t = div()
15 | >>> t
16 | div()
17 |
18 |
19 | Tags can be rendered by converting to string.
20 |
21 | >>> str(t)
22 | '
'
23 |
24 |
25 | Printing an object automatically calls str() with that object.
26 | I will keep printing tags in this tutorial for clarity.
27 |
28 | >>> print(div())
29 |
30 |
31 |
32 | Parantheses can be omitted if the tag has no content.
33 |
34 | >>> print(div)
35 |
36 |
37 |
38 | Some tags are self closing.
39 | >>> print(hr)
40 |
41 |
42 |
43 | You can put some content into the tag.
44 | >>> print(div('content'))
45 |
46 | content
47 |
48 |
49 |
50 | You can set attributes of the tag.
51 |
52 | >>> print(div(lang='tr', id='content', class_="bar", data_value="foo"))
53 |
54 |
55 |
56 | Or both:
57 |
58 | >>> print(div(lang='tr')('content'))
59 |
60 | content
61 |
62 |
63 |
64 | Content can be anything which can be converted to string.
65 |
66 | If content is a callable, it will be called with a one argument
67 | that is the context you pass to render() as keyword arguments.
68 |
69 | >>> greet = lambda ctx: 'Hello %s' % ctx.get('user', 'guest')
70 | >>> greeting = div(greet)
71 | >>> print(greeting)
72 |
73 | Hello guest
74 |
75 | >>> print(greeting.render(user='Cenk'))
76 |
77 | Hello Cenk
78 |
79 |
80 |
81 | You can give list of items as content.
82 |
83 | >>> print(div(nav(), greet, hr))
84 |
85 |
86 | Hello guest
87 |
88 |
89 |
90 |
91 | You can give give a callable returning a list as content.
92 |
93 | >>> items = lambda ctx: [li('a'), li('b')]
94 | >>> print(ul(items))
95 |
96 |
97 | a
98 |
99 |
100 | b
101 |
102 |
103 |
104 |
105 | You can give give a generator as content.
106 |
107 | >>> def items(ctx):
108 | ... for i in range(3):
109 | ... yield li(i)
110 | >>> print(ul(items))
111 |
112 |
113 | 0
114 |
115 |
116 | 1
117 |
118 |
119 | 2
120 |
121 |
122 |
123 |
124 | You can nest tags.
125 |
126 | >>> print(div(div(p('a paragraph'))))
127 |
128 |
129 |
130 | a paragraph
131 |
132 |
133 |
134 |
135 |
136 | Some tags have sensible defaults.
137 |
138 | >>> print(form())
139 |
140 |
141 | >>> print(html())
142 |
143 |
144 |
145 |
146 | Full example:
147 |
148 | >>> print(html(
149 | ... head(
150 | ... title('Awesome website'),
151 | ... script(src="http://path.to/script.js")
152 | ... ),
153 | ... body(
154 | ... header(
155 | ... img(src='/path/to/logo.png'),
156 | ... ),
157 | ... div(
158 | ... 'Content here'
159 | ... ),
160 | ... footer(
161 | ... hr,
162 | ... 'Copyright 2012'
163 | ... )
164 | ... )
165 | ... ))
166 |
167 |
168 |
169 |
170 | Awesome website
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 | Content here
180 |
181 |
182 |
183 | Copyright 2012
184 |
185 |
186 |
187 |
188 | """
189 |
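# A minimal usage sketch (illustrative), showing the output this module's
# render() produces with its 2-space indentation:
#   print(div(class_='box')('hello'))
#   # <div class="box">
#   #   hello
#   # </div>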
190 | from __future__ import print_function
191 | import io
192 | import sys
193 | from copy import deepcopy
194 | from types import GeneratorType
195 | import lib.comm as comm
196 |
197 | if sys.version_info[0] >= 3:
198 | from typing import Dict, List # noqa
199 |
200 | __version__ = '1.3.1'
201 |
202 | # The list will be extended by register_all function.
203 | __all__ = 'Tag Block Safe Var SelfClosingTag html script style form'.split()
204 |
205 | tags = 'head body title div p h1 h2 h3 h4 h5 h6 u b i s a em strong span '\
206 | 'font del_ ins ul ol li dd dt dl article section nav aside header '\
207 | 'footer audio video deo_video object_ embed param fieldset legend button '\
208 | 'label select datalist option table thead tbody tr th td caption '\
209 | 'blockquote cite q abbr acronym address'
210 |
211 | self_closing_tags = 'meta link br hr input_ img'
212 |
213 | whitespace_sensitive_tags = 'code samp pre var kbd dfn source textarea'
214 |
215 | INDENT = 2
216 |
217 |
218 | def _escape(text):
219 | r = (
220 | ('&', '&'),
221 | ('<', '<'),
222 | ('>', '>'),
223 | ('"', '"'),
224 | ("'", '''), )
225 | for k, v in r:
226 | text = text.replace(k, v)
227 | return text
228 |
229 |
230 | class TagMeta(type):
231 | """Type of the Tag. (type(Tag) == TagMeta)
232 | """
233 |
234 | def __str__(cls):
235 | """Renders as empty tag."""
236 | if cls.self_closing:
237 | return '<%s/>' % cls.__name__
238 | else:
239 | return '<%s></%s>' % (cls.__name__, cls.__name__)
240 |
241 | def __repr__(cls):
242 | return cls.__name__
243 |
244 | class Tag(comm.with_metaclass(TagMeta, object)): # type: ignore
245 |
246 | safe = False # do not escape while rendering
247 | self_closing = False
248 | whitespace_sensitive = False
249 | default_attributes = {} # type: Dict[str, str]
250 | doctype = None # type: str
251 |
252 | def __init__(self, *children, **attributes):
253 | _safe = attributes.pop('_safe', None)
254 | if _safe is not None:
255 | self.safe = _safe
256 |
257 | # Only children or attributes may be set at a time.
258 | assert ((bool(children) ^ bool(attributes)) or (not children and not attributes))
259 |
260 | if self.self_closing and children:
261 | raise Exception("Self closing tag can't have children")
262 |
263 | self.children = children
264 |
265 | self.blocks = {} # type: Dict[str, List[Block]]
266 | self._set_blocks(children)
267 |
268 | self.attributes = self.default_attributes.copy()
269 | self.attributes.update(attributes)
270 |
271 | def __call__(self, *children, **options):
272 | if self.self_closing:
273 | raise Exception("Self closing tag can't have children")
274 |
275 | _safe = options.pop('_safe', None)
276 | if _safe is not None:
277 | self.safe = _safe
278 |
279 | self.children = children
280 | self._set_blocks(children)
281 | return self
282 |
283 | def __repr__(self):
284 | if self.attributes and not self.children:
285 | return "%s(%s)" % (self.name, self._repr_attributes())
286 | elif self.children and not self.attributes:
287 | return "%s(%s)" % (self.name, self._repr_children())
288 | elif self.attributes and self.children:
289 | return "%s(%s)(%s)" % (self.name, self._repr_attributes(), self._repr_children())
290 | else:
291 | return "%s()" % self.name
292 |
293 | def _repr_attributes(self):
294 | return ', '.join("%s=%r" % (key, value) for key, value in self.attributes.items)
295 |
296 | def _repr_children(self):
297 | return ', '.join(repr(child) for child in self.children)
298 |
299 | def __str__(self):
300 | return self.render()
301 |
302 | @property
303 | def name(self):
304 | return self.__class__.__name__
305 |
306 | def copy(self):
307 | return deepcopy(self)
308 |
309 | def render(self, _out=None, _indent=0, **context):
310 | if _out is None:
311 | _out = io.StringIO('')
312 |
313 | # Write doctype
314 | if self.doctype:
315 | _out.write(' ' * _indent)
316 | _out.write(self.doctype)
317 | _out.write('\n')
318 |
319 | # Indent opening tag
320 | _out.write(' ' * _indent)
321 |
322 | # Open tag
323 | _out.write('<%s' % self.name)
324 |
325 | self._write_attributes(_out, context)
326 |
327 | if self.self_closing:
328 | _out.write('/>')
329 | else:
330 | # Close opening tag
331 | _out.write('>')
332 |
333 | if self.children:
334 | # Newline after opening tag
335 | if not self.whitespace_sensitive:
336 | _out.write('\n')
337 |
338 | # Write content
339 | self._write_list(self.children, _out, context,
340 | _indent + INDENT)
341 |
342 | if not self.whitespace_sensitive:
343 | # Newline after content
344 | _out.write('\n')
345 | # Indent closing tag
346 | _out.write(' ' * _indent)
347 |
348 | # Write closing tag
349 | _out.write('</%s>' % self.name)
350 |
351 | return _out.getvalue()
352 |
353 | def _write_list(self, l, out, context, indent=0):
354 | for i, child in enumerate(l):
355 | # Write newline between items
356 | if i != 0 and not self.whitespace_sensitive:
357 | out.write('\n')
358 |
359 | self._write_item(child, out, context, indent)
360 |
361 | def _write_item(self, item, out, context, indent):
362 | if isinstance(item, Tag):
363 | item.render(out, indent, **context)
364 | elif isinstance(item, TagMeta):
365 | self._write_as_string(item, out, indent, escape=False)
366 | elif callable(item):
367 | rv = item(context)
368 | self._write_item(rv, out, context, indent)
369 | elif isinstance(item, (GeneratorType, list, tuple)):
370 | self._write_list(item, out, context, indent)
371 | else:
372 | self._write_as_string(item, out, indent)
373 |
374 | def _write_as_string(self, s, out, indent, escape=True):
375 | if isinstance(s, str) and not isinstance(out, io.StringIO):
376 | s = s
377 | elif s is None:
378 | s = ''
379 | elif not isinstance(s, str):
380 | s = str(s)
381 |
382 | if escape and not self.safe:
383 | s = _escape(s)
384 |
385 | # Write content
386 | if not self.whitespace_sensitive:
387 | lines = s.splitlines(True)
388 | for line in lines:
389 | out.write(' ' * indent)
390 | out.write(line)
391 | else:
392 | out.write(s)
393 |
394 | def _write_attributes(self, out, context):
395 | for key, value in sorted(self.attributes.items()):
396 | # Some attribute names such as "class" conflict
397 | # with reserved keywords in Python. These must
398 | # be postfixed with underscore by user.
399 | if key.endswith('_'):
400 | key = key.rstrip('_')
401 |
402 | # Dash is preferred to underscore in attribute names.
403 | key = key.replace('_', '-')
404 |
405 | if callable(value):
406 | value = value(context)
407 |
408 | if isinstance(value, str) and not isinstance(
409 | out, io.StringIO):
410 | value = value
411 |
412 | if not isinstance(value, str):
413 | value = str(value)
414 |
415 | value = _escape(value)
416 |
417 | out.write(' %s="%s"' % (key, value))
418 |
419 | def __setitem__(self, block_name, *children):
420 | for block in self.blocks[block_name]:
421 | block(*children)
422 |
423 | self._set_blocks(children, block_name=block_name)
424 |
425 | def _set_blocks(self, children, block_name=None):
426 | for child in children:
427 | if isinstance(child, Block):
428 | if child.block_name == block_name:
429 | self.blocks[child.block_name] = [child]
430 | elif child.block_name not in self.blocks:
431 | self.blocks[child.block_name] = []
432 | self.blocks[child.block_name].append(child)
433 | elif isinstance(child, Tag):
434 | for blocks in child.blocks.values():
435 | self._set_blocks(blocks, block_name=block_name)
436 |
437 |
438 | class Block(Tag):
439 | """List of renderable items."""
440 |
441 | def __init__(self, name):
442 | super(Block, self).__init__()
443 | self.block_name = name
444 | self.children = ()
445 |
446 | def __repr__(self):
447 | if not self.children:
448 | return 'Block(%r)' % self.block_name
449 | else:
450 | return 'Block(%r)(%s)' % (self.block_name, self._repr_children())
451 |
452 | def render(self, _out=None, _indent=0, **context):
453 | if _out is None:
454 | _out = io.StringIO('')
455 |
456 | self._write_list(self.children, _out, context, _indent)
457 | return _out.getvalue()
458 |
459 |
460 | class Safe(Block):
461 | """Helper for wrapping content that do not need escaping."""
462 |
463 | safe = True
464 |
465 | def __init__(self, *children, **options):
466 | super(Safe, self).__init__(None)
467 | super(Safe, self).__call__(*children, **options)
468 |
469 |
470 | def Var(var, default=None):
471 | """Helper function for printing a variable from context."""
472 | return lambda ctx: ctx.get(var, default)
473 |
474 |
475 | class SelfClosingTag(Tag):
476 | self_closing = True
477 |
478 |
479 | class WhitespaceSensitiveTag(Tag):
480 | whitespace_sensitive = True
481 |
482 |
483 | class html(Tag):
484 | doctype = '<!DOCTYPE html>'
485 |
486 |
487 | class script(Tag):
488 | safe = True
489 | default_attributes = {'type': 'text/javascript'}
490 |
491 |
492 | class style(Tag):
493 | default_attributes = {'type': 'text/css'}
494 |
495 |
496 | class form(Tag):
497 | default_attributes = {'method': 'POST'}
498 |
499 |
500 | _M = sys.modules[__name__]
501 |
502 |
503 | def register_all(tags, parent):
504 | for tag in tags.split():
505 | __all__.append(tag)
506 | setattr(_M, tag, type(tag, (parent, ), {'name': tag.rstrip('_').replace('_', '-')}))
507 |
508 |
509 | register_all(tags, Tag)
510 | register_all(self_closing_tags, SelfClosingTag)
511 | register_all(whitespace_sensitive_tags, WhitespaceSensitiveTag)
512 |
513 |
514 | if __name__ == "__main__":
515 | import doctest
516 | doctest.testmod(extraglobs={'print_function': print_function}) # type: ignore
517 |
--------------------------------------------------------------------------------
/socks.py:
--------------------------------------------------------------------------------
1 | """SocksiPy - Python SOCKS module.
2 | Version 1.01
3 |
4 | Copyright 2006 Dan-Haim. All rights reserved.
5 | Various fixes by Christophe DUMEZ - 2010
6 |
7 | Redistribution and use in source and binary forms, with or without modification,
8 | are permitted provided that the following conditions are met:
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 | 2. Redistributions in binary form must reproduce the above copyright notice,
12 | this list of conditions and the following disclaimer in the documentation
13 | and/or other materials provided with the distribution.
14 | 3. Neither the name of Dan Haim nor the names of his contributors may be used
15 | to endorse or promote products derived from this software without specific
16 | prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
19 | WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
20 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
21 | EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
22 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
24 | OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
25 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
26 | OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
27 |
28 |
29 | This module provides a standard socket-like interface for Python
30 | for tunneling connections through SOCKS proxies.
31 |
32 | """
33 |
34 | import socket
35 | import struct
36 |
37 | PROXY_TYPE_SOCKS4 = 1
38 | PROXY_TYPE_SOCKS5 = 2
39 | PROXY_TYPE_HTTP = 3
40 |
41 | _defaultproxy = None
42 | _orgsocket = socket.socket
43 |
44 | class ProxyError(Exception):
45 | def __init__(self, value):
46 | self.value = value
47 | def __str__(self):
48 | return repr(self.value)
49 |
50 | class GeneralProxyError(ProxyError):
51 | def __init__(self, value):
52 | self.value = value
53 | def __str__(self):
54 | return repr(self.value)
55 |
56 | class Socks5AuthError(ProxyError):
57 | def __init__(self, value):
58 | self.value = value
59 | def __str__(self):
60 | return repr(self.value)
61 |
62 | class Socks5Error(ProxyError):
63 | def __init__(self, value):
64 | self.value = value
65 | def __str__(self):
66 | return repr(self.value)
67 |
68 | class Socks4Error(ProxyError):
69 | def __init__(self, value):
70 | self.value = value
71 | def __str__(self):
72 | return repr(self.value)
73 |
74 | class HTTPError(ProxyError):
75 | def __init__(self, value):
76 | self.value = value
77 | def __str__(self):
78 | return repr(self.value)
79 |
80 | _generalerrors = ("success",
81 | "invalid data",
82 | "not connected",
83 | "not available",
84 | "bad proxy type",
85 | "bad input")
86 |
87 | _socks5errors = ("succeeded",
88 | "general SOCKS server failure",
89 | "connection not allowed by ruleset",
90 | "Network unreachable",
91 | "Host unreachable",
92 | "Connection refused",
93 | "TTL expired",
94 | "Command not supported",
95 | "Address type not supported",
96 | "Unknown error")
97 |
98 | _socks5autherrors = ("succeeded",
99 | "authentication is required",
100 | "all offered authentication methods were rejected",
101 | "unknown username or invalid password",
102 | "unknown error")
103 |
104 | _socks4errors = ("request granted",
105 | "request rejected or failed",
106 | "request rejected because SOCKS server cannot connect to identd on the client",
107 | "request rejected because the client program and identd report different user-ids",
108 | "unknown error")
109 |
110 | def setdefaultproxy(proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
111 | """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
112 | Sets a default proxy which all further socksocket objects will use,
113 | unless explicitly changed.
114 | """
115 | global _defaultproxy
116 | _defaultproxy = (proxytype,addr,port,rdns,username,password)
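# Illustrative usage (host/port are placeholders): route every new socket
# through a SOCKS5 proxy by default.
#   setdefaultproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
#   socket.socket = socksocket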
117 |
118 | class socksocket(socket.socket):
119 | """socksocket([family[, type[, proto]]]) -> socket object
120 |
121 | Open a SOCKS enabled socket. The parameters are the same as
122 | those of the standard socket init. In order for SOCKS to work,
123 | you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
124 | """
125 |
126 | def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
127 | _orgsocket.__init__(self,family,type,proto,_sock)
128 | if _defaultproxy != None:
129 | self.__proxy = _defaultproxy
130 | else:
131 | self.__proxy = (None, None, None, None, None, None)
132 | self.__proxysockname = None
133 | self.__proxypeername = None
134 |
135 | def __recvall(self, bytes):
136 | """__recvall(bytes) -> data
137 | Receive EXACTLY the number of bytes requested from the socket.
138 | Blocks until the required number of bytes have been received.
139 | """
140 | data = ""
141 | while len(data) < bytes:
142 | d = self.recv(bytes-len(data))
143 | if not d:
144 | raise GeneralProxyError("connection closed unexpectedly")
145 | data = data + d
146 | return data
147 |
148 | def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
149 | """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
150 | Sets the proxy to be used.
151 | proxytype - The type of the proxy to be used. Three types
152 | are supported: PROXY_TYPE_SOCKS4 (including socks4a),
153 | PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
154 | addr - The address of the server (IP or DNS).
155 | port - The port of the server. Defaults to 1080 for SOCKS
156 | servers and 8080 for HTTP proxy servers.
157 | rdns - Should DNS queries be preformed on the remote side
158 | (rather than the local side). The default is True.
159 | Note: This has no effect with SOCKS4 servers.
160 | username - Username to authenticate with to the server.
161 | The default is no authentication.
162 | password - Password to authenticate with to the server.
163 | Only relevant when username is also provided.
164 | """
165 | self.__proxy = (proxytype,addr,port,rdns,username,password)
166 |
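# Illustrative usage (host/port are placeholders): configure a single socket
# instead of the module-wide default.
#   s = socksocket()
#   s.setproxy(PROXY_TYPE_SOCKS5, "127.0.0.1", 1080)
#   s.connect(("example.com", 80))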
167 | def __negotiatesocks5(self,destaddr,destport):
168 | """__negotiatesocks5(self,destaddr,destport)
169 | Negotiates a connection through a SOCKS5 server.
170 | """
171 | # First we'll send the authentication packages we support.
172 | if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
173 | # The username/password details were supplied to the
174 | # setproxy method so we support the USERNAME/PASSWORD
175 | # authentication (in addition to the standard none).
176 | self.sendall("\x05\x02\x00\x02")
177 | else:
178 | # No username/password were entered, therefore we
179 | # only support connections with no authentication.
180 | self.sendall("\x05\x01\x00")
181 | # We'll receive the server's response to determine which
182 | # method was selected
183 | chosenauth = self.__recvall(2)
184 | if chosenauth[0] != "\x05":
185 | self.close()
186 | raise GeneralProxyError((1,_generalerrors[1]))
187 | # Check the chosen authentication method
188 | if chosenauth[1] == "\x00":
189 | # No authentication is required
190 | pass
191 | elif chosenauth[1] == "\x02":
192 | # Okay, we need to perform a basic username/password
193 | # authentication.
194 | self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
195 | authstat = self.__recvall(2)
196 | if authstat[0] != "\x01":
197 | # Bad response
198 | self.close()
199 | raise GeneralProxyError((1,_generalerrors[1]))
200 | if authstat[1] != "\x00":
201 | # Authentication failed
202 | self.close()
203 | raise Socks5AuthError((3,_socks5autherrors[3]))
204 | # Authentication succeeded
205 | else:
206 | # Reaching here is always bad
207 | self.close()
208 | if chosenauth[1] == "\xFF":
209 | raise Socks5AuthError((2,_socks5autherrors[2]))
210 | else:
211 | raise GeneralProxyError((1,_generalerrors[1]))
212 | # Now we can request the actual connection
213 | req = "\x05\x01\x00"
214 | # If the given destination address is an IP address, we'll
215 | # use the IPv4 address request even if remote resolving was specified.
216 | try:
217 | ipaddr = socket.inet_aton(destaddr)
218 | req = req + "\x01" + ipaddr
219 | except socket.error:
220 | # Well it's not an IP number, so it's probably a DNS name.
221 | if self.__proxy[3]==True:
222 | # Resolve remotely
223 | ipaddr = None
224 | req = req + "\x03" + chr(len(destaddr)) + destaddr
225 | else:
226 | # Resolve locally
227 | ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
228 | req = req + "\x01" + ipaddr
229 | req = req + struct.pack(">H",destport)
230 | self.sendall(req)
231 | # Get the response
232 | resp = self.__recvall(4)
233 | if resp[0] != "\x05":
234 | self.close()
235 | raise GeneralProxyError((1,_generalerrors[1]))
236 | elif resp[1] != "\x00":
237 | # Connection failed
238 | self.close()
239 | if ord(resp[1])<=8:
240 | raise Socks5Error((ord(resp[1]),_socks5errors[ord(resp[1])]))
241 | else:
242 | raise Socks5Error((9,_socks5errors[9]))
243 | # Get the bound address/port
244 | elif resp[3] == "\x01":
245 | boundaddr = self.__recvall(4)
246 | elif resp[3] == "\x03":
247 | resp = resp + self.recv(1)
248 | boundaddr = self.__recvall(ord(resp[4]))
249 | else:
250 | self.close()
251 | raise GeneralProxyError((1,_generalerrors[1]))
252 | boundport = struct.unpack(">H",self.__recvall(2))[0]
253 | self.__proxysockname = (boundaddr,boundport)
254 | if ipaddr != None:
255 | self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
256 | else:
257 | self.__proxypeername = (destaddr,destport)
258 |
259 | def getproxysockname(self):
260 | """getsockname() -> address info
261 | Returns the bound IP address and port number at the proxy.
262 | """
263 | return self.__proxysockname
264 |
265 | def getproxypeername(self):
266 | """getproxypeername() -> address info
267 | Returns the IP and port number of the proxy.
268 | """
269 | return _orgsocket.getpeername(self)
270 |
271 | def getpeername(self):
272 | """getpeername() -> address info
273 | Returns the IP address and port number of the destination
274 | machine (note: getproxypeername returns the proxy)
275 | """
276 | return self.__proxypeername
277 |
278 | def __negotiatesocks4(self,destaddr,destport):
279 | """__negotiatesocks4(self,destaddr,destport)
280 | Negotiates a connection through a SOCKS4 server.
281 | """
282 | # Check if the destination address provided is an IP address
283 | rmtrslv = False
284 | try:
285 | ipaddr = socket.inet_aton(destaddr)
286 | except socket.error:
287 | # It's a DNS name. Check where it should be resolved.
288 | if self.__proxy[3]==True:
289 | ipaddr = "\x00\x00\x00\x01"
290 | rmtrslv = True
291 | else:
292 | ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
293 | # Construct the request packet
294 | req = "\x04\x01" + struct.pack(">H",destport) + ipaddr
295 | # The username parameter is considered userid for SOCKS4
296 | if self.__proxy[4] != None:
297 | req = req + self.__proxy[4]
298 | req = req + "\x00"
299 | # DNS name if remote resolving is required
300 | # NOTE: This is actually an extension to the SOCKS4 protocol
301 | # called SOCKS4A and may not be supported in all cases.
302 | if rmtrslv==True:
303 | req = req + destaddr + "\x00"
304 | self.sendall(req)
305 | # Get the response from the server
306 | resp = self.__recvall(8)
307 | if resp[0] != "\x00":
308 | # Bad data
309 | self.close()
310 | raise GeneralProxyError((1,_generalerrors[1]))
311 | if resp[1] != "\x5A":
312 | # Server returned an error
313 | self.close()
314 | if ord(resp[1]) in (91,92,93):
315 | self.close()
316 | raise Socks4Error((ord(resp[1]),_socks4errors[ord(resp[1])-90]))
317 | else:
318 | raise Socks4Error((94,_socks4errors[4]))
319 | # Get the bound address/port
320 | self.__proxysockname = (socket.inet_ntoa(resp[4:]),struct.unpack(">H",resp[2:4])[0])
321 | if rmtrslv != None:
322 | self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
323 | else:
324 | self.__proxypeername = (destaddr,destport)
325 |
326 | def __negotiatehttp(self,destaddr,destport):
327 | """__negotiatehttp(self,destaddr,destport)
328 | Negotiates a connection through an HTTP server.
329 | """
330 | # If we need to resolve locally, we do this now
331 | if self.__proxy[3] == False:
332 | addr = socket.gethostbyname(destaddr)
333 | else:
334 | addr = destaddr
335 | self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n")
336 | # We read the response until we get the string "\r\n\r\n"
337 | resp = self.recv(1)
338 | while resp.find("\r\n\r\n")==-1:
339 | resp = resp + self.recv(1)
340 | # We just need the first line to check if the connection
341 | # was successful
342 | statusline = resp.splitlines()[0].split(" ",2)
343 | if statusline[0] not in ("HTTP/1.0","HTTP/1.1"):
344 | self.close()
345 | raise GeneralProxyError((1,_generalerrors[1]))
346 | try:
347 | statuscode = int(statusline[1])
348 | except ValueError:
349 | self.close()
350 | raise GeneralProxyError((1,_generalerrors[1]))
351 | if statuscode != 200:
352 | self.close()
353 | raise HTTPError((statuscode,statusline[2]))
354 | self.__proxysockname = ("0.0.0.0",0)
355 | self.__proxypeername = (addr,destport)
356 |
357 | def connect(self,destpair):
358 | """connect(self,despair)
359 | Connects to the specified destination through a proxy.
360 | destpar - A tuple of the IP/DNS address and the port number.
361 | (identical to socket's connect).
362 | To select the proxy server use setproxy().
363 | """
364 | # Do a minimal input check first
365 | if (type(destpair) in (list,tuple)==False) or (len(destpair)<2) or (type(destpair[0])!=str) or (type(destpair[1])!=int):
366 | raise GeneralProxyError((5,_generalerrors[5]))
367 | if self.__proxy[0] == PROXY_TYPE_SOCKS5:
368 | if self.__proxy[2] != None:
369 | portnum = self.__proxy[2]
370 | else:
371 | portnum = 1080
372 | _orgsocket.connect(self,(self.__proxy[1],portnum))
373 | self.__negotiatesocks5(destpair[0],destpair[1])
374 | elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
375 | if self.__proxy[2] != None:
376 | portnum = self.__proxy[2]
377 | else:
378 | portnum = 1080
379 | _orgsocket.connect(self,(self.__proxy[1],portnum))
380 | self.__negotiatesocks4(destpair[0],destpair[1])
381 | elif self.__proxy[0] == PROXY_TYPE_HTTP:
382 | if self.__proxy[2] != None:
383 | portnum = self.__proxy[2]
384 | else:
385 | portnum = 8080
386 | _orgsocket.connect(self,(self.__proxy[1],portnum))
387 | self.__negotiatehttp(destpair[0],destpair[1])
388 | elif self.__proxy[0] == None:
389 | _orgsocket.connect(self,(destpair[0],destpair[1]))
390 | else:
391 | raise GeneralProxyError((4,_generalerrors[4]))
392 |
--------------------------------------------------------------------------------
/service.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | '''
3 | XBMCLocalProxy 0.1
4 | Copyright 2011 Torben Gerkensmeyer
5 |
6 | This program is free software; you can redistribute it and/or modify
7 | it under the terms of the GNU General Public License as published by
8 | the Free Software Foundation; either version 2 of the License, or
9 | (at your option) any later version.
10 |
11 | This program is distributed in the hope that it will be useful,
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | GNU General Public License for more details.
15 |
16 | You should have received a copy of the GNU General Public License
17 | along with this program; if not, write to the Free Software
18 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
19 | MA 02110-1301, USA.
20 | '''
21 | from __future__ import unicode_literals
22 | import io
23 | import sys
24 | from urllib import parse
25 | from urllib import request
26 | from urllib import response
27 | import http.cookiejar as cookielib
28 | import html.entities as htmlentitydefs
29 | from datetime import datetime
30 | import base64
31 | import uuid
32 | import re
33 | import time
34 | import socket
35 | import gzip
36 | import json
37 | import xbmc,xbmcaddon,xbmcvfs
38 | try:
39 | xbmc.translatePath = xbmcvfs.translatePath
40 | except AttributeError:
41 | pass
42 | from threading import Semaphore
43 | import os
44 | import mimetypes
45 | import shutil
46 | import lib.comm as comm
47 | from traceback import format_exc
48 | from socketserver import ThreadingMixIn
49 | from http.server import HTTPServer, BaseHTTPRequestHandler
50 | from cgi import parse_header, parse_multipart
51 | from Cryptodome import Random
52 | from Cryptodome.Hash import MD5
53 | from Cryptodome.Hash import SHA1
54 | from Cryptodome.Cipher import PKCS1_OAEP, PKCS1_v1_5
55 | from Cryptodome.PublicKey import RSA
56 |
57 | __cwd__=os.path.dirname(__file__)
58 | __lib__ = xbmc.translatePath( os.path.join( __cwd__, 'lib' ) )
59 | sys.path.append (__lib__)
60 |
61 | from pyhtml import *
62 | defaultUserAgent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 115Browser/27.0.3.7'
63 |
64 | _cookiestr=''
65 |
66 | def encode_obj(in_obj):
67 | def encode_list(in_list):
68 | out_list = []
69 | for el in in_list:
70 | out_list.append(encode_obj(el))
71 | return out_list
72 |
73 | def encode_dict(in_dict):
74 | out_dict = {}
75 | for k, v in in_dict.items():
76 | out_dict[k] = encode_obj(v)
77 | return out_dict
78 |
79 | if isinstance(in_obj, str):
80 | return comm.ensure_binary(in_obj)
81 | elif isinstance(in_obj, list):
82 | return encode_list(in_obj)
83 | elif isinstance(in_obj, tuple):
84 | return tuple(encode_list(in_obj))
85 | elif isinstance(in_obj, dict):
86 | return encode_dict(in_obj)
87 | return in_obj
88 |
89 | class SmartRedirectHandler(request.HTTPRedirectHandler):
90 | def http_error_301(self, req, fp, code, msg, headers):
91 | result = request.HTTPRedirectHandler.http_error_301(
92 | self, req, fp, code, msg, headers)
93 | #result.status = code
94 | return result
95 |
96 | def http_error_302(self, req, fp, code, msg, headers):
97 | result = request.HTTPRedirectHandler.http_error_302(
98 | self, req, fp, code, msg, headers)
99 | #result.status = code
100 | return result
101 |
102 | class PassRedirectHandler(request.HTTPRedirectHandler):
103 | def http_error_301(self, req, fp, code, msg, headers):
104 | infourl = response.addinfourl(fp, headers, req.get_full_url())
105 | infourl.status = code
106 | infourl.code = code
107 | return infourl
108 |
109 | def http_error_302(self, req, fp, code, msg, headers):
110 | infourl = response.addinfourl(fp, headers, req.get_full_url())
111 | infourl.status = code
112 | infourl.code = code
113 | return infourl
114 |
115 | class api_115(object):
116 | downcookie=''
117 | def __init__(self, cookstr):
118 | if cookstr=='0':
119 | cookstr=_cookiestr
120 | #xbmc.log(cookstr,level=xbmc.LOGERROR)
121 | self.headers = {
122 |
123 | 'User-Agent': defaultUserAgent,
124 | 'Accept-encoding': 'gzip,deflate',
125 | 'Cookie': cookstr,
126 | }
127 | self.user_id=None
128 | self.user_key=None
129 | if self.get_userkey() is False:
130 | xbmc.log(msg='Get userkey info failed!',level=xbmc.LOGERROR)
131 |
132 | def urlopen(self,url, data=None,referer=None,binary=False, **args):
133 | #url=url
134 | reponse=''
135 | for i in range(1,5):
136 | try:
137 | headers=self.headers.copy()
138 | if 'userAgent' in args:
139 | headers['User-Agent']=args['userAgent']
140 | del args['userAgent']
141 | req = request.Request(url,headers = headers)
142 | if referer:
143 | req.add_header('Referer', referer)
144 | opener = request.build_opener(SmartRedirectHandler)
145 | if data:
146 | if type(data) == str:
147 | data=data.encode()
148 | rsp = opener.open(req, data=data, timeout=15)
149 | else:
150 | rsp = opener.open(req, timeout=15)
151 |
152 | if rsp.info().get('Content-Encoding') == 'gzip':
153 | reponse = gzip.GzipFile(fileobj=io.BytesIO(rsp.read())).read()
154 | else:
155 | reponse = rsp.read()
156 | if not binary:
157 | reponse=comm.ensure_text(reponse)
158 | #xbmc.log(msg=str(rsp.headers),level=xbmc.LOGERROR)
159 | self.downcookie=''
160 | for key,value in rsp.headers.items():
161 | if key.lower()=='set-cookie':
162 | downcookies = re.findall(r'(?:[0-9abcdef]{20,}|acw_tc)\s*\x3D\s*[0-9abcdef]{20,}', value, re.DOTALL | re.MULTILINE)
163 | for downcook in downcookies:
164 | self.downcookie+=downcook+';'
165 | rsp.close()
166 | break
167 | except Exception as e:
168 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
169 |
170 | return reponse
171 |
172 | def jsonload(self,data):
173 | try:
174 | data= data.replace('\n','').replace('\r','')
175 | data=json.loads(data[data.index('{'):])
176 | return data
177 | except:
178 | return {'state':False,'error':'jsonload error'}
179 |
180 | def gettaglist(self):
181 | data=self.urlopen('https://webapi.115.com/label/list?user_id=&offset=0&limit=11500&sort=create_time&order=desc')
182 | return json.loads(data[data.index('{'):])
183 |
184 | def settag(self,fid,tag):
185 | data = parse.urlencode({'fid': fid,'file_label':tag})
186 | try:
187 | data=self.urlopen('http://web.api.115.com/files/edit',data=data)
188 | data= self.fetch(data).replace('\n','').replace('\r','')
189 | data=json.loads(data[data.index('{'):])
190 | return data['state']
191 | except:
192 | return False
193 |
194 | def getfilelist(self,cid,offset,pageitem,star,sorttype,sortasc,typefilter='0',nf='0',search_value=''):
195 | try:
196 | if search_value!='' and search_value!='0':
197 | file_label=''
198 | match=re.search(r'^tag\s*(?P<tag>[0-9]{10,})$',search_value)
199 | if match:
200 | file_label=match.group('tag')
201 | if file_label:
202 | data=parse.urlencode({'file_label': file_label,'cid':cid,'aid':'1','limit':str(pageitem),
203 | 'o':sorttype,'asc':sortasc,'offset':str(offset),'format':'json','date':'','pick_code':'','type':typefilter,'source':''})
204 | else:
205 | data=parse.urlencode({'search_value': search_value,'cid':cid,'aid':'1','limit':str(pageitem),
206 | 'o':sorttype,'asc':sortasc,'offset':str(offset),'format':'json','date':'','pick_code':'','type':typefilter,'source':''})
207 | data=self.urlopen('http://web.api.115.com/files/search?'+data)
208 | else:
209 | self.urlopen('https://webapi.115.com/files/order',data=parse.urlencode(encode_obj({'file_id':cid,'user_order':sorttype,'user_asc':sortasc,'fc_mix':'1'})))
210 | data = parse.urlencode(encode_obj({'aid': '1','cid':cid,'limit':pageitem,'offset':offset,'type':typefilter,'star':star,'natsort':'1','fc_mix':'1',
211 | 'o':sorttype,'asc':sortasc,'nf':nf,'show_dir':'1','format':'json','_':str(int(time.time()))}))
212 | if sorttype=='file_name':
213 | data=self.urlopen('http://aps.115.com/natsort/files.php?'+data)
214 | else:
215 | data=self.urlopen('http://webapi.115.com/files?'+data)
216 | return json.loads(data[data.index('{'):])
217 | except:
218 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
219 |
220 |
221 | def getpc(self,fid):
222 | try:
223 | data=self.urlopen('http://web.api.115.com/category/get?aid=1&cid='+fid)
224 | data= json.loads(data[data.index('{'):])
225 | return data['pick_code']
226 | except Exception as errno:
227 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
228 | return ''
229 |
230 | def getsubtitle(self,pc):
231 | try:
232 | data=self.urlopen('http://webapi.115.com/movies/subtitle?pickcode=%s'%(pc))
233 | data=json.loads(data[data.index('{'):])
234 | if data['state']:
235 | return data['data']['list']
236 | except Exception as errno:
237 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
238 |
239 | def createdir(self,pid,cname):
240 | data = parse.urlencode({'pid': pid,'cname':cname})
241 | try:
242 | data=self.urlopen('http://web.api.115.com/files/add',data=data)
243 | data= json.loads(data[data.index('{'):])
244 | return data['cid']
245 | except Exception as errno:
246 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
247 | return ''
248 |
249 | def copy(self,fid,cid):
250 | data = parse.urlencode({'fid': fid,'pid':cid})
251 | try:
252 | data=self.urlopen('http://web.api.115.com/files/copy',data=data)
253 | data= json.loads(data[data.index('{'):])
254 | return data['state']
255 | except Exception as errno:
256 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
257 | return False
258 |
259 | def rename(self,fid,newname):
260 | data = parse.urlencode({'fid': fid,'file_name':newname})
261 | try:
262 | data=self.urlopen('http://web.api.115.com/files/edit',data=data)
263 | data= json.loads(data[data.index('{'):])
264 | return data['state']
265 | except Exception as errno:
266 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
267 | return False
268 |
269 | def batchrename(self,namedict):
270 | newnames={}
271 | for key,value in namedict.items():
272 | newnames['files_new_name[%s]'%key]=value
273 | data = parse.urlencode(newnames)
274 | try:
275 | data=self.urlopen('https://webapi.115.com/files/batch_rename',data=data)
276 | data= json.loads(data[data.index('{'):])
277 | return data['state']
278 | except Exception as errno:
279 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
280 | return False
281 |
282 | def batchcidrename(self,cid,oldnewnames):
283 | namedict={}
284 | files=[]
285 | self.getallfiles(cid,files)
286 | for f in files:
287 | if f['n'] in oldnewnames:
288 | namedict[f['fid']]=oldnewnames[f['n']]
289 | self.batchrename(namedict)
290 |
291 | def getallfiles(self,cid,files):
292 | data=self.getfilelist(cid=cid,offset=0,pageitem=1150,star='0',sorttype='user_utime',sortasc='0')
293 | if 'data' in data:
294 | resp = data['data']
295 | for d in resp:
296 | if 'fid' in d:
297 | files.append(d)
298 | elif 'pid' in d:
299 | cid=d['cid']
300 | self.getallfiles(cid,files)
301 |
302 | def countfiles(self,cid):
303 | try:
304 | data=self.urlopen('https://webapi.115.com/category/get?cid=%s'%(cid))
305 | data= json.loads(data[data.index('{'):])
306 | return int(data['count'])
307 | except Exception as errno:
308 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
309 |
310 | def pathdeep(self,cid):
311 | try:
312 | data=self.urlopen('https://webapi.115.com/category/get?cid=%s'%(cid))
313 | data= json.loads(data[data.index('{'):])
314 | return int(len(data['paths']))
315 | except Exception as errno:
316 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
317 |
318 | def delete(self,fids):
319 | data={'pid':0}
320 | i=0
321 | for fid in fids:
322 | data['fid['+str(i)+']']=fid
323 | i+=1
324 | data = parse.urlencode(data)
325 | try:
326 | data=self.urlopen('http://web.api.115.com/rb/delete',data=data)
327 | data= json.loads(data[data.index('{'):])
328 | return data['state']
329 | except Exception as errno:
330 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
331 | return ''
332 |
333 | def get_userkey(self):
334 | try:
335 | data=self.urlopen('http://proapi.115.com/app/uploadinfo')
336 | data= json.loads(data[data.index('{'):])
337 | self.user_id=str(data['user_id'])
338 | self.user_key=str(data['userkey']).upper()
339 | except Exception as errno:
340 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
341 | return False
342 |
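    |     # Compute the "preid" used by 115's quick-upload protocol: the uppercase SHA-1 of the
    |     # first 128 KiB of the file, fetched with a ranged download request for pickcode `pc`.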
343 | def get_preid(self,pc):
344 | try:
345 | file_url=self.getfiledownloadurl(pc)
346 | xbmc.log(msg='file_url:'+file_url,level=xbmc.LOGERROR)
347 | filedownloadurl=downcookie=''
348 | if file_url.find('|')>0:
349 | filedownloadurl,downcookie=file_url.split('|')
350 | else:
351 |                 filedownloadurl=file_url
352 |
353 | reqheaders={}
354 | reqheaders['User-Agent']=defaultUserAgent
355 | reqheaders['Referer']='https://115.com/?cid=0&offset=0&mode=wangpan'
356 | reqheaders['Cookie']=self.headers['Cookie']+';'+downcookie+';'
357 | reqheaders['Range']='bytes=0-131071';
358 | req = request.Request(filedownloadurl, headers=reqheaders)
359 | response=None
360 | preid=''
361 | opener = request.build_opener(SmartRedirectHandler)
362 | rsp = opener.open(req, timeout=15)
363 | if rsp.info().get('Content-Encoding') == 'gzip':
364 |                 response = gzip.GzipFile(fileobj=io.BytesIO(rsp.read())).read()
365 |             else:
366 |                 response = rsp.read()
367 |             sha = SHA1.new()
368 |             sha.update(bytes(response))
369 | preid = sha.hexdigest()
370 | return preid.upper()
371 | except Exception as errno:
372 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
373 | return ''
374 |
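    |     # Register a file on 115 by hash alone (no data transfer): build the sig/token values
    |     # expected by initupload.php from user_id/user_key, the file SHA-1 (`fileid`), its size
    |     # and `preid`, then POST to uplb.115.com. Returns True when the response reports
    |     # status 2 / statuscode 0.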
375 | def import_file_with_sha1(self,preid,fileid,filesize,filename,cid,tm):
376 | target='U_1_'+str(cid)
377 | #tm=str(int(time.time()))+'000'
378 | #tm='16649927390000'
379 | xbmc.log(msg="tm:"+tm,level=xbmc.LOGERROR)
380 |
381 | sha=SHA1.new()
382 | sha.update( str.encode(self.user_id+fileid+ fileid +target+'0'))
383 | s1 = sha.hexdigest()
384 | xbmc.log(msg="s1:"+s1,level=xbmc.LOGERROR)
385 | s2=self.user_key+s1+"000000"
386 |
387 | sha=SHA1.new()
388 | sha.update(str.encode(s2))
389 | sig=sha.hexdigest().upper()
390 | xbmc.log(msg="sig:"+sig,level=xbmc.LOGERROR)
391 |
392 | useridmd5 = MD5.new()
393 | useridmd5.update(self.user_id.encode())
394 | struidmd5=useridmd5.hexdigest()
395 | xbmc.log(msg="useridmd5:"+struidmd5,level=xbmc.LOGERROR)
396 |
397 | tokenmd5 = MD5.new()
398 | appVersion = "25.2.0"
399 | tokenstr = "Qclm8MGWUv59TnrR0XPg" +fileid +str(filesize)+ preid + self.user_id + tm + struidmd5 +appVersion;
400 | tokenmd5.update(tokenstr.encode())
401 | token=tokenmd5.hexdigest()
402 | xbmc.log(msg="token:"+token,level=xbmc.LOGERROR)
403 |
404 | url=("http://uplb.115.com/3.0/initupload.php?rt=0&topupload=0&isp=0&appid=0&appversion=%s&format=json&sig=%s&token=%s&t=%s")%(appVersion,sig,token,tm)
405 | postData=('preid=%s&fileid=%s&quickid=%s&app_ver=%s&filename=%s&filesize=%s&exif=&target=%s&userid=%s')%(preid,fileid,fileid,appVersion,filename,filesize,target,self.user_id)
406 | try:
407 | data=self.urlopen(url,data=postData)
408 | data= json.loads(data[data.index('{'):])
409 |
410 | if data['status']==2 and data['statuscode']==0:
411 | return True
412 | else:
413 | xbmc.log(msg=filename+' upload failed.',level=xbmc.LOGERROR)
414 | return False
415 | except Exception as errno:
416 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
417 | return False
418 |
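    |     # Build a "115://name|size|sha1|preid" share link for a single file identified by pickcode `pc`.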
419 | def exportfid(self,name,length,sha1,pc):
420 | fidsha1=''
421 | try:
422 | preid=self.get_preid(pc)
423 | fidsha1='115://'+name+'|'+length+'|'+sha1+'|'+preid;
424 | except Exception as e:
425 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
426 | return fidsha1
427 |
428 | def exportcid(self,outlist,cid,pathdeep=0):
429 | data=self.getfilelist(cid=cid,offset=0,pageitem=1150,star='0',sorttype='user_utime',sortasc='0')
430 | paths=''
431 | pathlist=[]
432 | if 'path' in data:
433 | for item in data['path']:
434 | pathlist.append(item['name'])
435 | if len(pathlist)>=pathdeep:
436 | pathlist=pathlist[pathdeep:]
437 | paths='|'.join(pathlist)
438 | if 'data' in data:
439 | resp = data['data']
440 | for d in resp:
441 | if 'fid' in d:
442 | fidsha1=self.exportfid(d['n'],str(d['s']),d['sha'],d['pc'])
443 | if paths!='':
444 | fidsha1=fidsha1+'|'+paths
445 | outlist.append(fidsha1)
446 | elif 'pid' in d:
447 | cid=d['cid']
448 | self.exportcid(outlist,cid,pathdeep)
449 |
450 | def url_is_alive(self,url):
451 | try:
452 | rspcode=0
453 | req = request.Request(url,headers = self.headers)
454 | req.get_method = lambda : 'HEAD'
455 | req.add_header('keep_alive','false')
456 | opener = request.build_opener(SmartRedirectHandler)
457 | rsp = opener.open(req, timeout=15)
458 | rspcode=str(rsp.code)
459 | rsp.close()
460 | return rspcode=='200'
461 | except:
462 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
463 | return False
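    |     # The note* helpers below use 115's notepad service (note.115.com) as a small key/value
    |     # store, e.g. for caching resolved download URLs per pickcode.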
464 | def notecatelist(self):
465 | data=self.urlopen('https://note.115.com/?ct=note&ac=cate&has_picknews=1')
466 | return self.jsonload(data)
467 |
468 | def noteaddcate(self,cname):
469 | data = parse.urlencode(encode_obj({'cname': cname,'up_key':'tn__%d_0'%(int(time.time()))}))
470 | data=self.urlopen('https://note.115.com/?ct=note&ac=addcate',data=data)
471 | return self.jsonload(data)
472 |
473 | def notegetcateid(self,cname):
474 | cid=0
475 | data=self.notecatelist()
476 | if data['state'] and data['data']:
477 | for cate in data['data']:
478 | if cate['cname']==cname:
479 | cid=int(cate['cid'])
480 | break
481 | if cid==0:
482 | data = self.noteaddcate(cname)
483 | if data['state']:
484 | cid=int(data['data']['cid'])
485 | return cid
486 |
487 | def notesave(self,cid,nid,title,content):
488 | data = parse.urlencode(encode_obj({'cid': cid,'nid':nid,'subject':title,'content':content,'is_html':0,'toc_ids':''}))
489 | data = self.urlopen('https://note.115.com/?ct=note&ac=save',data=data)
490 | return self.jsonload(data)
491 |
492 | def notelist(self,cid,start):
493 | data = parse.urlencode(encode_obj({'ct':'note','page_size':90,'has_picknews':1,'cid': cid,'keyword':'','start':start,'_':int(time.time())}))
494 | data = self.urlopen('https://note.115.com/?'+data)
495 | return self.jsonload(data)
496 |
497 | def notedelete(self,nid):
498 | data = parse.urlencode(encode_obj({'nid': nid}))
499 | data = self.urlopen('https://note.115.com/?ct=note&ac=delete',data=data)
500 | return self.jsonload(data)
501 |
502 | def notedetail(self,nid):
503 | data = parse.urlencode(encode_obj({'ct': 'note','nid':nid,'ac':'detail'}))
504 | data = self.urlopen('https://note.115.com/?'+data)
505 | return self.jsonload(data)
506 |
507 | def notegetcontent(self,cname,notetitle):
508 | content=''
509 | cid=self.notegetcateid(cname)
510 | data=self.notelist(cid=cid,start=0)
511 | nid=0
512 | if data['state'] and data['data']:
513 | for note in data['data']:
514 | if note['title']==notetitle:
515 | nid=int(note['nid'])
516 | break
517 | if nid:
518 | data = self.notedetail(nid)
519 | if data['state']:
520 | content=data['data']['content']
521 | return content
522 |
523 | def notegetpcurl(self,pc):
524 | content=''
525 | cid=self.notegetcateid('pickcodeurl')
526 | data=self.notelist(cid=cid,start=0)
527 | nid=0
528 | nidolds=''
529 | if data['state'] and data['data']:
530 | for note in data['data']:
531 | #xbmc.log(msg='zzzzz: %s %s %d %d'%(pc,note['title'],int(time.time()) , int(note['update_time'])),level=xbmc.LOGERROR)
532 | if (int(time.time()) - int(note['update_time']))>6*3600:
533 | nidolds+=note['nid']+','
534 | else:
535 | if note['title']==pc:
536 | nid=int(note['nid'])
537 | break
538 | if nidolds:
539 | self.notedelete(nidolds)
540 | if nid:
541 | data = self.notedetail(nid)
542 | if data['state']:
543 | content=data['data']['content']
544 | return content
545 |
546 |
547 | def notesavecontent(self,cname,notetitle,content):
548 | state=False
549 | cid=self.notegetcateid(cname)
550 | data=self.notelist(cid=cid,start=0)
551 | nid=0
552 | if data['state'] and data['data']:
553 | for note in data['data']:
554 | if note['title']==notetitle:
555 | nid=int(note['nid'])
556 | break
557 | data = self.notesave(cid=cid,nid=nid,title=notetitle,content=content)
558 | state = data['state']
559 | return state
560 |
561 | def notedeleteolds(self,cname):
562 | state=False
563 | cid=self.notegetcateid(cname)
564 | data=self.notelist(cid=cid,start=90)
565 | nids=''
566 | if data['state'] and data['data']:
567 | for note in data['data']:
568 | nids=nids+note['nid']+','
569 | if nids:
570 | data = self.notedelete(nid=nids)
571 | state = data['state']
572 | return state
573 |
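    |     # Static key material for the m115_* encode/decode helpers below; g_key_s and g_key_l are
    |     # the fallback 4- and 12-byte keys used when no key is supplied to m115_getkey.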
574 | g_kts = [240, 229, 105, 174, 191, 220, 191, 138, 26, 69, 232, 190, 125, 166, 115, 184, 222, 143, 231, 196, 69, 218, 134, 196, 155, 100, 139, 20, 106, 180, 241, 170, 56, 1, 53, 158, 38, 105, 44, 134, 0, 107, 79, 165, 54, 52, 98, 166, 42, 150, 104, 24, 242, 74, 253, 189, 107, 151, 143, 77, 143, 137, 19, 183, 108, 142, 147, 237, 14, 13, 72, 62, 215, 47, 136, 216, 254, 254, 126, 134, 80, 149, 79, 209, 235, 131, 38, 52, 219, 102, 123, 156, 126, 157, 122, 129, 50, 234, 182, 51, 222, 58, 169, 89, 52, 102, 59, 170, 186, 129, 96, 72, 185, 213, 129, 156, 248, 108, 132, 119, 255, 84, 120, 38, 95, 190, 232, 30, 54, 159, 52, 128, 92, 69, 44, 155, 118, 213, 27, 143, 204, 195, 184, 245]
575 |
576 | g_key_s = [0x29, 0x23, 0x21, 0x5E]
577 |
578 | g_key_l = [120, 6, 173, 76, 51, 134, 93, 24, 76, 1, 63, 70]
579 |
580 | def m115_getkey(self,length,key):
581 | if key != '':
582 | results = []
583 | for i in range(length):
584 | v1=(key[i] + self.g_kts[length * i])&(0xff)
585 | v2=self.g_kts[length * (length - 1 - i)]
586 | results.append(v1^v2)
587 | return results
588 | if length == 12:
589 | return self.g_key_l
590 | else:
591 | return self.g_key_s
592 |
593 | def xor115_enc(self, src, srclen, key, keylen):
594 | ret = []
595 | mod4 = srclen % 4
596 | for i in range(mod4):
597 | ret.append(src[i] ^ key[i % keylen])
598 | for i in range(srclen-mod4):
599 | ret.append(src[i+mod4] ^ key[i % keylen])
600 | return ret
601 |
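    |     # Symmetric layer: XOR with a 4-byte key derived via m115_getkey, reverse the buffer,
    |     # then XOR with a 12-byte key; m115_sym_decode applies the same steps in reverse order.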
602 | def m115_sym_encode(self,src, srclen, key1, key2):
603 | #plugin.log.error('%d %d %d %d %d %d...%d %d'%(src[0],src[1],src[2],src[3],src[4],src[5],src[30],src[31]))
604 | k1 = self.m115_getkey(4, key1)
605 | #plugin.log.error(len(k1))
606 | #plugin.log.error('%d %d ...%d %d'%(k1[0],k1[1],k1[2],k1[3]))
607 |
608 | k2 = self.m115_getkey(12, key2)
609 | #plugin.log.error(len(k2))
610 | #plugin.log.error('%d %d ...%d %d'%(k2[0],k2[1],k2[10],k2[11]))
611 | ret = self.xor115_enc(src, srclen, k1, 4)
612 |
613 |
614 | ret.reverse();
615 | ret = self.xor115_enc(ret, srclen, k2, 12)
616 | #plugin.log.error(len(ret))
617 | #plugin.log.error('%d %d %d %d %d %d...%d %d'%(ret[0],ret[1],ret[2],ret[3],ret[4],ret[5],ret[30],ret[31]))
618 | return ret;
619 |
620 | def m115_sym_decode(self,src, srclen, key1, key2):
621 | k1 = self.m115_getkey(4, key1)
622 | #plugin.log.error('k1:%d %d %d %d'%(k1[0],k1[1],k1[2],k1[3]))
623 |
624 | k2 = self.m115_getkey(12, key2)
625 | ssss=0
626 | # for ss in k2:
627 | # plugin.log.error('k2:%d:%d'%(ssss,ss))
628 | # ssss+=1
629 | ret = self.xor115_enc(src, srclen, k2, 12)
630 | ssss=0
631 | # for ss in ret:
632 | # plugin.log.error('ret1:%d:%d'%(ssss,ss))
633 | # ssss+=1
634 | ret.reverse()
635 | ret = self.xor115_enc(ret, srclen, k1, 4)
636 | return ret
637 |
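    |     # Public RSA parameters (modulus n, exponent e) for the asymmetric layer; rsa_encrypt
    |     # applies a fixed-fill PKCS#1-v1.5-style padding (00 02 FF..FF 00 | data) by hand.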
638 | n=0x8686980c0f5a24c4b9d43020cd2c22703ff3f450756529058b1cf88f09b8602136477198a6e2683149659bd122c33592fdb5ad47944ad1ea4d36c6b172aad6338c3bb6ac6227502d010993ac967d1aef00f0c8e038de2e4d3bc2ec368af2e9f10a6f1eda4f7262f136420c07c331b871bf139f74f3010e3c4fe57df3afb71683
639 | e=0x10001
640 | def rsa_encrypt(self, bsrc):
641 | pretemp=bytearray(128-len(bsrc))
642 | for i in range(len(pretemp)):
643 | pretemp[i]=0xff
644 | pretemp[0]=0x00
645 | pretemp[1]=0x02
646 | pretemp[len(pretemp)-1]=0x00
647 | pretemp.extend(bsrc)
648 | isrc=int.from_bytes(pretemp,'big')
649 | ienc=pow(isrc,self.e,self.n)
650 | return ienc.to_bytes(128, 'big')
651 |
652 | def rsa_decrypt(self,bsrc):
653 | isrc=int.from_bytes(bsrc,'big')
654 | idec=pow(isrc,self.e,self.n)
655 | bdec=idec.to_bytes(128, 'big')
656 | index00=bdec[1:].index(0x00)
657 | return bdec[index00+2:]
658 |
659 | def m115_asym_encode(self,src, srclen):
660 | m = 128 - 11
661 | ret = bytearray()
662 | for i in range(int((srclen + m - 1) / m)):
663 | bsrc=bytes(src[i*m:i*m+m])
664 | #plugin.log.error(len(bsrc))
665 | #plugin.log.error('%s %s ...%s %s'%(bsrc[0],bsrc[1],bsrc[30],bsrc[31]))
666 | #rettemp=self.pcipher.encrypt(bsrc)
667 | #rettemp=self.cipher.encrypt(bsrc)
668 | rettemp=self.rsa_encrypt(bsrc)
669 | #plugin.log.error(len(rettemp))
670 | ret.extend(rettemp);
671 | #ret += base64.b64decode(rettemp);
672 | ret = base64.b64encode(ret)
673 | return ret
674 |
675 | def m115_asym_decode(self,src, srclen):
676 | m = 128
677 | #plugin.log.error(srclen)
678 | ret = bytearray()
679 | for i in range(int((srclen + m - 1) / m)):
680 | rettemp=bytes(src[i*m:i*m+m])
681 | #dsize = SHA.digest_size
682 | #sentinel = Random.new().read(16+dsize)
683 | #message=self.scipher.decrypt(rettemp,'')
684 | #message=self.cipher.decrypt(rettemp,'')
685 | message=self.rsa_decrypt(rettemp)
686 | #message=self.scipher.decrypt(rettemp,sentinel)
687 | #digest = SHA.new(message[:-dsize]).digest()
688 | #if digest==message[-dsize:]: # Note how we DO NOT look for the sentinel
689 | # plugin.log.error("Encryption was correct.")
690 | #else:
691 | # plugin.log.error("Encryption was not correct.")
692 | ret.extend(message)
693 | #ssss=0
694 | #for ss in ret:
695 | # plugin.log.error('%d:%d'%(ssss,ord(ss)))
696 | # ssss+=1
697 | return ret
698 |
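    |     # m115_encode: derive a per-request key (MD5 of '!@###@#<tm>DFDR@#@#'), symmetrically
    |     # encode the payload, prepend the first 16 key bytes, then RSA+base64 encode.
    |     # m115_decode reverses the pipeline using the key returned by m115_encode.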
699 | def m115_encode(self,src, tm):
700 | #plugin.log.error(src)
701 | key = MD5.new()
702 | #plugin.log.error(b'tm=%s'%tm)
703 | key.update(('!@###@#%sDFDR@#@#'%tm).encode())
704 | bkey = bytearray()
705 | bkey.extend( key.hexdigest().encode())
706 | #plugin.log.error(len(bkey))
707 | #plugin.log.error(key.hexdigest())
708 | #plugin.log.error('%d %d ...%d %d'%(bkey[0],bkey[1],bkey[30],bkey[31]))
709 | bsrc = bytearray()
710 | bsrc.extend(src.encode())
711 | #plugin.log.error(bsrc)
712 | tmp = self.m115_sym_encode(bsrc, len(bsrc),bkey, '')
713 | tmp2 = bkey[0:16]
714 | tmp2.extend(tmp)
715 | #plugin.log.error(len(tmp2))
716 | #plugin.log.error('%d %d %d %d %d %d...%d %d...%d %d'%(tmp2[0],tmp2[1],tmp2[2],tmp2[3],tmp2[4],tmp2[5],tmp2[30],tmp2[31],tmp2[46],tmp2[47]))
717 | return {
718 | 'data': self.m115_asym_encode(tmp2, len(tmp2)),'key':key.hexdigest()
719 | }
720 |
721 | def m115_decode(self,src, key):
722 | bkey1 = bytearray()
723 | bkey1.extend(key.encode())
724 | #plugin.log.error('%d %d ...%d %d'%(bkey1[0],bkey1[1],bkey1[30],bkey1[31]))
725 | tmp = base64.b64decode(src)
726 | bsrc = bytearray()
727 | bsrc.extend(tmp)
728 | tmp = self.m115_asym_decode(bsrc, len(bsrc))
729 | #plugin.log.error('ch=%s'%len(tmp))
730 | bkey2 = bytearray()
731 | bkey2.extend(tmp[0:16])
732 | #plugin.log.error('key2=%s'%tmp[0:16])
733 | bsrc2 = bytearray()
734 | bsrc2.extend(tmp[16:])
735 | return self.m115_sym_decode(bsrc2, len(tmp) - 16, bkey1,bkey2)
736 |
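    |     # Resolve a pickcode to a direct download URL: try the plain webapi endpoint first, then a
    |     # cached note (notegetpcurl), and finally the encrypted proapi "downurl" endpoint.
    |     # Successful results are cached back into a note and returned as 'url|cookie'.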
737 | def getfiledownloadurl(self,pc):
738 | result = ''
739 |         tm = str(int(time.time()))
740 | data=self.urlopen("https://webapi.115.com/files/download?pickcode="+pc+"&_="+tm)
741 | data= self.jsonload(data)
742 | if data['state']:
743 | result=data['file_url']+'|'+self.downcookie
744 | if not result:
745 | content=self.notegetpcurl(pc=pc)
746 | if content:
747 | result=content
748 | if not result:
749 | pcencode = self.m115_encode((json.dumps({'pickcode': pc})).replace(' ',''),tm)
750 | data=self.urlopen('http://proapi.115.com/app/chrome/downurl?t='+tm,data=parse.urlencode({'data':pcencode['data']}))
751 | jsondata = json.loads(data[data.index('{'):])
752 | if jsondata['state'] != True:
753 | return ''
754 | decodetmp=self.m115_decode(jsondata['data'], pcencode['key'])
755 | bdecode = bytearray()
756 | bdecode.extend(decodetmp)
757 | jsondata = json.loads(bdecode.decode())
758 | jsondata=jsondata[list(jsondata.keys())[0]]
759 | if 'url' in jsondata:
760 | result = jsondata['url']['url']+'|'+self.downcookie
761 | self.notesavecontent(cname='pickcodeurl',notetitle=pc,content=result)
762 | #xbmc.log('url_return '+result,level=xbmc.LOGERROR)
763 | return result
764 |
765 | def oldgetfiledownloadurl(self,pc):
766 | bad_server = ''
767 | result = ''
768 | try:
769 | data=self.urlopen('https://webapi.115.com/files/download?pickcode='+pc+'&_='+str(int(time.time())))
770 | data= json.loads(data[data.index('{'):])
771 | if data['state']:
772 | result=data['file_url']
773 | else:
774 | data=self.urlopen('http://proapi.115.com/app/chrome/down?method=get_file_url&pickcode='+pc)
775 | data= json.loads(data[data.index('{'):])
776 | if data['state']:
777 | for value in data['data'].values():
778 | if 'url' in value:
779 | result = value['url']['url']
780 | break
781 | else:
782 | return ''
783 |
784 | #xbmc.executebuiltin('XBMC.Notification('%s', '%s', '%s', '%s')' %(result, '', 5000, ''))
785 | return result+'|'+self.downcookie
786 | except Exception as errno:
787 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
788 | return ''
789 |
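    |     # Convert an SRT subtitle fetched from `srturl` into WebVTT using the toolslick.com converter API.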
790 | def coversrttovtt(self,srturl):
791 | try:
792 | srtcontent=self.urlopen(srturl)
793 | dictsrt={
794 | 'sourceFormat': 'AutoDetect',
795 | 'targetFormat': 'Vtt',
796 | 'timeShiftBy': '+0.0',
797 | 'timeShiftAfter': '0.0',
798 | 'writeHours': True,
799 | 'vttStartCounter': '',
800 | 'maxCharactersPerLine': '',
801 | 'input': srtcontent,
802 | }
803 | jsonsrt=json.dumps(dictsrt)
804 | data=parse.urlencode({'tool':'subtitle-subtitle-converter','parameters':jsonsrt})
805 | data=self.urlopen('https://toolslick.com/api/process',data=data)
806 | data= json.loads(data[data.index('{'):])
807 | return data['subtitle']
808 | except Exception as errno:
809 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
810 | return ''
811 |
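    | # HTTP handler for the local proxy: serves the web UI, 115 file listings, SHA1 import/export,
    | # subtitle conversion and streaming of 115 files and m3u8 transcodes.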
812 | class MyHandler(BaseHTTPRequestHandler):
813 |     #block size for chunked file reads
814 |     blockSize=1024*1024*16
815 |     #maximum number of concurrent access threads per file
816 |     accessThreadNum=2
817 |     #cached download url per file id
818 |     fidDownloadurl={}
819 |     #per-file download thread counters
820 |     fidSemaphores={}
821 |     #cached file sizes
822 |     fileSize={}
823 | def handle(self):
824 | try:
825 | BaseHTTPRequestHandler.handle(self)
826 | except socket.error:
827 | pass
828 |
829 | def handle_one_request(self):
830 | try:
831 | self.raw_requestline = self.rfile.readline(65536*1024+1)
832 | if len(self.raw_requestline) > 65536*1024:
833 | self.requestline = ''
834 | self.request_version = ''
835 | self.command = ''
836 | self.send_error(414)
837 | return
838 | if not self.raw_requestline:
839 | self.close_connection = 1
840 | return
841 | if not self.parse_request():
842 | # An error code has been sent, just exit
843 | return
844 | mname = 'do_' + self.command
845 | if not hasattr(self, mname):
846 | self.send_error(501, 'Unsupported method (%r)' % self.command)
847 | return
848 | method = getattr(self, mname)
849 | #xbmc.log('before call do_Get')
850 | method()
851 | #add debug info close
852 | #xbmc.log('after call do_Get')
853 | if not self.wfile.closed:
854 | self.wfile.flush() #actually send the response if not already done.
855 | except socket.timeout as errno:
856 | #a read or a write timed out. Discard this connection
857 | self.log_error('Request timed out: %r', errno)
858 | self.close_connection = 1
859 | return
860 | '''
861 | Serves a HEAD request
862 | '''
863 | def do_HEAD(s):
864 | xbmc.log( 'XBMCLocalProxy: Serving HEAD request...')
865 | s.answer_request(0)
866 |
867 | '''
868 | Serves a GET request.
869 | '''
870 | def do_GET(s):
871 | xbmc.log( 'XBMCLocalProxy: Serving GET request...')
872 | s.answer_request(1)
873 |
874 | def parse_POST(s):
875 | ctype, pdict = parse_header(s.headers['content-type'])
876 | if ctype == 'multipart/form-data':
877 | postvars = parse_multipart(s.rfile, pdict)
878 | elif ctype == 'application/x-www-form-urlencoded':
879 | length = int(s.headers['content-length'])
880 | postvars = parse.parse_qs(
881 | s.rfile.read(length).decode ('utf-8'),
882 | keep_blank_values=True)
883 | else:
884 | postvars = {}
885 | return postvars
886 |
887 | def do_POST(s):
888 | postvars = s.parse_POST()
889 | s.answer_request(1,postvars)
890 |
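    |     # Rewrite every segment URI in an m3u8 playlist into an absolute URL that is routed back
    |     # through the local /mp2t proxy endpoint.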
891 | def convert_relative_to_absolute(s,m3u8_content, base_url):
892 | lines = m3u8_content.split('\n')
893 | absolute_lines = []
894 |
895 | for line in lines:
896 | if line.startswith('#') or line.strip() == '':
897 | # Skip comments and empty lines
898 | absolute_lines.append(line)
899 | else:
900 | absolute_url = parse.urljoin(base_url, line)
901 | m3uurl='/mp2t?'+parse.urlencode(encode_obj({'url': absolute_url}))
902 | absolute_lines.append(m3uurl)
903 |
904 | return '\n'.join(absolute_lines)
905 |
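    |     # Main request router: dispatches /stop, /version, /djs, /deo, /gjs, /115, /m3u, /mp2t,
    |     # /covm3u, /play, /files, /sha1, /cookie and /sub, and falls back to static files under www/.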
906 | def answer_request(s, sendData,postvars={}):
907 | try:
908 | urlsp=parse.urlparse(s.path)
909 | scheme=urlsp.scheme
910 | netloc=urlsp.netloc
911 | request_path=urlsp.path
912 | requestedWith=''
913 | for key in s.headers:
914 | if key.lower()=='x-requested-with':
915 | requestedWith= s.headers[key]
916 | if request_path=='/stop':
917 | sys.exit()
918 | elif request_path=='/version':
919 | s.send_response(200)
920 | s.end_headers()
921 | t = html(
922 | head(
923 | title('WEB115 VERSION'),
924 | link(rel='stylesheet',href='/css/styles.css')
925 | ),
926 | body(
927 | xbmcaddon.Addon().getAddonInfo('name')+' is Running',
928 | br(),
929 | 'Version: '+xbmcaddon.Addon().getAddonInfo('version')
930 | )
931 | )
932 |
933 | s.wfile.write( comm.ensure_binary(t.render()))
934 |
935 | elif request_path[0:4]=='/djs':
936 | try:
937 | (url,name)=request_path[5:].split('/')
938 | name=name[:name.index('.json')]
939 | dictvideo={
940 | 'encodings':[
941 | {
942 | 'name':'h264',
943 | 'videoSources':[
944 | {
945 | 'resolution':'scode',
946 | 'url':parse.unquote_plus(url)
947 | }
948 | ]
949 | }
950 | ],
951 | 'title':name,
952 | 'id': 999
953 | }
954 | jsonvideo = json.dumps(dictvideo)
955 | debugstr='''{
956 | 'id': 100,
957 | 'title': '%s',
958 | 'encodings': [{
959 | 'name': 'h264',
960 | 'videoSources': [{
961 | 'resolution': 1080,
962 | 'height': 1920,
963 | 'width': 3840,
964 | 'url': '%s'
965 | }, ]
966 | }
967 | ]
968 | }
969 | '''%(name,parse.unquote_plus(url))
970 | s.send_response(200)
971 | #s.send_header('Content-Length', len(jsonvideo))
972 | s.send_header('Content-Length', len(debugstr))
973 | s.send_header('Keep-Alive', 'timeout=5, max=100')
974 | s.send_header('Connection', 'Keep-Alive')
975 | s.send_header('Content-Type', 'application/json')
976 | s.end_headers()
977 | #s.wfile.write(jsonvideo)
978 |                 s.wfile.write(comm.ensure_binary(debugstr))
979 | except Exception as errno:
980 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
981 |
982 | elif request_path[0:4]=='/deo':
983 | try:
984 | qs=parse.parse_qs(urlsp.query, keep_blank_values=True)
985 | url=qs.get('url',['0'])[0]
986 | name=qs.get('title',['0'])[0]
987 | mimetype=qs.get('mimetype',['0'])[0]
988 | s.send_response(200)
989 |
990 | playhtml='''
991 |
992 |
993 |
994 |
995 |
996 | DEOVR
997 |
998 |
999 |
1000 |
1001 |
1002 |
1003 |
1004 |
1005 |
1006 |
1007 |
1008 | '''%(name,url)
1009 | s.send_header('Content-Length', len(playhtml))
1010 | s.end_headers()
1011 |                 s.wfile.write(comm.ensure_binary(playhtml))
1012 | except Exception as errno:
1013 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1014 |
1015 | elif request_path[0:4]=='/gjs':
1016 | try:
1017 | (url,name)=request_path[5:].split('/')
1018 | name=name[:name.index('.json')]
1019 | dictvideo={
1020 | 'encodings':[
1021 | {
1022 | 'name':'h264',
1023 | 'videoSources':[
1024 | {
1025 | 'resolution':'scode',
1026 | 'url':parse.unquote_plus(url)
1027 | }
1028 | ]
1029 | }
1030 | ],
1031 | 'title':name,
1032 | 'id': 999
1033 | }
1034 | jsonvideo = json.dumps(dictvideo)
1035 | debugstr='''{
1036 | 'id': 100,
1037 | 'title': '%s',
1038 | 'encodings': [{
1039 | 'name': 'h264',
1040 | 'videoSources': [{
1041 | 'resolution': 1080,
1042 | 'height': 1920,
1043 | 'width': 3840,
1044 | 'url': '%s'
1045 | }, ]
1046 | }
1047 | ]
1048 | }
1049 | '''%(name,parse.unquote_plus(url))
1050 | s.send_response(200)
1051 | #s.send_header('Content-Length', len(jsonvideo))
1052 | s.send_header('Content-Length', len(debugstr))
1053 | s.send_header('Keep-Alive', 'timeout=5, max=100')
1054 | s.send_header('Connection', 'Keep-Alive')
1055 | s.send_header('Content-Type', 'application/json')
1056 | s.end_headers()
1057 | #s.wfile.write(jsonvideo)
1058 |                 s.wfile.write(comm.ensure_binary(debugstr))
1059 | except Exception as errno:
1060 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1061 |
1062 | elif request_path[0:4]=='/115':
1063 | (fid,cookiestr,changeserver,name)=request_path[5:].split('/')
1064 | cookiestr=parse.unquote_plus(cookiestr)
1065 | res=s.serveFile(fid, cookiestr, changeserver, sendData,name)
1066 |
1067 | elif request_path[0:4]=='/m3u':
1068 | try:
1069 | (pc,sha,name)=request_path[5:].split('/')
1070 | xl = api_115('0')
1071 | datam=xl.urlopen('https://v.anxia.com/site/api/video/m3u8/'+ pc+'.m3u8',userAgent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36')
1072 | #s.wfile.write(datam)
1073 | m3u8urls=[]
1074 |                     for match in re.finditer('BANDWIDTH=(?P<bandwidth>.*?)\x2C.*?(?P<url>http.*?)\r', datam, re.IGNORECASE | re.DOTALL):
1075 | m3u8urls.append((int(match.group('bandwidth')),match.group('url')))
1076 | if len(m3u8urls)>0:
1077 | m3u8urls.sort(key=lambda x:x[0],reverse=True)
1078 | m3u8url= m3u8urls[0][1]
1079 | parsed_url = parse.urlparse(m3u8url)
1080 | protocol = parsed_url.scheme
1081 | hostname = parsed_url.netloc
1082 | base_url = protocol+'://'+hostname+'/'
1083 | extm3u=xl.urlopen(m3u8url,userAgent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36')
1084 | extm3u=s.convert_relative_to_absolute(extm3u,base_url)
1085 | #extm3u='''#EXTM3U
1086 | #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=15000000,RESOLUTION=640x426,NAME='YH'
1087 | #%s'''%(url)
1088 | # urlsp=urlparse(url)
1089 | # scheme=urlsp.scheme
1090 | # netloc=urlsp.netloc
1091 | # datam=xl.urlopen(url)
1092 | # matchkeyurl=re.search(r'(?P\x2Fapi\x2Fvideo\x2Fm3u8\x2Fvideo\x2ekey.*?)[\x22\x27]', datam, re.DOTALL | re.IGNORECASE)
1093 | # if matchkeyurl:
1094 | # keyurl=matchkeyurl.group('keyurl')
1095 | # keyurl2=urlparse.urljoin(url, keyurl)
1096 | # datam=datam.replace(keyurl,keyurl2)
1097 | # datam=datam.replace('\n/','\n%s://%s/'%(scheme,netloc))
1098 | #s.wfile.write(datam)
1099 | s.send_response(200)
1100 | s.send_header('Content-type', 'application/vnd.apple.mpegurl')
1101 | s.send_header('Content-Length', len(extm3u))
1102 | s.end_headers()
1103 | s.wfile.write(comm.ensure_binary(extm3u))
1104 | else:
1105 | xl = api_115('0')
1106 | data = parse.urlencode({'op': 'vip_push','pickcode':pc,'sha1':sha})
1107 | data=xl.urlopen('http://115.com/?ct=play&ac=push',data=data)
1108 | s.send_response(200)
1109 | s.send_header('Content-Type', 'text/html; charset=UTF-8')
1110 | t = html(
1111 | head(
1112 | title('未转码'),
1113 | link(rel='stylesheet',href='/css/styles.css')
1114 | ),
1115 | body('当前文件未转码,请尝试原码播放')
1116 | )
1117 | htmlrender=comm.ensure_binary(t.render())
1118 | s.send_header('Content-Length', len(htmlrender))
1119 | s.end_headers()
1120 | s.wfile.write(htmlrender)
1121 | except Exception as errno:
1122 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1123 | elif request_path=='/mp2t':
1124 | qs=parse.parse_qs(urlsp.query, keep_blank_values=True)
1125 | url=qs.get('url',['0'])[0]
1126 | res=s.serveMP2T(url)
1127 | elif request_path=='/covm3u':
1128 | xl = api_115('0')
1129 | qs=parse.parse_qs(urlsp.query, keep_blank_values=True)
1130 | m3u8url=qs.get('url',['0'])[0]
1131 | try:
1132 | parsed_url = parse.urlparse(m3u8url)
1133 | protocol = parsed_url.scheme
1134 | hostname = parsed_url.netloc
1135 | base_url = protocol+'://'+hostname+'/'
1136 | extm3u=xl.urlopen(m3u8url,userAgent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36')
1137 | extm3u=s.convert_relative_to_absolute(extm3u,base_url)
1138 | s.send_response(200)
1139 | s.send_header('Content-type', 'application/vnd.apple.mpegurl')
1140 | s.send_header('Content-Length', len(extm3u))
1141 | s.send_header('Access-Control-Allow-Credentials', False)
1142 | s.send_header('Access-Control-Allow-Origin','*')
1143 | s.send_header('Timing-Allow-Origin','*')
1144 | s.send_header('Connection', 'keep-alive')
1145 | s.end_headers()
1146 | s.wfile.write(comm.ensure_binary(extm3u))
1147 | except Exception as errno:
1148 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1149 | elif request_path=='/play':
1150 | try:
1151 | qs=parse.parse_qs(urlsp.query, keep_blank_values=True)
1152 | url=qs.get('url',['0'])[0]
1153 | name=qs.get('title',['0'])[0]
1154 | mimetype=qs.get('mimetype',['0'])[0]
1155 | cid=qs.get('cid',['0'])[0]
1156 | pc=qs.get('pc',['0'])[0]
1157 | xl = api_115('0')
1158 | subtitlelist=xl.getsubtitle(pc)
1159 | subtrack=''
1160 | defaultsub='default'
1161 | for sub in subtitlelist:
1162 |
1163 | suburl=('%s://%s/sub/%s/%s.vtt' % (s.request_version.split('/')[0],
1164 | s.headers.get('Host'),
1165 | parse.quote_plus(sub['url']),
1166 | parse.quote_plus(sub['title'])))
1167 | subtrack+='''
1168 | <track kind="subtitles" src="%s" srclang="%s" label="%s" %s>'''%(suburl,sub['language'],parse.quote_plus(sub['title']),defaultsub)
1169 | defaultsub=''
1170 | s.send_response(200)
1171 | s.send_header('Content-Type', 'text/html; charset=UTF-8')
1172 | playhtml='''
1173 |
1174 |
1175 |
1176 |
1177 |
1178 |
1179 |
1180 |
1181 |
1182 |
1184 |
1185 | %s
1186 |
1187 | To view this video please enable JavaScript, and consider upgrading to a web browser that
1188 | supports HTML5 video
1189 |
1190 |
1191 |
1192 |
1193 |
1194 | '''%(url,mimetype,subtrack)
1195 | # t = html(
1196 | # head(
1197 | # meta(charset='utf-8'),
1198 | # title('web115 files'),
1199 | # link(rel='stylesheet',href='https://vjs.zencdn.net/7.6.0/video-js.css'),
1200 | # script(src='https://vjs.zencdn.net/ie8/1.1.2/videojs-ie8.min.js')
1201 | # ),
1202 | # body(
1203 | # video(id='my-video' class_='video-js vjs-default-skin' width='800' data-setup={'controls': True, 'autoplay': True, 'preload': 'auto'}
1204 | # )()
1205 | # script(src='https://vjs.zencdn.net/7.6.0/video.js')
1206 | # )
1207 | # )
1208 | # s.wfile.write(t.render())
1209 | playhtml=comm.ensure_binary(playhtml)
1210 | s.send_header('Content-Length', len(playhtml))
1211 | s.end_headers()
1212 | s.wfile.write(playhtml)
1213 | except Exception as errno:
1214 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1215 |
1216 | elif request_path=='/files':
1217 | qs=parse.parse_qs(urlsp.query, keep_blank_values=True)
1218 | cid=qs.get('cid',[0])[0]
1219 | cid=int(cid)
1220 | offset=int(qs.get('offset',[0])[0])
1221 | star=qs.get('star',[0])[0]
1222 | typefilter=qs.get('typefilter',[0])[0]
1223 | cursorttype=qs.get('cursorttype',['0'])[0]
1224 | sorttype ='user_utime'
1225 | if cursorttype=='2' or cursorttype=='3':
1226 | sorttype ='file_size'
1227 | if cursorttype=='4' or cursorttype=='5':
1228 | sorttype ='file_name'
1229 | sortasc='0'
1230 | if cursorttype=='1' or cursorttype=='2' or cursorttype=='4':
1231 | sortasc='1'
1232 | xl = api_115('0')
1233 | taglist=[]
1234 | data=xl.gettaglist()
1235 | if data['state']:
1236 | fllist=sorted( data['data']['list'],key=lambda k:k['sort'],reverse=True)
1237 | for tag in fllist:
1238 | tagname=tag['name']
1239 | taglist.append([tagname,tag['id']])
1240 | tagnamelist=[q[0] for q in taglist]
1241 | tagidlist=[q[1] for q in taglist]
1242 |
1243 | searchvalue=qs.get('searchvalue',[''])[0]
1244 | if len(searchvalue)<3:searchvalue=''
1245 | searchstr=searchvalue
1246 | if searchvalue[0:2]=='t:':
1247 | searchstr=searchvalue[2:]
1248 | try:
1249 | searchstr=('tag'+tagidlist[tagnamelist.index(searchstr)])
1250 | except:
1251 | pass
1252 | #pageitem= int(xbmcaddon.Addon().getSetting('pageitem'))
1253 | pageitem= int(qs.get('pageitem',[0])[0])
1254 | if pageitem==0: pageitem=80
1255 | if pageitem<8: pageitem=8
1256 | if pageitem>200: pageitem=200
1257 | data=xl.getfilelist(cid=cid,offset=offset,pageitem=pageitem,star=star,sorttype=sorttype,sortasc=sortasc,typefilter=typefilter,nf='0',search_value=searchstr)
1258 | #xbmc.log(str(data),level=xbmc.LOGERROR)
1259 | if data['state']:
1260 | def sha1inout(ctx):
1261 | for title, url in [('导入SHA1','/sha1?'+parse.urlencode({'mode':'beginimport','cid': cid}))]:
1262 | yield td(a(href=url,class_='sha1import')(title))
1263 | def sort(ctx):
1264 | for title, url in [
1265 | ('从新到旧','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':typefilter,'cursorttype':0,'searchvalue':searchvalue,'star': star})),
1266 | ('从旧到新','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':typefilter,'cursorttype':1,'searchvalue':searchvalue,'star': star})),
1267 | ('从小到大','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':typefilter,'cursorttype':2,'searchvalue':searchvalue,'star': star})),
1268 | ('从大到小','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':typefilter,'cursorttype':3,'searchvalue':searchvalue,'star': star})),
1269 | ('从A到Z','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':typefilter,'cursorttype':4,'searchvalue':searchvalue,'star': star})),
1270 | ('从Z到A','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':typefilter,'cursorttype':5,'searchvalue':searchvalue,'star': star}))]:
1271 | yield td(a(href=url,class_='sort')(title))
1272 | def filters(ctx):
1273 | for title, url in [
1274 | ('全部','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':0,'cursorttype':cursorttype,'searchvalue':searchvalue})),
1275 | ('视频','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':4,'cursorttype':cursorttype,'searchvalue':searchvalue})),
1276 | ('图片','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':2,'cursorttype':cursorttype,'searchvalue':searchvalue})),
1277 | ('音乐','/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'typefilter':3,'cursorttype':cursorttype,'searchvalue':searchvalue}))]:
1278 | yield td(a(href=url,class_='typefilter')(title))
1279 | def paths(ctx):
1280 | if 'path' in data:
1281 | for item in data['path']:
1282 | title=''
1283 | url=''
1284 | if str(item['cid'])!=str(cid):
1285 | title='返回到【%s】'%(item['name'])
1286 | url='/files?'+parse.urlencode({'cid': item['cid'],'offset':0,'pageitem':pageitem,'cursorttype':cursorttype})
1287 | yield td(a(href=url,class_='path',title=title)(title))
1288 | def searchcur(ctx):
1289 | cidname=''
1290 | if 'path' in data:
1291 | if len(data['path'])>0:
1292 | cidname=data['path'][-1]['name']
1293 | if 'folder' in data:
1294 | cidname=data['folder']['name']
1295 | if cidname!='':
1296 | def tagnameoptions(ctx):
1297 | for tagname in tagnamelist:
1298 | yield option(value='t:'+tagname)
1299 | url='/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,'cursorttype':cursorttype})
1300 | title='当前【%s】'%(cidname)
1301 | yield form(action='/files',method='GET')(
1302 | input_( type='hidden', name="cid",value=cid),
1303 | input_( type='hidden', name="cursorttype",value=cursorttype),
1304 | input_( type='hidden', name="pageitem",value=pageitem),
1305 | table(tr(
1306 | td(a(href=url,class_='curpath',title=title)(title)),
1307 | td(input_(class_='bigfont', type='text', name="searchvalue", list='tagnames',value=searchvalue),datalist(id='tagnames')(tagnameoptions)),
1308 | td(input_(class_='bigfont', type='submit', name="submit",value='搜索')),
1309 | )
1310 | ))
1311 | def navpage(ctx):
1312 | count=int(data['count'])
1313 | pages=int(count/pageitem)
1314 | if count%pageitem>0:
1315 | pages=pages+1
1316 | curpage=int(offset/pageitem)+1
1317 | offlast=offset+pageitem
1318 | if offlast>count:
1319 | offlast=count
1320 | yield td('每页')
1321 | pageitems=[8,16,40,80,120,200]
1322 | def optionspageitem(ctx):
1323 | for pi in pageitems:
1324 | url='/files?'+parse.urlencode({'cid': cid,'offset':offset,'pageitem':pi,
1325 | 'cursorttype':cursorttype,'typefilter':typefilter,'searchvalue':searchvalue,'star': star})
1326 | if pi==pageitem:
1327 | yield option(value=url,selected='selected',class_='pagesel')(str(pi))
1328 | else:
1329 | yield option(value=url)(str(pi))
1330 | yield td(select(onchange='if (this.value) window.location.href=this.value',class_='pagesel')(optionspageitem))
1331 | yield td('项')
1332 | if curpage>1:
1333 | url='/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':pageitem,
1334 | 'cursorttype':cursorttype,'typefilter':typefilter,'searchvalue':searchvalue,'star': star})
1335 | yield td(a(href=url,title='第一页',class_='pagesel')('|<'),class_='pagesel')
1336 |
1337 | url='/files?'+parse.urlencode({'cid': cid,'offset':pageitem*(curpage-2),'pageitem':pageitem,
1338 | 'cursorttype':cursorttype,'typefilter':typefilter,'searchvalue':searchvalue,'star': star})
1339 | yield td(a(href=url,title='上一页',class_='pagesel')('<'),class_='pagesel')
1340 | else:
1341 | yield td(a(href='#',title='第一页',class_='pagesel')('|<'),class_='pagesel')
1342 | yield td(a(href='#',title='上一页',class_='pagesel')('<'),class_='pagesel')
1343 | def optionspage(ctx):
1344 | for page in range(1,pages+1):
1345 | url='/files?'+parse.urlencode({'cid': cid,'offset':pageitem*(page-1),'pageitem':pageitem,
1346 | 'cursorttype':cursorttype,'typefilter':typefilter,'searchvalue':searchvalue,'star': star})
1347 | #offlast=offset+pageitem
1348 | #if offlast>count:
1349 | # offlast=count
1350 | strpage='第%03d页'%(page)
1351 | if curpage==page:
1352 | yield option(value=url,selected='selected',class_='pagesel')(strpage)
1353 | else:
1354 | yield option(value=url)(strpage)
1355 | yield td(select(onchange='if (this.value) window.location.href=this.value',class_='pagesel')(optionspage),class_='pagesel')
1356 | yield td('共%03d页'%(pages),class_='pagesel')
1357 |
1358 |                         if curpage<pages:
1359 |                             url='/files?'+parse.urlencode({'cid': cid,'offset':pageitem*curpage,'pageitem':pageitem,
1360 |                                 'cursorttype':cursorttype,'typefilter':typefilter,'searchvalue':searchvalue,'star': star})
1361 |                             yield td(a(href=url,title='下一页',class_='pagesel')('>'),class_='pagesel')
1362 |
1363 | url='/files?'+parse.urlencode({'cid': cid,'offset':pageitem*(pages-1),'pageitem':pageitem,
1364 | 'cursorttype':cursorttype,'typefilter':typefilter,'searchvalue':searchvalue,'star': star})
1365 | yield td(a(href=url,title='最后一页',class_='pagesel')('>|'),class_='pagesel')
1366 | else:
1367 | yield td(a(href='#',title='下一页',class_='pagesel')('>'),class_='pagesel')
1368 | yield td(a(href='#',title='最后一页',class_='pagesel')('>|'),class_='pagesel')
1369 | yield td('(当前:%s至%s|共%s个文件)'%(offset+1,offlast,count),class_='curpath')
1370 | def items(ctx):
1371 | for item in data['data']:
1372 |                             #data['data'] is sometimes a dict rather than a list; iterating it then yields the key strings. 20180425
1373 | if not isinstance(item, dict):
1374 | item=data['data'][item]
1375 | title=''
1376 | url=''
1377 | sha1url=''
1378 | locurl=''
1379 | ifFolder=False
1380 | isvideo=False
1381 | ism3u8=False
1382 | mimetype=''
1383 | if 'sha' in item:
1384 | ifFolder=False
1385 | if cid!=int(item['cid']):
1386 | locurl='/files?'+parse.urlencode({'cid':item['cid'],'offset':0,'pageitem':pageitem,'typefilter':typefilter,'cursorttype':0,'searchvalue':''})
1387 | title=item['n']
1388 | mimetype, _ =mimetypes.guess_type('a.'+item['ico'].lower())
1389 | url='%s://%s/115/%s/%s/%s/%s' % (s.request_version.split('/')[0],s.headers.get('Host'),item['fid'],'0','0',parse.quote_plus(comm.ensure_binary(title)))
1390 | sha1url='/sha1?'+parse.urlencode({'mode':'exportfid','name': item['n'],'length': item['s'],'sha1': item['sha'],'pc': item['pc']})
1391 | #if item['ico'].lower() in ['mp4', 'wmv', 'avi', 'mkv', 'mpg','ts','vob','m4v','mov','flv','rmvb']:
1392 | if 'iv' in item:
1393 | isvideo=True
1394 | '''
1395 | sourceurl=parse.quote_plus('%s://%s/deo/%s/%s/%s/%s' % (s.request_version.split('/')[0],s.headers.get('Host'),item['fid'],'0','0','a.mp4'))
1396 | sourceurl='%s://%s/115/%s/%s/%s/%s' % (s.request_version.split('/')[0],s.headers.get('Host'),item['fid'],'0','0',parse.quote_plus(title))
1397 | url='deovr://%s' % (sourceurl)
1398 | #url='/giz/%s/%s/%s/%s' % (item['fid'],'0','0',parse.quote_plus(title+'.json'))
1399 | #url='/deo/%s/%s/%s/%s' % (item['fid'],'0','0',parse.quote_plus(title)+'.json')
1400 |
1401 | video_code = int(xbmcaddon.Addon().getSetting('video_code'))
1402 | if video_code==2 or (video_code==1 and item['ico'].lower()!='mp4'):
1403 | datam=xl.urlopen('http://115.com/api/video/m3u8/'+ xl.getpc(item['fid'])+'.m3u8')
1404 | m3u8urls=[]
1405 |                             for match in re.finditer('BANDWIDTH=(?P<bandwidth>.*?)\x2C.*?(?P<url>http.*?)\r', datam, re.IGNORECASE | re.DOTALL):
1406 | m3u8urls.append((int(match.group('bandwidth')),match.group('url')))
1407 | if len(m3u8urls)>0:
1408 | url='/m3u/%s/%s.m3u8' % (item['fid'],parse.quote_plus(title))
1409 | #m3u8urls.sort(key=lambda x:x[0],reverse=True)
1410 | #url= m3u8urls[0][1]
1411 | ism3u8=True
1412 | mimetype='application/x-mpegURL'
1413 | '''
1414 | else:
1415 | if item['n'][0:8]=='tempplay':
1416 | continue;
1417 | ifFolder=True
1418 | title='【%s】'%(item['n'])
1419 | url='/files?'+parse.urlencode({'cid': item['cid'],'offset':0,'pageitem':pageitem,'cursorttype':cursorttype})
1420 | sha1url='/sha1?'+parse.urlencode({'mode':'beginexportcid','cid': item['cid']})
1421 | if title:
1422 | tds=[]
1423 | # tds.append(td(a(href=sha1url, target="_blank" ,class_='sha1')('导出SHA1'),class_='sha1td'))
1424 | if locurl:
1425 | tds.append(td(a(href=locurl,type=mimetype,class_='loc')('定位'),class_='loctd'))
1426 | if isvideo:
1427 | # url='/play?'+parse.urlencode({'url': url,'title':title+'.m3u8','mimetype':mimetype}))
1428 | # yield li(a(href=url,title=title)(title))
1429 | #yield li(a(href=url,type=mimetype)(title),class_='video')
1430 | playurl='/play?'+parse.urlencode(encode_obj({'url': url,'title':item['n']+'.m3u8','mimetype':mimetype,'cid':item['cid'],'pc':item['pc']}))
1431 | m3url=('/m3u/%s/%s/%s.m3u8' % (item['pc'],item['sha'],parse.quote_plus(comm.ensure_binary(title))))
1432 | m3url=('%s://%s/m3u/%s/%s/%s.m3u8' % (s.request_version.split('/')[0],
1433 | s.headers.get('Host'),
1434 | item['pc'],item['sha'],parse.quote_plus(comm.ensure_binary(title))))
1435 | m3uplayurl='/play?'+parse.urlencode(encode_obj({'url': m3url,'title':title+'.m3u8','mimetype':'application/x-mpegURL','cid':item['cid'],'pc':item['pc']}))
1436 | deourl=('deovr://%s://%s/djs/%s/%s.json' % (s.request_version.split('/')[0],
1437 | s.headers.get('Host'),
1438 | parse.quote_plus(url),
1439 | parse.quote_plus(comm.ensure_binary(title)),
1440 | ))
1441 | m3udeourl=('deovr://%s://%s/djs/%s/%s.json' % (s.request_version.split('/')[0],
1442 | s.headers.get('Host'),
1443 | parse.quote_plus(m3url),
1444 | parse.quote_plus(comm.ensure_binary(title)),
1445 | ))
1446 | #gizurl=('gizmovr://type=video&url=%s' % (url))
1447 | #m3ugizurl=('gizmovr://type=video&url=%s' % (m3url)).encode('latin-1')
1448 | #xbmc.log(msg='requestedWith:'+requestedWith,level=xbmc.LOGERROR)
1449 | tds.append( td(a(href=url,type=mimetype,class_='video')(title),class_='videotd'))
1450 | tds.append( td(a(href=playurl,class_='vid2')('原码HTML5')))
1451 | tds.append( td(a(href=m3uplayurl,class_='vid2')('转码HTML5')))
1452 | #if requestedWith.lower().find('deovr')>=0:
1453 | #tds.append( td(a(href=m3udeourl,class_='vid2')('DEO转码播放')))
1454 | #else:
1455 | tds.append( td(a(href=m3url,type='application/x-mpegURL',class_='vid2')('转码直连')))
1456 | elif ifFolder:
1457 | tds.append(td(a(href=url,type=mimetype,class_='folder')(title),colspan='4'))
1458 | else:
1459 | tds.append(td(a(href=url,type=mimetype)(title),colspan='4'))
1460 | yield tr(tds)
1461 | s.send_response(200)
1462 | s.send_header('Content-Type', 'text/html; charset=UTF-8')
1463 |
1464 | t = html(
1465 | head(
1466 | title('web115 files'),
1467 | link(rel='stylesheet',href='/css/styles.css')
1468 | ),
1469 | body(
1470 | # nav(
1471 | # select(onchange='if (this.value) window.location.href=this.value')(sort),
1472 | # ),
1473 | # nav(
1474 | # select(onchange='if (this.value) window.location.href=this.value')(filters),
1475 | # ),
1476 |
1477 | #ul(paths),
1478 | table(tr(paths)),
1479 | searchcur,
1480 | #table(tr(sha1inout,sort)),
1481 | table(tr(sort)),
1482 | table(tr(filters,navpage)),
1483 | table(items),
1484 | )
1485 | )
1486 | htmlrender=comm.ensure_binary(t.render())
1487 | s.send_header('Content-Length', len(htmlrender))
1488 | s.end_headers()
1489 | s.wfile.write(htmlrender)
1490 | else:
1491 | s.send_response(200)
1492 | t = html(
1493 | head(
1494 | meta(charset='utf-8'),
1495 | title('WEB115 error'),
1496 | link(rel='stylesheet',href='/css/styles.css')
1497 | ),
1498 | body(
1499 | '获取文件列表失败',
1500 | br(),
1501 | '请重新扫码登录115网盘插件',
1502 | br(),
1503 | '并重新启动KODI',
1504 | )
1505 | )
1506 | htmlrender=comm.ensure_binary(t.render())
1507 | s.send_header('Content-Length', len(htmlrender))
1508 | s.end_headers()
1509 | s.wfile.write(htmlrender)
1510 | elif request_path=='/sha1':
1511 | qs=parse.parse_qs(urlsp.query, keep_blank_values=True)
1512 | mode=str(qs.get('mode',[0])[0])
1513 | xl = api_115('0')
1514 | if mode=='beginimport':
1515 | cid=str(qs.get('cid',[0])[0])
1516 | data=xl.getfilelist(cid=cid,offset=0,pageitem=10,star='0',sorttype='user_utime',sortasc='0')
1517 | #xbmc.log(str(data),level=xbmc.LOGERROR)
1518 | if data['state']:
1519 | cidname=''
1520 | if 'path' in data:
1521 | if len(data['path'])>0:
1522 | cidname=data['path'][-1]['name']
1523 | if 'folder' in data:
1524 | cidname=data['folder']['name']
1525 | s.send_response(200)
1526 | t = html(
1527 | head(
1528 | meta(charset='utf-8'),
1529 | title('115 SHA1'),
1530 | link(rel='stylesheet',href='/css/styles.css')
1531 | ),
1532 | body(
1533 | script(type='text/javascript')(r'''
1534 | function filtershastr() {
1535 | var content = document.getElementsByName("sha1str")[0].value.split('\n');
1536 | result="";
1537 | content.forEach(filter);
1538 | function filter(value) {
1539 | if(value.split('|').length>=4){
1540 | if(value.text.substring(0, 5)!="ed2k:"){
1541 | result=result+value+'\n';}
1542 | }
1543 | };
1544 | document.getElementsByName("sha1str")[0].value=result;
1545 | }
1546 | '''),
1547 | form(name='myform',action='/sha1?mode=import',method='POST', onsubmit='filtershastr()')(
1548 | input_( type='hidden', name="cid",value=cid),
1549 | label('当前【%s】'%(cidname)),
1550 | textarea(rows='40', cols="60", name='sha1str')(''),
1551 | input_(class_='bigfont', type='submit', name="submit",value='导入SHA1'),
1552 | )
1553 | )
1554 | )
1555 | htmlrender=comm.ensure_binary(t.render())
1556 | s.send_header('Content-Length', len(htmlrender))
1557 | s.end_headers()
1558 | s.wfile.write(htmlrender)
1559 | if mode=='import':
1560 | #cid=str(qs.get('cid',[0])[0])
1561 | cid=postvars['cid'][0]
1562 | xbmc.log("rootcid"+cid,level=xbmc.LOGERROR)
1563 | cid=xl.createdir(cid,'sha-%s'%(datetime.now().strftime('%Y%m%d-%H%M%S')))
1564 | #sha1str=str(qs.get('sha1str',[0])[0])
1565 | #sha1str=str(postvars['sha1str'])
1566 | sha1str=postvars['sha1str']
1567 | sha1str='\n'.join(sha1str)
1568 |
1569 | sha1str=parse.unquote_plus(sha1str)
1570 | #xbmc.log(sha1str,level=xbmc.LOGERROR)
1571 | succ=fail=0
1572 | subfolders={}
1573 | def getsubfoldercid(cid,foldername):
1574 | if not type(foldername)==str:
1575 | return cid
1576 | foldername = foldername.strip()
1577 | if foldername == '':
1578 | return cid
1579 | if foldername in subfolders:
1580 | return subfolders[foldername]
1581 | else:
1582 | foldernamelast=foldername
1583 | folders=foldername.split('|')
1584 | if len(folders)>1:
1585 | cid=getsubfoldercid(cid,'|'.join(folders[:-1]))
1586 | foldernamelast=folders[-1]
1587 | subfolders[foldername]=xl.createdir(cid,foldernamelast)
1588 | xbmc.log(msg='subfolders %s,%s,%s'%(foldername,foldernamelast,subfolders[foldername]),level=xbmc.LOGERROR)
1589 | return subfolders[foldername]
1590 |
1591 | link115s=[]
1592 |                     for match in re.finditer(r'(?:115\x3A\x2f\x2f|^)(?P<shalink>[^\r\n\x3A\x7C]+?[\x7C][0-9]+[\x7C][0-9a-fA-F]{40}[\x7C][0-9a-fA-F]{40})(?:\x7C(?P<folder>.+?$)|\s+.*?|)', sha1str, re.IGNORECASE | re.MULTILINE):
1593 | link115s.append({'shalink':match.group('shalink'),'folder':match.group('folder')})
1594 | maxcount=300
1595 | if len(link115s)>maxcount:
1596 | s.send_response(200)
1597 | t = html(
1598 | head(
1599 | meta(charset='utf-8'),
1600 | title('Import '),
1601 | link(rel='stylesheet',href='/css/styles.css')
1602 | ),
1603 | body(
1604 | label('链接数为%i,最高支持%i,可尝试分批导入'%(len(link115s),maxcount)),
1605 | br(),
1606 | br(),
1607 | label('如有大批量导入导出需求,建议使用'),
1608 | a(href='http://www.tampermonkey.net/',target="_blank",)('油猴'),
1609 | label('配合'),
1610 | a(href='https://gist.github.com/Nerver4Ever/953447c9ecd330ffc0861d4cbb839369/raw/29b609ce37cb58a9e568069cb569e442941ea99c/115%25E8%25BD%25AC%25E5%25AD%2598%25E5%258A%25A9%25E6%2589%258Bui%25E4%25BC%2598%25E5%258C%2596%25E7%2589%2588.user.js',target="_blank",)('115转存助手ui优化版'),
1611 | br(),
1612 | a(href='#',onClick='javascript:history.go(-1)',class_='return')('返回上一页'),
1613 | )
1614 | )
1615 | htmlrender=comm.ensure_binary(t.render())
1616 | s.send_header('Content-Length', len(htmlrender))
1617 | s.end_headers()
1618 | s.wfile.write(htmlrender)
1619 | return
1620 | #for match in re.finditer(r'^\s*(?:115\x3A\x2f\x2f)?(?P[^\r\n\x2F\x7C]+?[\x7C][0-9]+[\x7C][0-9a-fA-F]{40}[\x7C][0-9a-fA-F]{40})\x7C?(?P.*?)\s*$', sha1str, re.IGNORECASE | re.MULTILINE):
1621 | failedlist = []
1622 | oldnewnames={}
1623 | tempnameindex=0
1624 | for link115 in link115s:
1625 | shalink=link115['shalink']
1626 | linkpart=shalink.split('|')
1627 |
1628 | filename=linkpart[0]
1629 | filesize=linkpart[1]
1630 | fileid=linkpart[2]
1631 | preid=linkpart[3].strip()
1632 | tempnameindex=tempnameindex+1
1633 | tempname="{0}{1}".format(tempnameindex,filename[filename.rfind('.'):].lower())
1634 | subcid=getsubfoldercid(cid,link115['folder'])
1635 | #xbmc.log(msg="{0}||||{1}".format(filename,tempname),level=xbmc.LOGERROR)
1636 | tm=str(int(time.time()))+'000'
1637 | if xl.import_file_with_sha1(preid,fileid,filesize,tempname,subcid,tm):
1638 | succ+=1
1639 | oldnewnames[tempname]=filename
1640 | else:
1641 | fail+=1
1642 | failedlist.append(shalink)
1643 | xl.batchcidrename(cid,oldnewnames)
1644 | url='/files?'+parse.urlencode({'cid': cid,'offset':0,'pageitem':10,'cursorttype':0})
1645 |
1646 | s.send_response(200)
1647 | t = html(
1648 | head(
1649 | meta(charset='utf-8'),
1650 | title('115 SHA1'),
1651 | link(rel='stylesheet',href='/css/styles.css')
1652 | ),
1653 | body(
1654 | label('成功:%i 失败:%i'%(succ,fail)),
1655 | br(),
1656 | textarea()('\r\n'.join(failedlist)),
1657 | a(href=url,title='打开保存目录',class_='path')('打开保存目录'),
1658 | )
1659 | )
1660 | htmlrender=comm.ensure_binary(t.render())
1661 | s.send_header('Content-Length', len(htmlrender))
1662 | s.end_headers()
1663 | s.wfile.write(htmlrender)
1664 | if mode=='exportfid':
1665 | name=str(qs.get('name',[0])[0])
1666 | length=str(qs.get('length',[0])[0])
1667 | sha1=str(qs.get('sha1',[0])[0])
1668 | pc=str(qs.get('pc',[0])[0])
1669 | exportsha=xl.exportfid(name,length,sha1,pc)
1670 | s.send_response(200)
1671 | s.send_header('Content-Length', len(exportsha))
1672 | s.send_header('Keep-Alive', 'timeout=5, max=100')
1673 | s.send_header('Connection', 'Keep-Alive')
1674 | s.send_header('Content-Type', 'text/plain; charset=UTF-8')
1675 | s.end_headers()
1676 | s.wfile.write(str.encode(exportsha))
1677 | if mode=='beginexportcid':
1678 | maxcount=200
1679 | warningcount=20
1680 | cid=str(qs.get('cid',[0])[0])
1681 | filescount=xl.countfiles(cid)
1682 | if filescount>maxcount:
1683 | s.send_response(200)
1684 | t = html(
1685 | head(
1686 | meta(charset='utf-8'),
1687 | title('Export '),
1688 | link(rel='stylesheet',href='/css/styles.css')
1689 | ),
1690 | body(
1691 | label('目录下文件数为%i,最高支持%i'%(filescount,maxcount)),
1692 | br(),
1693 | br(),
1694 | label('如有大批量导入导出需求,建议使用'),
1695 | a(href='http://www.tampermonkey.net/',target="_blank",)('油猴'),
1696 | label('配合'),
1697 | a(href='https://gist.github.com/Nerver4Ever/953447c9ecd330ffc0861d4cbb839369/raw/29b609ce37cb58a9e568069cb569e442941ea99c/115%25E8%25BD%25AC%25E5%25AD%2598%25E5%258A%25A9%25E6%2589%258Bui%25E4%25BC%2598%25E5%258C%2596%25E7%2589%2588.user.js',target="_blank",)('115转存助手ui优化版'),
1698 | br(),
1699 | a(href='#',onClick='window.close();',class_='return')('关闭页面'),
1700 | )
1701 | )
1702 | htmlrender=comm.ensure_binary(t.render())
1703 | s.send_header('Content-Length', len(htmlrender))
1704 | s.end_headers()
1705 | s.wfile.write(htmlrender)
1706 | elif filescount>warningcount:
1707 | url='/sha1?'+parse.urlencode({'mode':'exportcid','cid': cid})
1708 | s.send_response(200)
1709 | t = html(
1710 | head(
1711 | meta(charset='utf-8'),
1712 | title('Export '),
1713 | link(rel='stylesheet',href='/css/styles.css')
1714 | ),
1715 | body(
1716 | label('目录下文件数为%i,将会耗费较长时间,是否继续?'%(filescount)),
1717 | br(),
1718 | a(href=url,title='继续导出',class_='path')('继续导出'),
1719 | br(),
1720 | br(),
1721 | a(href='#',onClick='window.close();',class_='return')('关闭页面'),
1722 | )
1723 | )
1724 | htmlrender=comm.ensure_binary(t.render())
1725 | s.send_header('Content-Length', len(htmlrender))
1726 | s.end_headers()
1727 | s.wfile.write(htmlrender)
1728 | else:
1729 | url='/sha1?'+parse.urlencode({'mode':'exportcid','cid': cid})
1730 | s.send_response(200)
1731 | t = html(
1732 | head(
1733 | meta(charset='utf-8'),
1734 | title('Export '),
1735 | link(rel='stylesheet',href='/css/styles.css')
1736 | ),
1737 | body(
1738 | label('目录下文件数为%i,是否继续?'%(filescount)),
1739 | br(),
1740 | a(href=url,title='继续导出',class_='path')('继续导出'),
1741 | br(),
1742 | br(),
1743 | a(href='#',onClick='window.close();',class_='return')('关闭页面'),
1744 | )
1745 | )
1746 | htmlrender=comm.ensure_binary(t.render())
1747 | s.send_header('Content-Length', len(htmlrender))
1748 | s.end_headers()
1749 | s.wfile.write(htmlrender)
1750 | if mode=='exportcid':
1751 | cid=str(qs.get('cid',[0])[0])
1752 | pathdeep=xl.pathdeep(cid)
1753 | outlist=[]
1754 | xl.exportcid(outlist,cid,pathdeep)
1755 | exportsha=str.encode('\r\n'.join(outlist))
1756 | s.send_response(200)
1757 | s.send_header('Content-Length', len(exportsha))
1758 | s.send_header('Keep-Alive', 'timeout=5, max=100')
1759 | s.send_header('Connection', 'Keep-Alive')
1760 | s.send_header('Content-Type', 'text/plain; charset=UTF-8')
1761 | s.end_headers()
1762 | s.wfile.write(exportsha)
1763 |
1764 | elif request_path=='/cookie':
1765 | qs=parse.parse_qs(urlsp.query, keep_blank_values=True)
1766 | curformat=str(qs.get('cformat',[0])[0])
1767 | ac=str(qs.get('ac',[0])[0])
1768 | if ac=='save':
1769 | cookiestr=str(qs.get('cookiestr',[0])[0])
1770 | savecookiefile(cookiestr)
1771 | _cookiestr=loadcookiefile()
1772 | #xbmc.log(msg='zzzzzzz:'+curformat,level=xbmc.LOGERROR)
1773 | cookiestr=loadcookiefile(cformat=curformat)
1774 | #xbmc.log(msg='zzzzzzz:'+cookiestr,level=xbmc.LOGERROR)
1775 | def tdcformat(ctx):
1776 | yield label(for_='cformat')('cookie格式:')
1777 | def optionscformat(ctx):
1778 | for cformat in ['simple','json','LWP']:
1779 | url='/cookie?'+parse.urlencode({'cformat': cformat})
1780 | if curformat==cformat:
1781 | yield option(value=url,selected='selected',class_='pagesel')(cformat)
1782 | else:
1783 | yield option(value=url)(cformat)
1784 | yield select(id='cformat',onchange='if (this.value) window.location.href=this.value',class_='pagesel')(optionscformat)
1785 |
1786 | def savecookie(ctx):
1787 | yield form(action='/cookie',method='GET')(
1788 | input_( type='hidden', name='ac',value='save'),
1789 | textarea(rows='40', cols="60", name='cookiestr')(cookiestr),
1790 | input_(class_='bigfont', type='submit', name="submit",value='保存',onclick="return confirm('错误的值将造成登录失败,是否继续?')" ),
1791 | )
1792 | s.send_response(200)
1793 | t = html(
1794 | head(
1795 | meta(charset='utf-8'),
1796 | title('WEB115 COOKIE'),
1797 | link(rel='stylesheet',href='/css/styles.css')
1798 | ),
1799 | body(
1800 | tdcformat,
1801 | savecookie,
1802 | )
1803 | )
1804 | htmlrender=comm.ensure_binary(t.render())
1805 | s.send_header('Content-Length', len(htmlrender))
1806 | s.end_headers()
1807 | s.wfile.write(htmlrender)
1808 | elif request_path[0:4]=='/sub':
1809 | try:
1810 | (suburl,name)=request_path[5:].split('/')
1811 | suburl=parse.unquote_plus(suburl)
1812 | name=name[:name.index('.vtt')]
1813 | xl = api_115('0')
1814 | vttstr=xl.coversrttovtt(srturl=suburl)
1815 | s.send_response(200)
1816 | s.send_header('Content-Length', len(vttstr))
1817 | s.send_header('Keep-Alive', 'timeout=5, max=100')
1818 | s.send_header('Connection', 'Keep-Alive')
1819 | s.send_header('Content-Type', 'text/vtt; charset=UTF-8')
1820 | s.end_headers()
1821 | s.wfile.write(comm.ensure_binary(vttstr))
1822 | except Exception as errno:
1823 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1824 | else:
1825 | try:
1826 | if request_path=='/' or request_path=='':
1827 | request_path='/index.html'
1828 | filepath = xbmc.translatePath( os.path.join( __cwd__, 'www', request_path[1:]))
1829 |
1830 | f = open(filepath,'rb')
1831 |
1832 | except IOError:
1833 | s.send_error(404,'File Not Found: %s ' % request_path)
1834 | else:
1835 | s.send_response(200)
1836 | mimetype, _ = mimetypes.guess_type(filepath)
1837 | s.send_header('Content-type', mimetype)
1838 | #s.send_header('Content-Length', filesize)
1839 | s.end_headers()
1840 | shutil.copyfileobj(f,s.wfile)
1841 | except:
1842 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1843 | s.wfile.close()
1844 | return
1845 | try:
1846 | s.wfile.close()
1847 | except Exception as errno:
1848 | #xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1849 | pass
1850 |
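# getfidUrl: resolve the download URL for a 115 file id via api_115, caching the
# result in s.fidDownloadurl as "<timestamp> <url>" and refreshing entries older
# than two hours (7200 s); on failure it returns an "error: ..." string instead.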
1851 | def getfidUrl(s, fid, cookiestr):
1852 | xl = api_115(cookiestr)
1853 | filecopypc=''
1854 | cid=''
1855 | try:
1856 | fidUrl=''
1857 | if fid in s.fidDownloadurl:
1858 | (strtime,fidUrl)=s.fidDownloadurl[fid].split(' ')
1859 | timespan=int(time.time())-int(strtime)
1860 | #xbmc.log(msg='fidUrl timespan=%i'%(timespan),level=xbmc.LOGERROR)
1861 | if timespan>=7200:
1862 | fidUrl=''
1863 | if fidUrl=='':
1864 | fpc=xl.getpc(fid)
1865 | fidUrl=xl.getfiledownloadurl(fpc)
1866 | #xbmc.log(msg='fpc=%s;fidUrl=%s'%(fpc,fidUrl),level=xbmc.LOGERROR)
1867 | s.fidDownloadurl[fid]=str(int(time.time()))+' '+fidUrl
1868 | return fidUrl
1869 | except Exception as errno:
1870 | errorstr=' '.join(('error:', str(errno)))
1871 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1872 | return errorstr
1873 |
1874 |
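# urlopenwithRetry: open the request through SmartRedirectHandler, retrying up to
# ten times with an increasing sleep between attempts; returns None if every
# attempt fails.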
1875 | def urlopenwithRetry(s,req):
1876 | for icount in range(10):
1877 | try:
1878 | opener2 = request.build_opener(SmartRedirectHandler)
1879 | response= opener2.open(req,timeout=40)
1880 | return response
1882 | except:
1883 | time.sleep(icount+1)
1884 | continue
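# serveMP2T: relay a transport-stream URL for the client, forwarding its request
# headers (except Host and User-Agent), adding browser-like UA/Referer/Origin
# headers, copying selected response headers plus a permissive CORS header, and
# streaming the body back with shutil.copyfileobj.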
1885 | def serveMP2T(s, url):
1886 | reqheaders={}
1887 | for key in s.headers:
1888 | #xbmc.log(msg='zzzdebug:XBMCLocalProxy: reqheaders %s:%s'%(key, s.headers[key]))
1889 | if key.lower()!='host' and key.lower()!='user-agent':
1890 | #opener.addheader(key,s.headers[key])
1891 | #request.add_header(key, s.headers[key])
1892 | reqheaders[key]=s.headers[key]
1893 |
1894 | reqheaders['User-Agent']='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36'
1895 | reqheaders['Referer']='https://v.anxia.com/'
1896 | reqheaders['Origin']='http://115.com/'
1897 |
1898 | #req = request.Request('https://cors-anywhere.herokuapp.com/'+url, headers=reqheaders)
1899 | wcode=200
1900 | wheaders={}
1901 | response=None
1902 | try:
1903 | remote_request = request.Request(url, None, reqheaders)
1904 | with request.urlopen(remote_request) as response:
1905 | #s.protocal_version ='HTTP/1.1'
1906 | wcode=response.code
1907 | headers=response.info()
1908 | #headers.pop('Access-Control-Allow-Credentials', None)
1909 | #xbmc.log(msg=str(headers),level=xbmc.LOGERROR)
1910 | keys=['content-length','content-range','accept-ranges','date','connection','access-control-allow-methods','access-control-max-age','etag']
1911 | headerkeys = set(k.lower() for k in headers)
1912 | for key in headerkeys:
1913 | try:
1914 | # this handler has no per-file fid, so the upstream content length is not cached here
1917 | if key.lower() in keys:
1918 | #xbmc.log(msg='zzzdebug:'+key+':'+headers[key],level=xbmc.LOGERROR)
1919 | wheaders[key]=headers[key]
1920 | except Exception as errno:
1921 | xbmc.log(msg='zzzdebug:sendheaderERR:%s'%(errno),level=xbmc.LOGERROR)
1922 | pass
1923 | wheaders['Access-Control-Allow-Origin']='*'
1924 | s.send_response(wcode)
1925 | for key in wheaders:
1926 | s.send_header(key,wheaders[key])
1927 | s.end_headers()
1928 | fileout=s.wfile
1929 | shutil.copyfileobj(response,fileout)
1930 | except:
1931 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
1932 | s.send_response(404)
1933 | err=True
1934 | finally:
1935 | #xbmc.log('lockcount-1 HEAD over err=%s'%str(err),level=xbmc.LOGERROR)
1936 | if response is not None:
1937 | response.close()
1938 | try:
1939 | s.wfile.close()
1940 | except:
1941 | pass
1942 | '''
1943 | Sends the requested file and adds additional headers.
1944 | '''
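# serveFile fetches the 115 download URL for the fid, mirrors the upstream HEAD
# response headers, then (when sendData is non-zero) streams the file in
# s.blockSize Range-request chunks, serialised per fid through s.fidSemaphores.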
1945 | def serveFile(s, fid, cookiestr, changeserver, sendData,name):
1946 | fidUrl = s.getfidUrl( fid, cookiestr)
1947 | #xbmc.log('fidUrl=%s'%(fidUrl),level=xbmc.LOGERROR)
1948 | if not fidUrl:
1949 | s.send_response(403)
1950 | return
1951 | filedownloadurl=downcookie=''
1952 | if fidUrl.find('|')>0:
1953 | filedownloadurl,downcookie=fidUrl.split('|')
1954 | else:
1955 | filedownloadurl=fidUrl
1956 | xbmc.log('filedownloadurl=%s downcookie=%s'%(filedownloadurl,downcookie),level=xbmc.LOGERROR)
1957 | # if str(xbmcaddon.Addon().getSetting('direct'))=='true':
1958 | # s.send_response(301)
1959 |
1960 | # s.send_header('Location', filedownloadurl)
1961 | # s.end_headers()
1962 |
1963 | # return
1964 | if not fid in s.fileSize:
1965 | s.fileSize[fid]=-1
1966 | if not fid in s.fidSemaphores:
1967 | s.fidSemaphores[fid]=Semaphore(s.accessThreadNum)
1968 |
1969 | rangeBegin=0
1970 | # build the forwarded request headers --- begin
1971 | reqheaders={}
1972 | for key in s.headers:
1973 | #xbmc.log(msg='zzzdebug:XBMCLocalProxy: reqheaders %s:%s'%(key, s.headers[key]))
1974 | if key.lower()!='host' and key.lower()!='user-agent':
1975 | #opener.addheader(key,s.headers[key])
1976 | #request.add_header(key, s.headers[key])
1977 | reqheaders[key]=s.headers[key]
1978 | if key.lower()=='range':
1979 | strRange=s.headers[key]
1980 | rangeBegin=int(strRange[strRange.index('=')+1:strRange.index('-')])
1981 |
1982 | reqheaders['User-Agent']=defaultUserAgent
1983 | reqheaders['Referer']='https://115.com/?cid=0&offset=0&mode=wangpan'
1984 | if cookiestr=='0': cookiestr=''
1985 | reqheaders['Cookie']=cookiestr+';'+downcookie+';'
1986 | # build the forwarded request headers --- end
1987 | # forward the request upstream
1988 | req = request.Request(filedownloadurl, headers=reqheaders)
1989 | if sendData==0:
1990 | req.get_method = lambda : 'HEAD'
1991 | response=None
1992 |
1993 | #xbmc.log('lockcount+1 sendData=%d bytes=%d-'%(sendData,rangeBegin),level=xbmc.LOGERROR)
1994 | err=False
1995 |
1996 | wcode=200
1997 | wheaders={}
1998 | #wheaders={'Connection':'Keep-Alive','Keep-Alive':'timeout=20, max=100'}
1999 | try:
2000 | # limit concurrent upstream requests for this fid
2001 | s.fidSemaphores[fid].acquire()
2002 | response = s.urlopenwithRetry(req)
2003 | #s.protocal_version ='HTTP/1.1'
2004 | wcode=response.code
2005 | headers=response.info()
2006 | #xbmc.log(msg=str(headers),level=xbmc.LOGERROR)
2007 | keys=['content-length','content-range','accept-ranges','date']
2008 | headerkeys = set(k.lower() for k in headers)
2009 | for key in headerkeys:
2010 | try:
2011 | if key=='content-length' and s.fileSize[fid]==-1:
2012 | # cache the file size reported by the server
2013 | s.fileSize[fid]= int(headers[key])
2014 | if key.lower() in keys:
2015 | #xbmc.log(msg='zzzdebug:'+key+':'+headers[key],level=xbmc.LOGERROR)
2016 | wheaders[key]=headers[key]
2017 | except Exception as errno:
2018 | xbmc.log(msg='zzzdebug:sendheaderERR:%s'%(errno),level=xbmc.LOGERROR)
2019 | pass
2020 |
2021 | mimetype, _ =mimetypes.guess_type(name.lower())
2022 | if not mimetype:
2023 | mimetype='application/octet-stream'
2024 | #xbmc.log(msg='zzzdebug:mimetype:%s'%(mimetype),level=xbmc.LOGERROR)
2025 | wheaders['content-type']=mimetype
2026 |
2027 |
2028 | except:
2029 | xbmc.log('Open HEAD error',level=xbmc.LOGERROR)
2030 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
2031 | wcode=404
2032 | err=True
2033 | finally:
2034 | #xbmc.log('lockcount-1 HEAD over err=%s'%str(err),level=xbmc.LOGERROR)
2035 | if response is not None: response.close()
2036 | s.fidSemaphores[fid].release()
2037 |
2038 | s.send_response(wcode)
2039 | for key in wheaders:
2040 | s.send_header(key,wheaders[key])
2041 | s.end_headers()
2042 |
2043 | if err or sendData==0:
2044 | return
2045 |
2046 | xbmc.log('rangeBegin=%d,s.fileSize[fid]=%d'%(rangeBegin,s.fileSize[fid]),level=xbmc.LOGERROR)
2047 |
2048 | while rangeBegin<s.fileSize[fid]:
2049 | # request the next block of at most s.blockSize bytes
2050 | rangeEnd=rangeBegin+s.blockSize-1
2051 | if rangeEnd>=s.fileSize[fid]:
2052 | rangeEnd=s.fileSize[fid]-1
2053 | reqheaders['Range']='bytes=%d-%d'%(rangeBegin,rangeEnd)
2054 | req = request.Request(filedownloadurl, headers=reqheaders)
2055 | # limit concurrent upstream requests for this fid
2056 | s.fidSemaphores[fid].acquire()
2057 | xbmc.log('lockcount+1 bytes=%d-%d'%(rangeBegin,rangeEnd),level=xbmc.LOGERROR)
2058 | err=False
2059 | try:
2060 | response = s.urlopenwithRetry(req)
2061 |
2062 | fileout=s.wfile
2063 | shutil.copyfileobj(response,fileout)
2064 | '''
2065 | readcount1=16384
2066 | readcount2=2048
2067 |
2068 | st=0
2069 |
2070 | buf="INIT"
2071 | while (buf!=None and len(buf)>0):
2072 | buf=response.read(readcount1)
2073 | st=0
2074 | #xbmc.log(msg='zzzdebug:XBMCLocalProxy: write..%s'%(len(buf)),level=xbmc.LOGERROR)
2075 | while (st<(len(buf)-readcount2)):
2076 | fileout.write(buf[st:st+readcount2])
2077 | st+=readcount2
2078 | fileout.write(buf[st:len(buf)])
2079 | '''
2080 | rangeBegin+=s.blockSize
2081 | except:
2082 | xbmc.log('lockcount-1 getandsendData error bytes=%d-%d'%(rangeBegin,rangeEnd),level=xbmc.LOGERROR)
2083 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
2084 | err=True
2085 |
2086 | finally:
2087 | if response is not None: response.close()
2088 | #time.sleep(1)
2089 | response=None
2090 | s.fidSemaphores[fid].release()
2091 | xbmc.log('lockcount-1 copyfileobj finally err=%s'%str(err),level=xbmc.LOGERROR)
2092 | if err:
2093 | break
2094 | #time.sleep(1)
2095 | #xbmc.log(msg='zzzdebug:XBMCLocalProxy:'+time.asctime()+' Closing connection')
2096 | try:
2097 | s.wfile.close()
2098 | except:
2099 | pass
2100 |
2101 |
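# Server adds a 20-second accept timeout to HTTPServer and, when the addon's
# 'ipv6' setting is enabled and dual-stack support is available, binds an
# AF_INET6 socket.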
2102 | class Server(HTTPServer):
2103 | if socket.has_dualstack_ipv6() and str(xbmcaddon.Addon().getSetting('ipv6'))=='true':
2104 | address_family = socket.AF_INET6
2105 |
2106 | '''HTTPServer class with timeout.'''
2107 | def get_request(self):
2108 | '''Get the request and client address from the socket.'''
2109 | self.socket.settimeout(20.0)
2110 | result = None
2111 | while result is None:
2112 | try:
2113 | result = self.socket.accept()
2114 | #self.socket.getpeername()
2115 | #self.socket = ssl.wrap_socket (self.socket,keyfile = xbmc.translatePath(os.path.join( __cwd__,'key.pem')),certfile=xbmc.translatePath(os.path.join( __cwd__,'server.pem')),server_side=True),
2116 | #xbmc.log(msg='ssl.wrap_socket',level=xbmc.LOGERROR)
2117 | except socket.timeout:
2118 | pass
2119 | except:
2120 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
2121 | result[0].settimeout(4000)
2122 | return result
2123 |
2124 | class ThreadedHTTPServer(ThreadingMixIn, Server):
2125 | '''Handle requests in a separate thread.'''
2126 |
2127 | HOST_NAME = ''
2128 | PORT_NUMBER = int(xbmcaddon.Addon().getSetting('listen_port'))
2129 |
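# loadcookiefile reads cookie.dat (LWP format) from the plugin.video.115 addon
# directory and returns the CID/SEID/UID/KID values as a simple "k=v;..." string,
# a browser-extension style JSON export, or LWP text, depending on cformat.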
2130 | def loadcookiefile(cformat='simple'):
2131 | cstr=''
2132 | cookiejar = cookielib.LWPCookieJar()
2133 | cid=seid=uid=kid=''
2134 | try:
2135 | cookiefile = xbmc.translatePath(os.path.join(xbmcaddon.Addon(id='plugin.video.115').getAddonInfo('path'), 'cookie.dat'))
2136 | if os.path.exists(cookiefile):
2137 | cookiejar.load(
2138 | cookiefile, ignore_discard=True, ignore_expires=True)
2139 | for cookie in cookiejar:
2140 | if cookie.name.upper()=='CID': cid=cookie.value
2141 | if cookie.name.upper()=='SEID': seid=cookie.value
2142 | if cookie.name.upper()=='UID': uid=cookie.value
2143 | if cookie.name.upper()=='KID': kid=cookie.value
2144 | if cformat.lower()=='json':
2145 | cookiejson=[{
2146 | "domain": "115.com",
2147 | "hostOnly": False,
2148 | "httpOnly": True,
2149 | "path": "/",
2150 | "sameSite": "lax",
2151 | "firstPartyDomain": "",
2152 | "partitionKey": None,
2153 | "secure": False,
2154 | "session": True,
2155 | "isProtected":True,
2156 | "name": "CID",
2157 | "value": cid,
2158 | "id": 1
2159 | },
2160 | {
2161 | "domain": "115.com",
2162 | "hostOnly": False,
2163 | "httpOnly": True,
2164 | "path": "/",
2165 | "sameSite": "lax",
2166 | "firstPartyDomain": "",
2167 | "partitionKey": None,
2168 | "secure": False,
2169 | "session": True,
2170 | "isProtected":True,
2171 | "name": "SEID",
2172 | "value": seid,
2173 | "id": 2
2174 | },
2175 | {
2176 | "domain": "115.com",
2177 | "hostOnly": False,
2178 | "httpOnly": True,
2179 | "path": "/",
2180 | "sameSite": "lax",
2181 | "firstPartyDomain": "",
2182 | "partitionKey": None,
2183 | "secure": False,
2184 | "session": True,
2185 | "isProtected":True,
2186 | "name": "UID",
2187 | "value": uid,
2188 | "id": 3
2189 | },
2190 | {
2191 | "domain": "115.com",
2192 | "hostOnly": False,
2193 | "httpOnly": True,
2194 | "path": "/",
2195 | "sameSite": "lax",
2196 | "firstPartyDomain": "",
2197 | "partitionKey": None,
2198 | "secure": False,
2199 | "session": True,
2200 | "isProtected":True,
2201 | "name": "KID",
2202 | "value": kid,
2203 | "id": 4
2204 | }]
2205 | cstr=json.dumps(cookiejson,indent=4)
2206 |
2207 | elif cformat.lower()=='lwp':
2208 | cstr='''#LWP-Cookies-2.0
2209 | Set-Cookie3: CID=%s; path="/"; domain="115.com"; path_spec; domain_dot; discard; HttpOnly=None; version=0
2210 | Set-Cookie3: SEID=%s; path="/"; domain="115.com"; path_spec; domain_dot; discard; HttpOnly=None; version=0
2211 | Set-Cookie3: UID=%s; path="/"; domain="115.com"; path_spec; domain_dot; discard; HttpOnly=None; version=0'''%(cid,seid,uid)
2212 | else:
2213 | cstr='CID=%s;SEID=%s;UID=%s;KID=%s'%(cid,seid,uid,kid)
2214 | return cstr
2215 | except:
2216 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
2217 | return cstr
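# savecookiefile accepts either a JSON cookie export or a raw "CID=...;SEID=..."
# string, extracts CID/SEID/UID/KID, and rewrites cookie.dat in LWP format;
# returns False when no CID can be found.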
2218 | def savecookiefile(cstr):
2219 | cid=seid=uid=kid=''
2220 | try:
2221 | cookies=json.loads(cstr)
2222 | for cookie in cookies:
2223 | if 'name' in cookie and 'value' in cookie:
2224 | if cookie['name'] == 'CID': cid = cookie['value']
2225 | if cookie['name'] == 'SEID': seid = cookie['value']
2226 | if cookie['name'] == 'UID': uid = cookie['value']
2227 | if cookie['name'] == 'KID': kid = cookie['value']
2228 | except:
2229 | cid=''
2230 | if cid=='':
2231 | match = re.search(r'CID\s*\x3D\s*(?P[A-Za-z0-9\x5F]+)', cstr, re.IGNORECASE | re.MULTILINE)
2232 | if match:
2233 | cid = match.group('value')
2234 | match = re.search(r'SEID\s*\x3D\s*(?P[A-Za-z0-9\x5F]+)', cstr, re.IGNORECASE | re.MULTILINE)
2235 | if match:
2236 | seid = match.group('value')
2237 | match = re.search(r'UID\s*\x3D\s*(?P[A-Za-z0-9\x5F]+)', cstr, re.IGNORECASE | re.MULTILINE)
2238 | if match:
2239 | uid = match.group('value')
2240 | match = re.search(r'KID\s*\x3D\s*(?P[A-Za-z0-9\x5F]+)', cstr, re.IGNORECASE | re.MULTILINE)
2241 | if match:
2242 | kid = match.group('value')
2243 | if cid=='': return False
2244 | cookiedat='''#LWP-Cookies-2.0
2245 | Set-Cookie3: CID=%s; path="/"; domain="115.com"; path_spec; domain_dot; discard; HttpOnly=None; version=0
2246 | Set-Cookie3: SEID=%s; path="/"; domain="115.com"; path_spec; domain_dot; discard; HttpOnly=None; version=0
2247 | Set-Cookie3: UID=%s; path="/"; domain="115.com"; path_spec; domain_dot; discard; HttpOnly=None; version=0
2248 | Set-Cookie3: KID=%s; path="/"; domain="115.com"; path_spec; domain_dot; discard; HttpOnly=None; version=0'''%(cid,seid,uid,kid)
2249 |
2250 | try:
2251 | cookiefilename = xbmc.translatePath(os.path.join(xbmcaddon.Addon(id='plugin.video.115').getAddonInfo('path'), 'cookie.dat'))
2252 | with open(cookiefilename, "wb") as cookieFile:
2253 | cookieFile.write(comm.ensure_binary(cookiedat))
2255 | return True
2256 | except:
2257 | xbmc.log(msg=format_exc(),level=xbmc.LOGERROR)
2258 | return False
2259 |
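# Entry point: bind the threaded proxy on the configured listen_port and serve
# requests until Kodi requests shutdown via xbmc.Monitor.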
2260 | if __name__ == '__main__':
2261 | #fid_pclist=plugin.get_storage('fid_pclist')
2262 | _cookiestr=loadcookiefile()
2263 | fid_pclist={}
2264 | fid_downloadurls={}
2265 | socket.setdefaulttimeout(40)
2266 | server_class = ThreadedHTTPServer
2267 | #MyHandler.protocol_version='HTTP/1.1'
2268 | MyHandler.protocol_version='HTTP/1.0'
2269 | httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)
2270 | xbmc.log(msg='XBMCLocalProxy Starts - %s:%s' % (HOST_NAME, PORT_NUMBER),level=xbmc.LOGERROR)
2271 | monitor = xbmc.Monitor()
2272 | while not monitor.abortRequested():
2273 | httpd.handle_request()
2274 | httpd.server_close()
2275 | #fid_pclist.sync()
2276 | xbmc.log(msg='XBMCLocalProxy Stop - %s:%s' % (HOST_NAME, PORT_NUMBER),level=xbmc.LOGERROR)
--------------------------------------------------------------------------------