├── .gitignore ├── LICENSE ├── README.md ├── ascii_verification_code.py ├── lixian.py ├── lixian_alias.py ├── lixian_batch.py ├── lixian_cli.py ├── lixian_cli_parser.py ├── lixian_colors.py ├── lixian_colors_console.py ├── lixian_colors_linux.py ├── lixian_colors_win32.py ├── lixian_commands ├── __init__.py ├── add.py ├── config.py ├── delete.py ├── download.py ├── help.py ├── info.py ├── list.py ├── login.py ├── logout.py ├── pause.py ├── readd.py ├── rename.py ├── restart.py └── util.py ├── lixian_config.py ├── lixian_download_asyn.py ├── lixian_download_tools.py ├── lixian_encoding.py ├── lixian_filter_expr.py ├── lixian_hash.py ├── lixian_hash_bt.py ├── lixian_hash_ed2k.py ├── lixian_help.py ├── lixian_logging.py ├── lixian_nodes.py ├── lixian_plugins ├── __init__.py ├── api │ └── __init__.py ├── commands │ ├── __init__.py │ ├── aria2.py │ ├── decode_url.py │ ├── diagnostics.py │ ├── echo.py │ ├── export_download_urls.py │ ├── extend_links.py │ ├── get_torrent.py │ ├── hash.py │ ├── kuai.py │ ├── list_torrent.py │ └── speed_test.py ├── filters │ ├── __init__.py │ ├── date.py │ ├── name.py │ ├── raw.py │ ├── regexp.py │ ├── size.py │ ├── sort.py │ └── total_size.py ├── parsers │ ├── __init__.py │ ├── icili.py │ ├── kuai.py │ ├── qjwm.py │ ├── simplecd.py │ └── verycd.py └── queries │ ├── __init__.py │ └── torrentz.py ├── lixian_progress.py ├── lixian_queries.py ├── lixian_query.py ├── lixian_url.py ├── lixian_util.py ├── lixian_verification_code.py └── tests ├── 123.txt ├── 123456.txt ├── The-quick-brown-fox-jumps-over-the-lazy-dog.txt ├── a.txt ├── abc.txt └── empty.txt /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | ============================================== 2 | This is a copy of the MIT license. 3 | ============================================== 4 | Copyright (C) 2012 Boyu Guo 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy of 7 | this software and associated documentation files (the "Software"), to deal in 8 | the Software without restriction, including without limitation the rights to 9 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 10 | of the Software, and to permit persons to whom the Software is furnished to do 11 | so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 
23 | 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | xunlei-lixian 2 | ============= 3 | 迅雷离线下载脚本。 4 | 5 | ### 更新 6 | `2016.11.20` 抱歉,这个项目我基本上已经不维护了。我本人甚至很久没有续费迅雷会员了……最后一个更新尝试着修复了登录问题。现在登录密码只能使用明文,如果配置过`lx config password`的需要重新设置下。另外建议删除`.xunlei.lixian.cookies`文件。 7 | 8 | ### 声明 9 | 迅雷离线下载为会员功能。非会员无法使用。 10 | 11 | Quick start 12 | ----------- 13 | 14 | python lixian_cli.py login "Your Xunlei account" "Your password" 15 | python lixian_cli.py login "Your password" 16 | python lixian_cli.py login 17 | 18 | python lixian_cli.py config username "Your Xunlei account" 19 | python lixian_cli.py config password "Your password" 20 | 21 | python lixian_cli.py list 22 | python lixian_cli.py list --completed 23 | python lixian_cli.py list --completed --name --original-url --download-url --no-status --no-id 24 | python lixian_cli.py list --deleted 25 | python lixian_cli.py list --expired 26 | python lixian_cli.py list id1 id2 27 | python lixian_cli.py list zip rar 28 | python lixian_cli.py list 2012.04.04 2012.04.05 29 | 30 | python lixian_cli.py download task-id 31 | python lixian_cli.py download ed2k-url 32 | python lixian_cli.py download --tool=wget ed2k-url 33 | python lixian_cli.py download --tool=asyn ed2k-url 34 | python lixian_cli.py download ed2k-url --output "file to save" 35 | python lixian_cli.py download id1 id2 id3 36 | python lixian_cli.py download url1 url2 url3 37 | python lixian_cli.py download --input download-urls-file 38 | python lixian_cli.py download --input download-urls-file --delete 39 | python lixian_cli.py download --input download-urls-file --output-dir root-dir-to-save-files 40 | python lixian_cli.py download bt://torrent-info-hash 41 | python lixian_cli.py download 1.torrent 42 | python lixian_cli.py download torrent-info-hash 43 | python lixian_cli.py download --bt http://xxx/xxx.torrent 44 | python lixian_cli.py download bt-task-id/file-id 45 | python lixian_cli.py download --all 46 | python lixian_cli.py download mkv 47 | python lixian_cli.py download 2012.04.04 48 | python lixian_cli.py download 0 1 2 49 | python lixian_cli.py download 0-2 50 | 51 | python lixian_cli.py add url 52 | python lixian_cli.py add 1.torrent 53 | python lixian_cli.py add torrent-info-hash 54 | python lixian_cli.py add --bt http://xxx/xxx.torrent 55 | 56 | python lixian_cli.py delete task-id 57 | python lixian_cli.py delete url 58 | python lixian_cli.py delete file-name-on-cloud-to-delete 59 | 60 | python lixian_cli.py pause id 61 | 62 | python lixian_cli.py restart id 63 | 64 | python lixian_cli.py rename id name 65 | 66 | python lixian_cli.py logout 67 | 68 | 安装指南 69 | -------- 70 | 71 | 1. 安装git(非github用户应该只需要执行第一步Download and Install Git) 72 | 73 | http://help.github.com/set-up-git-redirect 74 | 75 | 2. 下载代码(Windows用户请在git-bash里执行) 76 | 77 | git clone git://github.com/iambus/xunlei-lixian.git 78 | 79 | 3. 安装Python 2.x(请下载最新的2.7版本。不支持Python 3.x。) 80 | 81 | http://www.python.org/getit/ 82 | 83 | 4. 在命令行里运行 84 | 85 | python lixian_cli.py 86 | 87 | 注:不方便安装git的用户可以选择跳过前两步,在github网页上下载最新的源代码包(选择"Download as zip"或者"Download as tar.gz"): 88 | 89 | https://github.com/iambus/xunlei-lixian/downloads 90 | 91 | 92 | 一些提示 93 | -------- 94 | 95 | 1. 
你可以为python lixian_cli.py创建一个别名(比如lx),以减少敲键次数。 96 | 97 | Linux上可以使用: 98 | 99 | ln -s 你的lixian_cli.py路径 ~/bin/lx 100 | 101 | Windows上可以创建一个lx.bat脚本,放在你的PATH中: 102 | 103 | @echo off 104 | python 完整的lixian_cli.py路径 %* 105 | 106 | 注:下文中提到的lx都是指python lixian_cli.py的别名。 107 | 108 | 2. 你可以使用lx config保存一些配置。见“命令详解”一节。 109 | 110 | lx config delete 111 | lx config tool asyn 112 | lx config username your-id 113 | lx config password your-password 114 | 115 | 注:密码保存的时候会加密(hash) 116 | 117 | 3. 部分命令有短名字。lx d相当于lx download,lx a相当于lx add,lx l相当于lx list,lx x相当于lx list。也可以通过plugin api自己添加alias。 118 | 119 | 4. 使用lx download下载的文件会自动验证hash。其中ed2k和bt会做完整的hash校验。http下载只做部分校验。 120 | 121 | 注:包含多个文件的bt种子,如果没有完整下载所有文件,对于已下载的文件,可能有少量片段无法验证。如果很重视文件的正确性请选择下载bt种子中的所有文件。(目前还没有发现由于软件问题而导致hash验证失败的情况。) 122 | 123 | 5. 如果觉得大文件的hash速度太慢,可以关掉: 124 | 125 | lx download --no-hash ... 126 | 127 | 也可以使用lx config默认关掉它: 128 | 129 | lx config no-hash 130 | 131 | 6. lx hash命令可以用于手动计算hash。见“其他工具”一节。 132 | 133 | 134 | 命令详解 135 | -------- 136 | 137 | 注:下文中提到的lx都是指python lixian_cli.py的别名。 138 | 139 | 常用命令: 140 | 141 | * lx login 142 | * lx download 143 | * lx list 144 | * lx add 145 | * lx delete 146 | * lx pause 147 | * lx restart 148 | * lx rename 149 | * lx readd 150 | * lx config 151 | * lx info 152 | * lx help 153 | 154 | ### lx login 155 | 登录,获得一个有效session,默认保存路径是~/.xunlei.lixian.cookies。一般来说,除非服务器故障或者执行了lx logout(或者你手动删除了cookies文件),否则session的有效期是一天左右。session过期之后需要手动重新执行login。但如果使用lx config password把密码保存到配置文件里,则会自动重新登录。后文会介绍[lx config](#lx-config)。 156 | 157 | lx login接受两个参数,用户名和密码。第二次登录可以只填密码。 158 | 159 | lx login username password 160 | lx login password 161 | 162 | 如果不希望明文显示密码,也可以直接运行 163 | 164 | lx login 165 | 166 | 或者使用-代替密码 167 | 168 | lx login username - 169 | 170 | 上面的命令会进入交互式不回显的密码输入。 171 | 172 | 可以用--cookies指定保存的session文件路径。-表示不保存(在login这个例子里,没什么实际意义)。 173 | 174 | lx login username password --cookies some-path 175 | lx login username password --cookies - 176 | 177 | 注意,除了lx login外,大多数lx命令,比如lx download,都需要先执行登录。这些命令大多支持--username和--password,以及--cookies参数,根据传递进来的参数,检查用户是否已经登录,如果尚未登录则尝试登录。一般来说不建议在其他命令里使用这些参数(因为麻烦),除非你不希望保存session信息到硬盘。 178 | 179 | ### lx download 180 | 下载。目前支持普通的http下载,ed2k下载,和bt下载。可以使用thunder/flashget/qq旋风的连接(bt任务除外)。在信息足够的情况下(见“一些提示”一节的第3条),下载的文件会自动验证hash,出错了会重新下载(我个人目前还没遇到过下载文件损坏的情况)。见“一些提示”一节的第3条。 181 | 182 | lx download id 183 | lx download http://somewhere 184 | lx download ed2k://somefile 185 | lx download bt://info-hash 186 | lx download link1 link2 link3 ... 
187 | lx download --all 188 | lx download keywords 189 | lx download date 190 | 191 | 对于bt任务,可以指定本地.torrent文件路径,或者torrent文件的info hash。(很多网站使用info hash来标识一个bt种子文件,这种情况你就不需要下载种子了,lx download可以自动下载种子,不过前提是之前已经有人使用迅雷离线下载过同样的种子。[如后所述](#其他工具),你也可以使用lx hash --info-hash来手动生成bt种子的info hash。) 192 | 193 | lx download Community.S03E01.720p.HDTV.X264-DIMENSION.torrent 194 | lx download 61AAA3C6FBB8B71EBE2F5A2A3481296B51D882F6 195 | lx download bt://61AAA3C6FBB8B71EBE2F5A2A3481296B51D882F6 196 | 197 | 如果url本身指向了要添加任务的种子文件,需要加上--bt参数告诉lx脚本这是一个种子。 198 | 199 | lx download --bt http://tvu.org.ru/torrent.php?tid=64757 200 | 201 | 可以把多个连接保存到文件里,使用--input参数批量下载: 202 | 203 | lx download --input links.txt 204 | 205 | 注意:在断点续传的情况下,如果文件已经存在,并且文件大小相等,并且使用了--continue,重新下载并不只是简单的忽略这个文件,而是先做hash校验,如果校验通过才忽略。如果文件比较多或者比较大,可能比较耗时。建议手动从--input文件里删除已经下载过的链接。也可以使用--mini-hash参数,如下。 206 | 207 | 如果指定了--mini-hash参数,对于已经下载过的文件,并且文件大小正确(一般意味着这个文件的正确性已经在前一次下载中验证过了),会做一个最简单的校验。对于尚未下载完成的任务,在完成之后还是会做完整的hash。 208 | 209 | 如果指定了--no-hash参数,永远不会做完整的hash。但还是会做文件大小检验和取样hash(很快)。 210 | 211 | 可以使用--delete参数在下载完成之后删除任务。 212 | 213 | lx download link --delete 214 | 215 | 如果一个文件已经存在,使用参数--continue支持断点续传,使用--overwrite覆盖已存在的文件,重新下载。 216 | 217 | 你可能需要用--tool参数来指定下载工具。默认的下载工具是wget,有些环境的wget是最低功能版本,不支持指定cookie或者断点续传。这种情况可以使用--tool=asyn。这在“支持的下载工具”一节有说明。 218 | 219 | lx download --tool=wget link 220 | lx download --tool=asyn link 221 | 222 | --output和--output-dir分别用来指定保存文件的路径和目录。 223 | 224 | 如果要下载的文件尚未在离线任务里,会被自动添加。 225 | 226 | 你也可以使用指定要下载的任务id(lx list命令可以用来查看任务id): 227 | 228 | lx download task-id 229 | 230 | 但是要注意,多任务下载的时候,不能混用id和url(以后可能会支持)。 231 | 232 | 类似任务id,也可以指定任务的序列号。序列号从0开始。可以使用lx list -n查看序列号。如果希望lx list默认显示序列号,可以使用lx config n。若要下载任务列表中的第一个任务: 233 | 234 | lx download 0 235 | 236 | 要下载前三个任务: 237 | 238 | lx download 0-2 239 | 240 | 对于bt任务,如果只想下载部分文件,可以在task id后指定文件id: 241 | 242 | lx download bt-task-id/file-id bt-task-id/file-id2 243 | 244 | 或者: 245 | 246 | lx download bt-task-id/[1,3,5-7] 247 | 248 | 注:上面的命令下载对应bt任务里文件id为1,3,5,6,7的五个文件。 249 | 250 | 也可以指定bt子文件的扩展名: 251 | 252 | lx download bt-task-id/.mkv 253 | 254 | 或者: 255 | 256 | lx download bt-task-id/[.mkv,.mp4] 257 | 258 | 更多的用法:TODO 259 | 260 | 可以使用--all参数下载所有的任务(如果已经在参数中指定了要下载的链接或者任务id,--all参数会被忽略): 261 | 262 | lx download --all 263 | 264 | 也可以使用一个简单的关键字匹配要下载的文件名: 265 | 266 | lx download mkv 267 | 268 | 也可以搜索多个关键字(满足其中一个就算匹配): 269 | 270 | lx download mkv mp4 271 | 272 | 任务的添加日期也可以作为关键字: 273 | 274 | lx download 2012.04.04 275 | lx download 2012.04.04 2012.04.05 276 | 277 | ### lx list 278 | 列出已存在的离线任务。默认只会列出任务id,任务名,以及状态。可以使用--original-url和--download-url参数来列出原始链接和下载链接。--completed参数用于忽略未完成任务。 279 | 280 | lx list 281 | lx list --completed 282 | lx list --no-status --original-url --download-url 283 | 284 | 如果要列出bt任务的子文件,可以在任务id后面加上/: 285 | 286 | lx list id/ 287 | 288 | 可以使用--deleted或者--expired参数来列出已删除和已过期的任务。 289 | 290 | 详细参数可以参考lx help list。 291 | 292 | ### lx add 293 | 添加任务到迅雷离线服务器上。 294 | 295 | lx add url1 url2 url3 296 | lx add --input links.txt 297 | lx add --bt torrent-file 298 | lx add --bt torrent-url 299 | lx add --bt info-hash 300 | 301 | 提示:lx download会自动添加任务,而无需执行lx add。 302 | 303 | ### lx delete 304 | 从迅雷离线服务器上删除任务。 305 | 306 | lx delete id1 id2 307 | lx delete ed2k://... 
308 | lx delete mkv 309 | lx delete --all mkv 310 | lx delete --all mkv mp4 311 | 312 | ### lx pause 313 | 暂停任务。 314 | 315 | lx pause id1 id2 316 | lx pause --all mkv 317 | 318 | ### lx restart 319 | 重新开始任务。 320 | 321 | lx restart id1 id2 322 | lx restart --all mkv 323 | 324 | ### lx rename 325 | 重命名任务 326 | 327 | lx rename task-id task-name 328 | 329 | ### lx logout 330 | 不想保留session可以使用lx logout退出。一般用不着。 331 | 332 | lx logout 333 | lx logout --cookies your-cookies-file 334 | 335 | ### lx readd 336 | 重新添加已过期或者已删除的任务。 337 | 338 | lx readd --deleted task-id 339 | lx readd --expired task-name 340 | 341 | 提示:可以用lx list --deleted或者lx list --expired列出已删除和过期的任务。 342 | 343 | ### lx config 344 | 保存配置。配置文件的保存路径是~/.xunlei.lixian.config。虽然你可以差不多可以保存任何参数,但是目前只有以下几个参数会真正起作用: 345 | 346 | * username 347 | * password 348 | * tool 349 | * continue 350 | * delete 351 | * output-dir 352 | * hash 353 | * mini-hash 354 | * id 355 | * n 356 | * size 357 | * format-size 358 | * colors 359 | * wget-opts(见稍后的说明) 360 | * aria2-opts(见稍后的说明)(见支持的下载工具一节) 361 | * axel-opts(见稍后的说明) 362 | * watch-interval 363 | * log-level 364 | * log-path 365 | 366 | (因为只有这几个参数我觉得是比较有用的。如果你觉得其他的参数有用可以发信给我或者直接open一个issue。) 367 | 368 | 不加参数会打印当前保存的所有配置: 369 | 370 | lx config 371 | 372 | 可以使用--print打印指定的配置: 373 | 374 | lx config --print password 375 | 376 | 添加一个新的参数: 377 | 378 | lx config username your-username 379 | lx config password your-password 380 | lx config delete 381 | lx config no-delete 382 | 383 | 删除一个参数: 384 | 385 | lx config --delete password 386 | 387 | 注:密码是hash过的,不是明文保存。 388 | 注:如果不希望在命令行参数中明文保存密码,可以运行lx config password,或者lx config password -,会进入交互式不回显密码输入(只支持password配置)。 389 | 390 | 关于wget-opts/aria2-opts/axel-opts,因为这些工具的命令行参数一般都包含-,所以需要用额外的--转义。另外多个命令行参数需要用引号合并到一起: 391 | 392 | lx config -- aria2-opts "-s10 -x10 -c" 393 | 394 | ### lx info 395 | 打印cookies文件里保存的迅雷内部id,包括登录的ID,一个内部使用的ID,以及gdriveid。 396 | 397 | 关于gdriveid:理论上gdriveid是下载迅雷离线链接需要的唯一cookie,你可以用lx list --download-url获取下载地址,然后用lx info获取gdriveid,然后手动使用其他工具下载,比如wget "--header=Cookie: gdriveid=your-gdriveid" download-url。 398 | 399 | -i参数可以只打印登录ID: 400 | 401 | lx info -i 402 | 403 | 如果想把登录id复制到剪切板: 404 | 405 | lx info -i | clip 406 | 407 | ### lx help 408 | 打印帮助信息。 409 | 410 | lx help 411 | lx help examples 412 | lx help readme 413 | lx help download 414 | 415 | 支持的下载工具 416 | -------------- 417 | 418 | * wget:默认下载工具。注意有些Linux发行版(比如某些运行在路由设备上的mini系统)自带的wget可能无法满足功能要求。可以尝试使用其他工具。 419 | * asyn:内置的下载工具。在命令行中加上--tool=asyn可以启用。注意此工具的下载表现一般,在高速下载或者设备性能不太好的情况(比如运行在低端路由上),CPU使用可能稍高。在我的RT-N16上,以250K/s的速度下载,CPU使用大概在10%~20%。 420 | * urllib2:内置下载工具。不支持断点续传错误重连,不建议使用。 421 | * curl:尚未测试。 422 | * aria2:测试通过。注意某些环境里的aria2c需要加上额外的参数才能运行。可以使用lx config进行配置:lx config -- aria2-opts --event-poll=select 423 | * axel: 测试通过。注意官方版本的axel有一个URL重定向长度超过255被截断的bug,需要手动修改源代码编译。见issue #44. 424 | * 其他工具,比如ProZilla,暂时都不支持。有需要请可以我,或者直接提交一个issue。 425 | 426 | 427 | 其他工具 428 | -------- 429 | 430 | * lx hash可以用于手动计算hash。 431 | 432 | lx hash --ed2k filename 433 | lx hash --info-hash torrent-file 434 | lx hash --verify-sha1 filename sha1 435 | lx hash --verify-bt filename torrent-file 436 | 437 | * lixian_batch.py是我自己用的一个简单的“多任务”下载脚本。其实就是多个--input文件,每个文件里定义的链接下载到文件所在的目录里。 438 | 439 | python lixian_batch.py folder1/links.txt folder2/links.txt ... 440 | 441 | 既知问题 442 | -------- 443 | 444 | 1. --tool=asyn的性能不是很好。见“支持的下载工具”一节里的说明。 445 | 2. 有些时候任务添加到服务器上,但是马上刷新拿不到这个数据。这应该是服务器同步的问题。技术上可以自动重刷一遍,但是暂时没有做。用户可以自己重试下。 446 | 3. 
bt下载的校验如果失败,可能需要重新下载所有文件。从技术上来讲这是没有必要的。但是一来重下出错的片段有些繁琐,二来我自己都从来没遇到过bt校验失败需要重下的情况,所以暂时不考虑支持片段修复。更新:bt校验失败不会重下。 447 | 4. 有时候因为帐号异常,登录需要验证码。目前还不支持验证码。 448 | 449 | 以后 450 | ---- 451 | 452 | 其实一开始是考虑做一个可以在路由器上运行的网页版离线下载管理器的。但是这个工作量比命令行版的大很多(不是一个数量级的),在资源消耗和出错概率上也大很多,而且可能还要有更多的依赖库,安装起来也不方便。当然主要还是精力和需求的原因。现在的这个命令行本对我来说已经够用了,也挺简单,短期就不考虑增加网页版了。 453 | 454 | 相关项目 455 | -------- 456 | 457 | * [layerssss/lixian-portal](http://micy.in/lixian-portal/): 给iambus/xunlei-lixian做的一个简洁实用的webui 458 | 459 | 特别感谢 460 | -------- 461 | 462 | [群晖公司](http://www.synology.com/)在部分产品中绑定了迅雷离线脚本,并且捐赠了作者一台[DS213+](http://www.synology.com/products/product.php?product_name=DS213%2B)作为反馈。再此表示感谢! 463 | 464 | 许可协议 465 | -------- 466 | 467 | xunlei-lixian使用MIT许可协议。 468 | 469 | 此文档未完成。 470 | -------------- 471 | 472 | -------------------------------------------------------------------------------- /ascii_verification_code.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from PIL import Image 4 | from StringIO import StringIO 5 | 6 | __author__ = 'deadblue' 7 | 8 | def convert_to_ascii(img_data): 9 | return _martix_to_ascii( 10 | _crop_and_border( 11 | _image_to_martix(img_data) 12 | ) 13 | ) 14 | 15 | def _image_to_martix(img_data): 16 | img = Image.open(StringIO(img_data)).convert('L') 17 | w,h = img.size 18 | martix = [] 19 | for y in xrange(h / 2): 20 | row = [] 21 | for x in xrange(w): 22 | p1 = img.getpixel((x, y * 2)) 23 | p2 = img.getpixel((x, y * 2 + 1)) 24 | if p1 > 192 and p2 > 192: 25 | row.append(0) 26 | elif p1 > 192: 27 | row.append(1) 28 | elif p2 > 192: 29 | row.append(2) 30 | else: 31 | row.append(3) 32 | martix.append(row) 33 | return martix 34 | 35 | def _crop_and_border(martix): 36 | # 测量四周空白大小 37 | t,b,l,r = 0,0,0,0 38 | for y in xrange(len(martix)): 39 | if sum(martix[y]) == 0: 40 | t += 1 41 | else: break 42 | for y in xrange(len(martix)): 43 | if sum(martix[-1 - y]) == 0: 44 | b += 1 45 | else: break 46 | for x in xrange(len(martix[0])): 47 | if sum( map(lambda row:row[x], martix) ) == 0: 48 | l += 1 49 | else: break 50 | for x in xrange(len(martix[0])): 51 | if sum( map(lambda row:row[-1 - x], martix) ) == 0: 52 | r += 1 53 | else: break 54 | # 上下裁剪与补边 55 | w = len(martix[0]) 56 | if t > 0: 57 | martix = martix[t-1:] 58 | else: 59 | martix.insert(0, [0] * w) 60 | if b > 1: 61 | martix = martix[:1-b] 62 | elif b == 0: 63 | martix.append([0] * w) 64 | # 左右裁剪与补边 65 | for ri in xrange(len(martix)): 66 | row = martix[ri] 67 | if l > 0: 68 | row = row[l-1:] 69 | else: 70 | row.insert(0, 0) 71 | if r > 1: 72 | row = row[:1-r] 73 | elif r == 0: 74 | row.append(0) 75 | martix[ri] = row 76 | return martix 77 | 78 | def _martix_to_ascii(martix): 79 | buf = [] 80 | for row in martix: 81 | rbuf = [] 82 | for cell in row: 83 | if cell == 0: 84 | rbuf.append('#') 85 | elif cell == 1: 86 | rbuf.append('"') 87 | elif cell == 2: 88 | rbuf.append(',') 89 | elif cell == 3: 90 | rbuf.append(' ') 91 | buf.append(''.join(rbuf)) 92 | return '\n'.join(buf) -------------------------------------------------------------------------------- /lixian_alias.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | __all__ = ['register_alias', 'to_alias'] 4 | 5 | aliases = {'d': 'download', 'l': 'list', 'a': 'add', 'x': 'delete'} 6 | 7 | def register_alias(alias, command): 8 | aliases[alias] = command 9 | 10 | def get_aliases(): 11 | return aliases 12 | 13 | def get_alias(a): 14 | aliases = get_aliases() 15 | if a in aliases: 16 
| return aliases[a] 17 | 18 | def to_alias(a): 19 | return get_alias(a) or a 20 | 21 | -------------------------------------------------------------------------------- /lixian_batch.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | import os.path 5 | import lixian_cli 6 | 7 | def download_batch(files): 8 | for f in map(os.path.abspath, files): 9 | print 'Downloading', f, '...' 10 | os.chdir(os.path.dirname(f)) 11 | lixian_cli.execute_command(['download', '--input', f, '--delete', '--continue']) 12 | 13 | if __name__ == '__main__': 14 | download_batch(sys.argv[1:]) 15 | 16 | -------------------------------------------------------------------------------- /lixian_cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from lixian_commands.util import * 4 | import lixian_help 5 | import sys 6 | 7 | from lixian_commands.login import login 8 | from lixian_commands.logout import logout 9 | from lixian_commands.download import download_task 10 | from lixian_commands.list import list_task 11 | from lixian_commands.add import add_task 12 | from lixian_commands.delete import delete_task 13 | from lixian_commands.pause import pause_task 14 | from lixian_commands.restart import restart_task 15 | from lixian_commands.rename import rename_task 16 | from lixian_commands.readd import readd_task 17 | from lixian_commands.info import lixian_info 18 | from lixian_commands.config import lx_config 19 | from lixian_commands.help import lx_help 20 | 21 | 22 | def execute_command(args=sys.argv[1:]): 23 | import lixian_plugins # load plugins at import 24 | if not args: 25 | usage() 26 | sys.exit(1) 27 | command = args[0] 28 | if command.startswith('-'): 29 | if command in ('-h', '--help'): 30 | usage(lixian_help.welcome_help) 31 | elif command in ('-v', '--version'): 32 | print '0.0.x' 33 | else: 34 | usage() 35 | sys.exit(1) 36 | sys.exit(0) 37 | import lixian_alias 38 | command = lixian_alias.to_alias(command) 39 | commands = {'login': login, 40 | 'logout': logout, 41 | 'download': download_task, 42 | 'list': list_task, 43 | 'add': add_task, 44 | 'delete': delete_task, 45 | 'pause': pause_task, 46 | 'restart': restart_task, 47 | 'rename': rename_task, 48 | 'readd': readd_task, 49 | 'info': lixian_info, 50 | 'config': lx_config, 51 | 'help': lx_help} 52 | import lixian_plugins.commands 53 | commands.update(lixian_plugins.commands.commands) 54 | if command not in commands: 55 | usage() 56 | sys.exit(1) 57 | if '-h' in args or '--help' in args: 58 | lx_help([command]) 59 | else: 60 | commands[command](args[1:]) 61 | 62 | if __name__ == '__main__': 63 | execute_command() 64 | 65 | 66 | -------------------------------------------------------------------------------- /lixian_cli_parser.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['expand_command_line', 'parse_command_line', 'Parser', 'command_line_parse', 'command_line_option', 'command_line_value', 'command_line_parser', 'with_parser'] 3 | 4 | def expand_windows_command_line(args): 5 | from glob import glob 6 | expanded = [] 7 | for x in args: 8 | try: 9 | xx = glob(x) 10 | except: 11 | xx = None 12 | if xx: 13 | expanded += xx 14 | else: 15 | expanded.append(x) 16 | return expanded 17 | 18 | def expand_command_line(args): 19 | import platform 20 | return expand_windows_command_line(args) if platform.system() == 'Windows' else args 21 | 22 | def 
parse_command_line(args, keys=[], bools=[], alias={}, default={}, help=None): 23 | args = expand_command_line(args) 24 | options = {} 25 | for k in keys: 26 | options[k] = None 27 | for k in bools: 28 | options[k] = None 29 | left = [] 30 | args = args[:] 31 | while args: 32 | x = args.pop(0) 33 | if x == '--': 34 | left.extend(args) 35 | break 36 | if x.startswith('-') and len(x) > 1: 37 | k = x.lstrip('-') 38 | if k in bools: 39 | options[k] = True 40 | elif k.startswith('no-') and k[3:] in bools: 41 | options[k[3:]] = False 42 | elif k in keys: 43 | options[k] = args.pop(0) 44 | elif '=' in k and k[:k.index('=')] in keys: 45 | options[k[:k.index('=')]] = k[k.index('=')+1:] 46 | elif k in alias: 47 | k = alias[k] 48 | if k in bools: 49 | options[k] = True 50 | else: 51 | options[k] = args.pop(0) 52 | elif '=' in k and k[:k.index('=')] in alias: 53 | k, v = k[:k.index('=')], k[k.index('=')+1:] 54 | k = alias[k] 55 | if k not in keys: 56 | raise RuntimeError('Invalid boolean option '+x) 57 | options[k] = v 58 | else: 59 | if help: 60 | print 'Unknown option ' + x 61 | print 62 | print help 63 | exit(1) 64 | else: 65 | raise RuntimeError('Unknown option '+x) 66 | else: 67 | left.append(x) 68 | 69 | for k in default: 70 | if options[k] is None: 71 | options[k] = default[k] 72 | 73 | class Args(object): 74 | def __init__(self, args, left): 75 | self.__dict__['_args'] = args 76 | self.__dict__['_left'] = left 77 | def __getattr__(self, k): 78 | v = self._args.get(k, None) 79 | if v: 80 | return v 81 | if '_' in k: 82 | return self._args.get(k.replace('_', '-'), None) 83 | def __setattr__(self, k, v): 84 | self._args[k] = v 85 | def __getitem__(self, i): 86 | if type(i) == int: 87 | return self._left[i] 88 | else: 89 | return self._args[i] 90 | def __setitem__(self, i, v): 91 | if type(i) == int: 92 | self._left[i] = v 93 | else: 94 | self._args[i] = v 95 | def __len__(self): 96 | return len(self._left) 97 | def __str__(self): 98 | return '' % (self._args, self._left) 99 | return Args(options, left) 100 | 101 | class Stack: 102 | def __init__(self, **args): 103 | self.__dict__.update(args) 104 | 105 | class Parser: 106 | def __init__(self): 107 | self.stack = [] 108 | def with_parser(self, parser): 109 | self.stack.append(parser) 110 | return self 111 | def __call__(self, args, keys=[], bools=[], alias={}, default={}, help=None): 112 | stack = Stack(keys=list(keys), bools=list(bools), alias=dict(alias), default=dict(default)) 113 | keys = [] 114 | bools = [] 115 | alias = {} 116 | default = {} 117 | for stack in [x.args_stack for x in self.stack] + [stack]: 118 | keys += stack.keys 119 | bools += stack.bools 120 | alias.update(stack.alias) 121 | default.update(stack.default) 122 | args = parse_command_line(args, keys=keys, bools=bools, alias=alias, default=default, help=help) 123 | for fn in self.stack: 124 | new_args = fn(args) 125 | if new_args: 126 | args = new_args 127 | return args 128 | 129 | def command_line_parse(keys=[], bools=[], alias={}, default={}): 130 | def wrapper(fn): 131 | if hasattr(fn, 'args_stack'): 132 | stack = fn.args_stack 133 | stack.keys += keys 134 | stack.bools += bools 135 | stack.alias.update(alias) 136 | stack.default.update(default) 137 | else: 138 | fn.args_stack = Stack(keys=list(keys), bools=list(bools), alias=dict(alias), default=dict(default)) 139 | return fn 140 | return wrapper 141 | 142 | def command_line_option(name, alias=None, default=None): 143 | alias = {alias:name} if alias else {} 144 | default = {name:default} if default is not None else {} 145 | 
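# Note: command_line_option registers a boolean flag -- parse_command_line keeps it
# in `bools`, so "--name" sets it to True and "--no-name" sets it to False -- while
# command_line_value (below) registers an option that consumes the next argument via
# `keys`. A minimal usage sketch, using a purely hypothetical command that is not
# part of this module:
#
#   @command_line_parser()
#   @command_line_option('verbose', alias='v')
#   @command_line_value('output', alias='o', default='out.txt')
#   def my_command(args):
#       print args.verbose, args.output   # -> True result.txt
#
#   my_command(['--verbose', '-o', 'result.txt'])
#
# command_line_parser must be the outermost decorator, as in the lixian_commands/*.py modules.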
return command_line_parse(bools=[name], alias=alias, default=default) 146 | 147 | def command_line_value(name, alias=None, default=None): 148 | alias = {alias:name} if alias else {} 149 | default = {name:default} if default else {} 150 | return command_line_parse(keys=[name], alias=alias, default=default) 151 | 152 | def command_line_parser(*args, **kwargs): 153 | def wrapper(f): 154 | parser = Parser() 155 | for x in reversed(getattr(f, 'args_parsers', [])): 156 | parser = parser.with_parser(x) 157 | if hasattr(f, 'args_stack'): 158 | def parse_no_body(args): 159 | pass 160 | parse_no_body.args_stack = f.args_stack 161 | parser = parser.with_parser(parse_no_body) 162 | import functools 163 | @functools.wraps(f) 164 | def parse(args_list): 165 | return f(parser(args_list, *args, **kwargs)) 166 | return parse 167 | return wrapper 168 | 169 | def with_parser(parser): 170 | def wrapper(f): 171 | if hasattr(f, 'args_parsers'): 172 | f.args_parsers.append(parser) 173 | else: 174 | f.args_parsers = [parser] 175 | return f 176 | return wrapper 177 | 178 | 179 | -------------------------------------------------------------------------------- /lixian_colors.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | import sys 4 | 5 | def get_console_type(use_colors=True): 6 | if use_colors and sys.stdout.isatty() and sys.stderr.isatty(): 7 | import platform 8 | if platform.system() == 'Windows': 9 | import lixian_colors_win32 10 | return lixian_colors_win32.WinConsole 11 | else: 12 | import lixian_colors_linux 13 | return lixian_colors_linux.AnsiConsole 14 | else: 15 | import lixian_colors_console 16 | return lixian_colors_console.Console 17 | 18 | console_type = get_console_type() 19 | raw_console_type = get_console_type(False) 20 | 21 | def Console(use_colors=True): 22 | return get_console_type(use_colors)() 23 | 24 | def get_softspace(output): 25 | if hasattr(output, 'softspace'): 26 | return output.softspace 27 | import lixian_colors_console 28 | if isinstance(output, lixian_colors_console.Console): 29 | return get_softspace(output.output) 30 | return 0 31 | 32 | class ScopedColors(console_type): 33 | def __init__(self, *args): 34 | console_type.__init__(self, *args) 35 | def __call__(self): 36 | console = self 37 | class Scoped: 38 | def __enter__(self): 39 | self.stdout = sys.stdout 40 | softspace = get_softspace(sys.stdout) 41 | sys.stdout = console 42 | sys.stdout.softspace = softspace 43 | def __exit__(self, type, value, traceback): 44 | softspace = get_softspace(sys.stdout) 45 | sys.stdout = self.stdout 46 | sys.stdout.softspace = softspace 47 | return Scoped() 48 | 49 | class RawScopedColors(raw_console_type): 50 | def __init__(self, *args): 51 | raw_console_type.__init__(self, *args) 52 | def __call__(self): 53 | class Scoped: 54 | def __enter__(self): 55 | pass 56 | def __exit__(self, type, value, traceback): 57 | pass 58 | return Scoped() 59 | 60 | class RootColors: 61 | def __init__(self, use_colors=True): 62 | self.use_colors = use_colors 63 | def __getattr__(self, name): 64 | return getattr(ScopedColors() if self.use_colors else RawScopedColors(), name) 65 | def __call__(self, use_colors): 66 | assert use_colors in (True, False, None), use_colors 67 | return RootColors(use_colors) 68 | 69 | colors = RootColors() 70 | 71 | -------------------------------------------------------------------------------- /lixian_colors_console.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = 
['Console'] 3 | 4 | import sys 5 | 6 | styles = [ 7 | 'black', 8 | 'blue', 9 | 'green', 10 | 'red', 11 | 'cyan', 12 | 'yellow', 13 | 'purple', 14 | 'white', 15 | 16 | 'bold', 17 | 'italic', 18 | 'underline', 19 | 'inverse', 20 | ] 21 | 22 | 23 | class Console: 24 | def __init__(self, output=None, styles=[]): 25 | output = output or sys.stdout 26 | if isinstance(output, Console): 27 | self.output = output.output 28 | self.styles = output.styles + styles 29 | else: 30 | self.output = output 31 | self.styles = styles 32 | assert not isinstance(self.output, Console) 33 | def __getattr__(self, name): 34 | if name in styles: 35 | return self.ansi(name) 36 | else: 37 | raise AttributeError(name) 38 | def ansi(self, code): 39 | return self.__class__(self.output, self.styles + [code]) if code not in (None, '') else self 40 | def __call__(self, s): 41 | self.write(s) 42 | def write(self, s): 43 | self.output.write(s) 44 | def flush(self, *args): 45 | self.output.flush(*args) 46 | 47 | -------------------------------------------------------------------------------- /lixian_colors_linux.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['AnsiConsole'] 3 | 4 | from lixian_colors_console import Console 5 | 6 | import sys 7 | 8 | colors = { 9 | 'bold' : [1, 22], 10 | 'italic' : [3, 23], 11 | 'underline' : [4, 24], 12 | 'inverse' : [7, 27], 13 | 'white' : [37, 39], 14 | 'grey' : [90, 39], 15 | 'black' : [30, 39], 16 | 'blue' : [34, 39], 17 | 'cyan' : [36, 39], 18 | 'green' : [32, 39], 19 | 'purple' : [35, 39], 20 | 'magenta' : [35, 39], 21 | 'red' : [31, 39], 22 | 'yellow' : [33, 39] 23 | } 24 | 25 | class Render: 26 | def __init__(self, output, code): 27 | self.output = output 28 | self.left, self.right = code 29 | def __enter__(self): 30 | self.output.write(self.left) 31 | self.output.flush() 32 | def __exit__(self, type, value, traceback): 33 | self.output.write(self.right) 34 | self.output.flush() 35 | 36 | def mix_styles(styles): 37 | left = [] 38 | right = [] 39 | for style in styles: 40 | if style in colors: 41 | color = colors[style] 42 | left.append(color[0]) 43 | right.append(color[1]) 44 | right.reverse() 45 | return [''.join('\033[%dm' % n for n in left), ''.join('\033[%dm' % n for n in right)] 46 | 47 | class AnsiConsole(Console): 48 | def __init__(self, output=None, styles=[]): 49 | Console.__init__(self, output, styles) 50 | 51 | def write(self, s): 52 | if self.styles: 53 | with self.render(mix_styles(self.styles)): 54 | self.output.write(s) 55 | self.output.flush() 56 | else: 57 | self.output.write(s) 58 | self.output.flush() 59 | 60 | def render(self, code): 61 | return Render(self.output, code) 62 | 63 | -------------------------------------------------------------------------------- /lixian_colors_win32.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['WinConsole'] 3 | 4 | from lixian_colors_console import Console 5 | 6 | import ctypes 7 | from ctypes import windll, byref, Structure 8 | from ctypes.wintypes import SHORT, WORD 9 | 10 | import sys 11 | 12 | INVALID_HANDLE_VALUE = -1 13 | STD_OUTPUT_HANDLE = -11 14 | STD_ERROR_HANDLE = -12 15 | 16 | class COORD(Structure): 17 | _fields_ = (('X', SHORT), 18 | ('Y', SHORT),) 19 | 20 | class SMALL_RECT(Structure): 21 | _fields_ = (('Left', SHORT), 22 | ('Top', SHORT), 23 | ('Right', SHORT), 24 | ('Bottom', SHORT),) 25 | 26 | class CONSOLE_SCREEN_BUFFER_INFO(Structure): 27 | _fields_ = (('dwSize', COORD), 28 | ('dwCursorPosition', 
COORD), 29 | ('wAttributes', WORD), 30 | ('srWindow', SMALL_RECT), 31 | ('dwMaximumWindowSize', COORD),) 32 | 33 | 34 | def GetWinError(): 35 | code = ctypes.GetLastError() 36 | message = ctypes.FormatError(code) 37 | return '[Error %s] %s' % (code, message) 38 | 39 | def GetStdHandle(handle): 40 | h = windll.kernel32.GetStdHandle(handle) 41 | if h == INVALID_HANDLE_VALUE: 42 | raise OSError(GetWinError()) 43 | return h 44 | 45 | def GetConsoleScreenBufferInfo(handle): 46 | info = CONSOLE_SCREEN_BUFFER_INFO() 47 | if not windll.kernel32.GetConsoleScreenBufferInfo(handle, byref(info)): 48 | raise OSError(GetWinError()) 49 | return info 50 | 51 | def SetConsoleTextAttribute(handle, attributes): 52 | if not windll.Kernel32.SetConsoleTextAttribute(handle, attributes): 53 | raise OSError(GetWinError()) 54 | 55 | 56 | FOREGROUND_BLUE = 0x0001 57 | FOREGROUND_GREEN = 0x0002 58 | FOREGROUND_RED = 0x0004 59 | FOREGROUND_INTENSITY = 0x0008 60 | BACKGROUND_BLUE = 0x0010 61 | BACKGROUND_GREEN = 0x0020 62 | BACKGROUND_RED = 0x0040 63 | BACKGROUND_INTENSITY = 0x0080 64 | COMMON_LVB_LEADING_BYTE = 0x0100 65 | COMMON_LVB_TRAILING_BYTE = 0x0200 66 | COMMON_LVB_GRID_HORIZONTAL = 0x0400 67 | COMMON_LVB_GRID_LVERTICAL = 0x0800 68 | COMMON_LVB_GRID_RVERTICAL = 0x1000 69 | COMMON_LVB_REVERSE_VIDEO = 0x4000 70 | COMMON_LVB_UNDERSCORE = 0x8000 71 | 72 | colors = { 73 | 'black' : 0b000, 74 | 'blue' : 0b001, 75 | 'green' : 0b010, 76 | 'red' : 0b100, 77 | 'cyan' : 0b011, 78 | 'yellow' : 0b110, 79 | 'purple' : 0b101, 80 | 'magenta': 0b101, 81 | 'white' : 0b111, 82 | } 83 | 84 | def mix_styles(styles, attributes): 85 | fg_color = -1 86 | bg_color = -1 87 | fg_bright = -1 88 | bg_bright = -1 89 | reverse = -1 90 | underscore = -1 91 | for style in styles: 92 | if style == 0: 93 | # reset mode 94 | raise NotImplementedError() 95 | elif style == 1: 96 | # foreground bright on 97 | fg_bright = 1 98 | elif style == 2: 99 | # both bright off 100 | fg_bright = 0 101 | bg_bright = 0 102 | elif style == 4 or style == 'underline': 103 | # Underscore 104 | underscore = 1 105 | elif style == 5: 106 | # background bright on 107 | bg_bright = 1 108 | elif style == 7 or style == 'inverse': 109 | # Reverse foreground and background attributes. 
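# Note that COMMON_LVB_REVERSE_VIDEO is set for this case further down, but (as the
# XXX comment near the end of mix_styles points out) it does not actually take effect,
# so the foreground/background nibbles are swapped manually there instead.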
110 | reverse = 1 111 | elif style == 21 or style == 22: 112 | # foreground bright off 113 | fg_bright = 0 114 | elif style == 24: 115 | # Underscore: no 116 | underscore = 0 117 | elif style == 25: 118 | # background bright off 119 | bg_bright = 0 120 | elif style == 27: 121 | # Reverse: no 122 | reverse = 0 123 | elif 30 <= style <= 37: 124 | # set foreground color 125 | fg_color = style - 30 126 | elif style == 39: 127 | # default text color 128 | fg_color = 7 129 | fg_bright = 0 130 | elif 40 <= style <= 47: 131 | # set background color 132 | bg_color = style - 40 133 | elif style == 49: 134 | # default background color 135 | bg_color = 0 136 | elif 90 <= style <= 97: 137 | # set bold foreground color 138 | fg_bright = 1 139 | fg_color = style - 90 140 | elif 100 <= style <= 107: 141 | # set bold background color 142 | bg_bright = 1 143 | bg_color = style - 100 144 | elif style == 'bold': 145 | fg_bright = 1 146 | elif style in colors: 147 | fg_color = colors[style] 148 | 149 | if fg_color != -1: 150 | attributes &= ~ 0b111 151 | attributes |= fg_color 152 | if fg_bright != -1: 153 | attributes &= ~ 0b1000 154 | attributes |= fg_bright << 3 155 | if bg_color != -1: 156 | attributes &= ~ 0b1110000 157 | attributes |= bg_color << 4 158 | if bg_bright != -1: 159 | attributes &= ~ 0b10000000 160 | attributes |= bg_bright << 7 161 | if reverse != -1: 162 | attributes &= ~ COMMON_LVB_REVERSE_VIDEO 163 | attributes |= reverse << 14 164 | # XXX: COMMON_LVB_REVERSE_VIDEO doesn't work... 165 | if reverse: 166 | attributes = (attributes & ~(0b11111111 | COMMON_LVB_REVERSE_VIDEO)) | ((attributes & 0b11110000) >> 4) | ((attributes & 0b1111) << 4) 167 | if underscore != -1: 168 | attributes &= ~ COMMON_LVB_UNDERSCORE 169 | attributes |= underscore << 15 170 | 171 | return attributes 172 | 173 | class Render: 174 | def __init__(self, handle, default, attributes): 175 | self.handle = handle 176 | self.default = default 177 | self.attributes = attributes 178 | def __enter__(self): 179 | SetConsoleTextAttribute(self.handle, self.attributes) 180 | def __exit__(self, type, value, traceback): 181 | SetConsoleTextAttribute(self.handle, self.default) 182 | 183 | class WinConsole(Console): 184 | def __init__(self, output=None, styles=[], handle=STD_OUTPUT_HANDLE): 185 | Console.__init__(self, output, styles) 186 | self.handle = GetStdHandle(handle) 187 | self.default = GetConsoleScreenBufferInfo(self.handle).wAttributes 188 | 189 | def write(self, s): 190 | if self.styles: 191 | with self.render(mix_styles(self.styles, self.default)): 192 | self.output.write(s) 193 | self.output.flush() 194 | else: 195 | self.output.write(s) 196 | self.output.flush() 197 | 198 | def render(self, attributes): 199 | return Render(self.handle, self.default, attributes) 200 | 201 | 202 | -------------------------------------------------------------------------------- /lixian_commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iambus/xunlei-lixian/c9ef30dd5ea8da71bbbd2faf9e14b0629a6de4e5/lixian_commands/__init__.py -------------------------------------------------------------------------------- /lixian_commands/add.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | from lixian_cli_parser import * 4 | from lixian_config import get_config 5 | import lixian_help 6 | import lixian_query 7 | 8 | @command_line_parser(help=lixian_help.add) 9 | @with_parser(parse_login) 10 | 
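# The stacked @with_parser decorators pull in shared option groups (login/cookies,
# colors, logging, size output); these helpers presumably come from
# lixian_commands/util.py (star-imported above), which is not included in this section.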
@with_parser(parse_colors) 11 | @with_parser(parse_logging) 12 | @with_parser(parse_size) 13 | @command_line_value('limit', default=get_config('limit')) 14 | @command_line_value('page-size', default=get_config('page-size')) 15 | @command_line_value('input', alias='i') 16 | @command_line_option('torrent', alias='bt') 17 | def add_task(args): 18 | assert len(args) or args.input 19 | client = create_client(args) 20 | tasks = lixian_query.find_tasks_to_download(client, args) 21 | print 'All tasks added. Checking status...' 22 | columns = ['id', 'status', 'name'] 23 | if get_config('n'): 24 | columns.insert(0, 'n') 25 | if args.size: 26 | columns.append('size') 27 | output_tasks(tasks, columns, args) 28 | -------------------------------------------------------------------------------- /lixian_commands/config.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | from lixian_commands.util import * 4 | from lixian_cli_parser import * 5 | from lixian_config import * 6 | import lixian_help 7 | from getpass import getpass 8 | 9 | @command_line_parser(help=lixian_help.config) 10 | @command_line_option('print') 11 | @command_line_option('delete') 12 | def lx_config(args): 13 | if args.delete: 14 | assert len(args) == 1 15 | delete_config(args[0]) 16 | elif args['print'] or not len(args): 17 | if len(args): 18 | assert len(args) == 1 19 | print get_config(args[0]) 20 | else: 21 | print 'Loading', global_config.path, '...\n' 22 | print source_config() 23 | print global_config 24 | else: 25 | assert len(args) in (1, 2) 26 | if args[0] == 'password': 27 | if len(args) == 1 or args[1] == '-': 28 | password = getpass('Password: ') 29 | else: 30 | password = args[1] 31 | print 'Saving password to', global_config.path 32 | put_config('password', password) 33 | else: 34 | print 'Saving configuration to', global_config.path 35 | put_config(*args) 36 | -------------------------------------------------------------------------------- /lixian_commands/delete.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | from lixian_cli_parser import * 4 | from lixian_config import get_config 5 | from lixian_encoding import default_encoding 6 | from lixian_colors import colors 7 | import lixian_help 8 | import lixian_query 9 | 10 | @command_line_parser(help=lixian_help.delete) 11 | @with_parser(parse_login) 12 | @with_parser(parse_colors) 13 | @with_parser(parse_logging) 14 | @command_line_option('i') 15 | @command_line_option('all') 16 | @command_line_option('failed') 17 | @command_line_value('limit', default=get_config('limit')) 18 | @command_line_value('page-size', default=get_config('page-size')) 19 | def delete_task(args): 20 | client = create_client(args) 21 | to_delete = lixian_query.search_tasks(client, args) 22 | if not to_delete: 23 | print 'Nothing to delete' 24 | return 25 | with colors(args.colors).red.bold(): 26 | print "Below files are going to be deleted:" 27 | for x in to_delete: 28 | print x['name'].encode(default_encoding) 29 | if args.i: 30 | yes_or_no = raw_input('Are your sure to delete them from Xunlei cloud? (y/n) ') 31 | while yes_or_no.lower() not in ('y', 'yes', 'n', 'no'): 32 | yes_or_no = raw_input('yes or no? ') 33 | if yes_or_no.lower() in ('y', 'yes'): 34 | pass 35 | elif yes_or_no.lower() in ('n', 'no'): 36 | print 'Deletion abort per user request.' 
37 | return 38 | client.delete_tasks(to_delete) 39 | -------------------------------------------------------------------------------- /lixian_commands/download.py: -------------------------------------------------------------------------------- 1 | 2 | import lixian_download_tools 3 | import lixian_nodes 4 | from lixian_commands.util import * 5 | from lixian_cli_parser import * 6 | from lixian_config import * 7 | from lixian_encoding import default_encoding 8 | from lixian_colors import colors 9 | import lixian_help 10 | import lixian_query 11 | import lixian_hash 12 | import lixian_hash_bt 13 | import lixian_hash_ed2k 14 | 15 | import os 16 | import os.path 17 | import re 18 | 19 | def ensure_dir_exists(dirname): 20 | if dirname and not os.path.exists(dirname): 21 | try: 22 | os.makedirs(dirname) 23 | except os.error: 24 | if not os.path.exists(dirname): 25 | raise 26 | 27 | def escape_filename(name): 28 | amp = re.compile(r'&(amp;)+', flags=re.I) 29 | name = re.sub(amp, '&', name) 30 | name = re.sub(r'[\\/:*?"<>|]', '-', name) 31 | return name 32 | 33 | def safe_encode_native_path(path): 34 | return path.encode(default_encoding).decode(default_encoding).replace('?', '-').encode(default_encoding) 35 | 36 | def verify_basic_hash(path, task): 37 | if os.path.getsize(path) != task['size']: 38 | print 'hash error: incorrect file size (%s != %s)' % (os.path.getsize(path), task['size']) 39 | return False 40 | return lixian_hash.verify_dcid(path, task['dcid']) 41 | 42 | def verify_hash(path, task): 43 | if verify_basic_hash(path, task): 44 | if task['type'] == 'ed2k': 45 | return lixian_hash_ed2k.verify_ed2k_link(path, task['original_url']) 46 | else: 47 | return True 48 | 49 | def verify_mini_hash(path, task): 50 | return os.path.exists(path) and os.path.getsize(path) == task['size'] and lixian_hash.verify_dcid(path, task['dcid']) 51 | 52 | def verify_mini_bt_hash(dirname, files): 53 | for f in files: 54 | name = f['name'].encode(default_encoding) 55 | path = os.path.join(dirname, *name.split('\\')) 56 | if not verify_mini_hash(path, f): 57 | return False 58 | return True 59 | 60 | def download_file(client, path, task, options): 61 | download_tool = lixian_download_tools.get_tool(options['tool']) 62 | 63 | resuming = options.get('resuming') 64 | overwrite = options.get('overwrite') 65 | mini_hash = options.get('mini_hash') 66 | no_hash = options.get('no_hash') 67 | 68 | url = str(task['xunlei_url']) 69 | if options['node']: 70 | if options['node'] == 'best' or options['node'] == 'fastest': 71 | from lixian_util import parse_size 72 | if task['size'] >= parse_size(options['node_detection_threshold']): 73 | url = lixian_nodes.use_fastest_node(url, options['vod_nodes'], client.get_gdriveid()) 74 | elif options['node'] == 'fast': 75 | from lixian_util import parse_size 76 | if task['size'] >= parse_size(options['node_detection_threshold']): 77 | url = lixian_nodes.use_fast_node(url, options['vod_nodes'], parse_size(options['node_detection_acceptable']), client.get_gdriveid()) 78 | else: 79 | url = lixian_nodes.switch_node(url, options['node'], client.get_gdriveid()) 80 | 81 | def download1(download, path): 82 | if not os.path.exists(path): 83 | download() 84 | elif not resuming: 85 | if overwrite: 86 | download() 87 | else: 88 | raise Exception('%s already exists. 
Please try --continue or --overwrite' % path) 89 | else: 90 | if download.finished(): 91 | pass 92 | else: 93 | download() 94 | 95 | def download1_checked(client, url, path, size): 96 | download = download_tool(client=client, url=url, path=path, size=size, resuming=resuming) 97 | checked = 0 98 | while checked < 10: 99 | download1(download, path) 100 | if download.finished(): 101 | break 102 | else: 103 | checked += 1 104 | assert os.path.getsize(path) == size, 'incorrect downloaded file size (%s != %s)' % (os.path.getsize(path), size) 105 | 106 | def download2(client, url, path, task): 107 | size = task['size'] 108 | if mini_hash and resuming and verify_mini_hash(path, task): 109 | return 110 | download1_checked(client, url, path, size) 111 | verify = verify_basic_hash if no_hash else verify_hash 112 | if not verify(path, task): 113 | with colors(options.get('colors')).yellow(): 114 | print 'hash error, redownloading...' 115 | os.rename(path, path + '.error') 116 | download1_checked(client, url, path, size) 117 | if not verify(path, task): 118 | raise Exception('hash check failed') 119 | 120 | download2(client, url, path, task) 121 | 122 | 123 | def download_single_task(client, task, options): 124 | output = options.get('output') 125 | output = output and os.path.expanduser(output) 126 | output_dir = options.get('output_dir') 127 | output_dir = output_dir and os.path.expanduser(output_dir) 128 | delete = options.get('delete') 129 | resuming = options.get('resuming') 130 | overwrite = options.get('overwrite') 131 | mini_hash = options.get('mini_hash') 132 | no_hash = options.get('no_hash') 133 | no_bt_dir = options.get('no_bt_dir') 134 | save_torrent_file = options.get('save_torrent_file') 135 | 136 | assert client.get_gdriveid() 137 | if task['status_text'] != 'completed': 138 | if 'files' not in task: 139 | with colors(options.get('colors')).yellow(): 140 | print 'skip task %s as the status is %s' % (task['name'].encode(default_encoding), task['status_text']) 141 | return 142 | 143 | if output: 144 | output_path = output 145 | output_dir = os.path.dirname(output) 146 | output_name = os.path.basename(output) 147 | else: 148 | output_name = safe_encode_native_path(escape_filename(task['name'])) 149 | output_dir = output_dir or '.' 150 | output_path = os.path.join(output_dir, output_name) 151 | 152 | if task['type'] == 'bt': 153 | files, skipped, single_file = lixian_query.expand_bt_sub_tasks(task) 154 | if single_file: 155 | dirname = output_dir 156 | else: 157 | if no_bt_dir: 158 | output_path = os.path.dirname(output_path) 159 | dirname = output_path 160 | assert dirname # dirname must be non-empty, otherwise dirname + os.path.sep + ... might be dangerous 161 | ensure_dir_exists(dirname) 162 | for t in skipped: 163 | with colors(options.get('colors')).yellow(): 164 | print 'skip task %s/%s (%s) as the status is %s' % (str(t['id']), t['index'], t['name'].encode(default_encoding), t['status_text']) 165 | if mini_hash and resuming and verify_mini_bt_hash(dirname, files): 166 | print task['name'].encode(default_encoding), 'is already done' 167 | if delete and 'files' not in task: 168 | client.delete_task(task) 169 | return 170 | if not single_file: 171 | with colors(options.get('colors')).green(): 172 | print output_name + '/' 173 | for f in files: 174 | name = f['name'] 175 | if f['status_text'] != 'completed': 176 | print 'Skipped %s file %s ...' % (f['status_text'], name.encode(default_encoding)) 177 | continue 178 | if not single_file: 179 | print name.encode(default_encoding), '...' 
180 | else: 181 | with colors(options.get('colors')).green(): 182 | print name.encode(default_encoding), '...' 183 | # XXX: if file name is escaped, hashing bt won't get correct file 184 | splitted_path = map(escape_filename, name.split('\\')) 185 | name = safe_encode_native_path(os.path.join(*splitted_path)) 186 | path = dirname + os.path.sep + name # fix issue #82 187 | if splitted_path[:-1]: 188 | subdir = safe_encode_native_path(os.path.join(*splitted_path[:-1])) 189 | subdir = dirname + os.path.sep + subdir # fix issue #82 190 | ensure_dir_exists(subdir) 191 | download_file(client, path, f, options) 192 | # if save_torrent_file: 193 | # info_hash = str(task['bt_hash']) 194 | # if single_file: 195 | # torrent = os.path.join(dirname, escape_filename(task['name']).encode(default_encoding) + '.torrent') 196 | # else: 197 | # torrent = os.path.join(dirname, info_hash + '.torrent') 198 | # if os.path.exists(torrent): 199 | # pass 200 | # else: 201 | # content = client.get_torrent_file_by_info_hash(info_hash) 202 | # with open(torrent, 'wb') as ouput_stream: 203 | # ouput_stream.write(content) 204 | # if not no_hash: 205 | # torrent_file = client.get_torrent_file(task) 206 | # print 'Hashing bt ...' 207 | # from lixian_progress import SimpleProgressBar 208 | # bar = SimpleProgressBar() 209 | # file_set = [f['name'].encode('utf-8').split('\\') for f in files] if 'files' in task else None 210 | # verified = lixian_hash_bt.verify_bt(output_path, lixian_hash_bt.bdecode(torrent_file)['info'], file_set=file_set, progress_callback=bar.update) 211 | # bar.done() 212 | # if not verified: 213 | # # note that we don't delete bt download folder if hash failed 214 | # raise Exception('bt hash check failed') 215 | else: 216 | ensure_dir_exists(output_dir) 217 | 218 | with colors(options.get('colors')).green(): 219 | print output_name, '...' 
220 | download_file(client, output_path, task, options) 221 | 222 | if delete and 'files' not in task: 223 | client.delete_task(task) 224 | 225 | def download_multiple_tasks(client, tasks, options): 226 | for task in tasks: 227 | download_single_task(client, task, options) 228 | skipped = filter(lambda t: t['status_text'] != 'completed', tasks) 229 | if skipped: 230 | with colors(options.get('colors')).yellow(): 231 | print "Below tasks were skipped as they were not ready:" 232 | for task in skipped: 233 | print task['id'], task['status_text'], task['name'].encode(default_encoding) 234 | 235 | @command_line_parser(help=lixian_help.download) 236 | @with_parser(parse_login) 237 | @with_parser(parse_colors) 238 | @with_parser(parse_logging) 239 | @command_line_value('tool', default=get_config('tool', 'wget')) 240 | @command_line_value('input', alias='i') 241 | @command_line_value('output', alias='o') 242 | @command_line_value('output-dir', default=get_config('output-dir')) 243 | @command_line_option('torrent', alias='bt') 244 | @command_line_option('all') 245 | @command_line_value('category') 246 | @command_line_value('limit', default=get_config('limit')) 247 | @command_line_value('page-size', default=get_config('page-size')) 248 | @command_line_option('delete', default=get_config('delete')) 249 | @command_line_option('continue', alias='c', default=get_config('continue')) 250 | @command_line_option('overwrite') 251 | @command_line_option('mini-hash', default=get_config('mini-hash')) 252 | @command_line_option('hash', default=get_config('hash', True)) 253 | @command_line_option('bt-dir', default=True) 254 | @command_line_option('save-torrent-file') 255 | @command_line_option('watch') 256 | @command_line_option('watch-present') 257 | @command_line_value('watch-interval', default=get_config('watch-interval', '3m')) 258 | @command_line_value('node', default=get_config('node')) 259 | @command_line_value('node-detection-threshold', default=get_config('node-detection-threshold', '100M')) 260 | @command_line_value('node-detection-acceptable', default=get_config('node-detection-acceptable', '1M')) 261 | @command_line_value('vod-nodes', default=get_config('vod-nodes', lixian_nodes.VOD_RANGE)) 262 | def download_task(args): 263 | assert len(args) or args.input or args.all or args.category, 'Not enough arguments' 264 | lixian_download_tools.get_tool(args.tool) # check tool 265 | download_args = {'tool': args.tool, 266 | 'output': args.output, 267 | 'output_dir': args.output_dir, 268 | 'delete': args.delete, 269 | 'resuming': args._args['continue'], 270 | 'overwrite': args.overwrite, 271 | 'mini_hash': args.mini_hash, 272 | 'no_hash': not args.hash, 273 | 'no_bt_dir': not args.bt_dir, 274 | 'save_torrent_file': args.save_torrent_file, 275 | 'node': args.node, 276 | 'node_detection_threshold': args.node_detection_threshold, 277 | 'node_detection_acceptable': args.node_detection_acceptable, 278 | 'vod_nodes': args.vod_nodes, 279 | 'colors': args.colors} 280 | client = create_client(args) 281 | query = lixian_query.build_query(client, args) 282 | query.query_once() 283 | 284 | def sleep(n): 285 | assert isinstance(n, (int, basestring)), repr(n) 286 | import time 287 | if isinstance(n, basestring): 288 | n, u = re.match(r'^(\d+)([smh])?$', n.lower()).groups() 289 | n = int(n) * {None: 1, 's': 1, 'm': 60, 'h': 3600}[u] 290 | time.sleep(n) 291 | 292 | if args.watch_present: 293 | assert not args.output, 'not supported with watch option yet' 294 | tasks = query.pull_completed() 295 | while True: 296 | if tasks: 
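# --watch-present loop: download whatever has already completed, then keep polling
# the task list (sleeping for --watch-interval when nothing new has finished)
# until no pending download jobs remain.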
297 | download_multiple_tasks(client, tasks, download_args) 298 | if not query.download_jobs: 299 | break 300 | if not tasks: 301 | sleep(args.watch_interval) 302 | query.refresh_status() 303 | tasks = query.pull_completed() 304 | 305 | elif args.watch: 306 | assert not args.output, 'not supported with watch option yet' 307 | tasks = query.pull_completed() 308 | while True: 309 | if tasks: 310 | download_multiple_tasks(client, tasks, download_args) 311 | if (not query.download_jobs) and (not query.queries): 312 | break 313 | if not tasks: 314 | sleep(args.watch_interval) 315 | query.refresh_status() 316 | query.query_search() 317 | tasks = query.pull_completed() 318 | 319 | else: 320 | tasks = query.peek_download_jobs() 321 | if args.output: 322 | assert len(tasks) == 1 323 | download_single_task(client, tasks[0], download_args) 324 | else: 325 | download_multiple_tasks(client, tasks, download_args) 326 | -------------------------------------------------------------------------------- /lixian_commands/help.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | import lixian_help 4 | 5 | def lx_help(args): 6 | if len(args) == 1: 7 | helper = getattr(lixian_help, args[0].lower(), lixian_help.help) 8 | usage(helper) 9 | elif len(args) == 0: 10 | usage(lixian_help.welcome_help) 11 | else: 12 | usage(lixian_help.help) 13 | -------------------------------------------------------------------------------- /lixian_commands/info.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | from lixian import XunleiClient 4 | from lixian_commands.util import * 5 | from lixian_cli_parser import * 6 | import lixian_help 7 | 8 | @command_line_parser(help=lixian_help.info) 9 | @with_parser(parse_login) 10 | @command_line_option('id', alias='i') 11 | def lixian_info(args): 12 | client = XunleiClient(args.username, args.password, args.cookies, login=False) 13 | if args.id: 14 | print client.get_username() 15 | else: 16 | print 'id:', client.get_username() 17 | print 'internalid:', client.get_userid() 18 | print 'gdriveid:', client.get_gdriveid() or '' 19 | 20 | -------------------------------------------------------------------------------- /lixian_commands/list.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | from lixian_cli_parser import * 4 | from lixian_config import get_config 5 | import lixian_help 6 | import lixian_query 7 | import re 8 | 9 | @command_line_parser(help=lixian_help.list) 10 | @with_parser(parse_login) 11 | @with_parser(parse_colors) 12 | @with_parser(parse_logging) 13 | @with_parser(parse_size) 14 | @command_line_option('all', default=True) 15 | @command_line_option('completed') 16 | @command_line_option('failed') 17 | @command_line_option('deleted') 18 | @command_line_option('expired') 19 | @command_line_value('category') 20 | @command_line_value('limit', default=get_config('limit')) 21 | @command_line_value('page-size', default=get_config('page-size')) 22 | @command_line_option('id', default=get_config('id', True)) 23 | @command_line_option('name', default=True) 24 | @command_line_option('status', default=True) 25 | @command_line_option('dcid') 26 | @command_line_option('gcid') 27 | @command_line_option('original-url') 28 | @command_line_option('download-url') 29 | @command_line_option('speed') 30 | @command_line_option('progress') 31 | @command_line_option('date') 32 | 
@command_line_option('n', default=get_config('n')) 33 | def list_task(args): 34 | 35 | parent_ids = [a[:-1] for a in args if re.match(r'^#?\d+/$', a)] 36 | if parent_ids and not all(re.match(r'^#?\d+/$', a) for a in args): 37 | raise NotImplementedError("Can't mix 'id/' with others") 38 | assert len(parent_ids) <= 1, "sub-tasks listing only supports single task id" 39 | ids = [a[:-1] if re.match(r'^#?\d+/$', a) else a for a in args] 40 | 41 | client = create_client(args) 42 | if parent_ids: 43 | args[0] = args[0][:-1] 44 | tasks = lixian_query.search_tasks(client, args) 45 | assert len(tasks) == 1 46 | tasks = client.list_bt(tasks[0]) 47 | #tasks = client.list_bt(client.get_task_by_id(parent_ids[0])) 48 | tasks.sort(key=lambda x: int(x['index'])) 49 | else: 50 | tasks = lixian_query.search_tasks(client, args) 51 | if len(args) == 1 and re.match(r'\d+/', args[0]) and len(tasks) == 1 and 'files' in tasks[0]: 52 | parent_ids = [tasks[0]['id']] 53 | tasks = tasks[0]['files'] 54 | columns = ['n', 'id', 'name', 'status', 'size', 'progress', 'speed', 'date', 'dcid', 'gcid', 'original-url', 'download-url'] 55 | columns = filter(lambda k: getattr(args, k), columns) 56 | 57 | output_tasks(tasks, columns, args, not parent_ids) 58 | -------------------------------------------------------------------------------- /lixian_commands/login.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | from lixian import XunleiClient 4 | from lixian_commands.util import * 5 | from lixian_cli_parser import * 6 | from lixian_config import get_config 7 | import lixian_help 8 | from getpass import getpass 9 | 10 | @command_line_parser(help=lixian_help.login) 11 | @with_parser(parse_login) 12 | @with_parser(parse_logging) 13 | def login(args): 14 | if args.cookies == '-': 15 | args._args['cookies'] = None 16 | if len(args) < 1: 17 | args.username = args.username or XunleiClient(cookie_path=args.cookies, login=False).get_username() or get_config('username') or raw_input('ID: ') 18 | args.password = args.password or get_config('password') or getpass('Password: ') 19 | elif len(args) == 1: 20 | args.username = args.username or XunleiClient(cookie_path=args.cookies, login=False).get_username() or get_config('username') 21 | args.password = args[0] 22 | if args.password == '-': 23 | args.password = getpass('Password: ') 24 | elif len(args) == 2: 25 | args.username, args.password = list(args) 26 | if args.password == '-': 27 | args.password = getpass('Password: ') 28 | elif len(args) == 3: 29 | args.username, args.password, args.cookies = list(args) 30 | if args.password == '-': 31 | args.password = getpass('Password: ') 32 | elif len(args) > 3: 33 | raise RuntimeError('Too many arguments') 34 | if not args.username: 35 | raise RuntimeError("What's your name?") 36 | if args.cookies: 37 | print 'Saving login session to', args.cookies 38 | else: 39 | print 'Testing login without saving session' 40 | import lixian_verification_code 41 | verification_code_reader = lixian_verification_code.default_verification_code_reader(args) 42 | XunleiClient(args.username, args.password, args.cookies, login=True, verification_code_reader=verification_code_reader) 43 | -------------------------------------------------------------------------------- /lixian_commands/logout.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian import XunleiClient 3 | from lixian_commands.util import * 4 | from lixian_cli_parser import * 5 | import lixian_config 6 | import 
lixian_help 7 | 8 | @command_line_parser(help=lixian_help.logout) 9 | @with_parser(parse_logging) 10 | @command_line_value('cookies', default=lixian_config.LIXIAN_DEFAULT_COOKIES) 11 | def logout(args): 12 | if len(args): 13 | raise RuntimeError('Too many arguments') 14 | print 'logging out from', args.cookies 15 | assert args.cookies 16 | client = XunleiClient(cookie_path=args.cookies, login=False) 17 | client.logout() 18 | 19 | -------------------------------------------------------------------------------- /lixian_commands/pause.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | from lixian_cli_parser import * 4 | from lixian_config import get_config 5 | from lixian_encoding import default_encoding 6 | import lixian_help 7 | import lixian_query 8 | 9 | @command_line_parser(help=lixian_help.pause) 10 | @with_parser(parse_login) 11 | @with_parser(parse_colors) 12 | @with_parser(parse_logging) 13 | @command_line_option('i') 14 | @command_line_option('all') 15 | @command_line_value('limit', default=get_config('limit')) 16 | @command_line_value('page-size', default=get_config('page-size')) 17 | def pause_task(args): 18 | client = create_client(args) 19 | to_pause = lixian_query.search_tasks(client, args) 20 | print "Below files are going to be paused:" 21 | for x in to_pause: 22 | print x['name'].encode(default_encoding) 23 | client.pause_tasks(to_pause) 24 | -------------------------------------------------------------------------------- /lixian_commands/readd.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | from lixian_cli_parser import * 4 | from lixian_encoding import default_encoding 5 | import lixian_help 6 | import lixian_query 7 | 8 | @command_line_parser(help=lixian_help.readd) 9 | @with_parser(parse_login) 10 | @with_parser(parse_logging) 11 | @command_line_option('deleted') 12 | @command_line_option('expired') 13 | @command_line_option('all') 14 | def readd_task(args): 15 | if args.deleted: 16 | status = 'deleted' 17 | elif args.expired: 18 | status = 'expired' 19 | else: 20 | raise NotImplementedError('Please use --expired or --deleted') 21 | client = create_client(args) 22 | if status == 'expired' and args.all: 23 | return client.readd_all_expired_tasks() 24 | to_readd = lixian_query.search_tasks(client, args) 25 | non_bt = [] 26 | bt = [] 27 | if not to_readd: 28 | return 29 | print "Below files are going to be re-added:" 30 | for x in to_readd: 31 | print x['name'].encode(default_encoding) 32 | if x['type'] == 'bt': 33 | bt.append((x['bt_hash'], x['id'])) 34 | else: 35 | non_bt.append((x['original_url'], x['id'])) 36 | if non_bt: 37 | urls, ids = zip(*non_bt) 38 | client.add_batch_tasks(urls, ids) 39 | for hash, id in bt: 40 | client.add_torrent_task_by_info_hash(hash, id) 41 | -------------------------------------------------------------------------------- /lixian_commands/rename.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | from lixian_cli_parser import * 4 | from lixian_encoding import from_native 5 | import lixian_help 6 | import re 7 | import sys 8 | 9 | @command_line_parser(help=lixian_help.rename) 10 | @with_parser(parse_login) 11 | @with_parser(parse_logging) 12 | def rename_task(args): 13 | if len(args) != 2 or not re.match(r'\d+$', args[0]): 14 | usage(lixian_help.rename, 'Incorrect arguments') 15 | sys.exit(1) 16 | 
client = create_client(args) 17 | taskid, new_name = args 18 | task = client.get_task_by_id(taskid) 19 | client.rename_task(task, from_native(new_name)) 20 | -------------------------------------------------------------------------------- /lixian_commands/restart.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_commands.util import * 3 | from lixian_cli_parser import * 4 | from lixian_config import get_config 5 | from lixian_encoding import default_encoding 6 | import lixian_help 7 | import lixian_query 8 | 9 | @command_line_parser(help=lixian_help.restart) 10 | @with_parser(parse_login) 11 | @with_parser(parse_colors) 12 | @with_parser(parse_logging) 13 | @command_line_option('i') 14 | @command_line_option('all') 15 | @command_line_value('limit', default=get_config('limit')) 16 | @command_line_value('page-size', default=get_config('page-size')) 17 | def restart_task(args): 18 | client = create_client(args) 19 | to_restart = lixian_query.search_tasks(client, args) 20 | print "Below files are going to be restarted:" 21 | for x in to_restart: 22 | print x['name'].encode(default_encoding) 23 | client.restart_tasks(to_restart) 24 | -------------------------------------------------------------------------------- /lixian_commands/util.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['parse_login', 'parse_colors', 'parse_logging', 'parse_size', 'create_client', 'output_tasks', 'usage'] 3 | 4 | from lixian_cli_parser import * 5 | from lixian_config import get_config 6 | from lixian_config import LIXIAN_DEFAULT_COOKIES 7 | from lixian_encoding import default_encoding, to_native 8 | from lixian_colors import colors 9 | from getpass import getpass 10 | import lixian_help 11 | 12 | @command_line_value('username', default=get_config('username')) 13 | @command_line_value('password', default=get_config('password')) 14 | @command_line_value('cookies', default=LIXIAN_DEFAULT_COOKIES) 15 | @command_line_value('verification-code-handler', default=get_config('verification-code-handler')) 16 | @command_line_value('verification-code-path', default=get_config('verification-code-path')) 17 | def parse_login(args): 18 | if args.password == '-': 19 | args.password = getpass('Password: ') 20 | if args.cookies == '-': 21 | args._args['cookies'] = None 22 | return args 23 | 24 | @command_line_option('colors', default=get_config('colors', True)) 25 | def parse_colors(args): 26 | pass 27 | 28 | @command_line_value('log-level', default=get_config('log-level')) 29 | @command_line_value('log-path', default=get_config('log-path')) 30 | @command_line_option('debug') 31 | @command_line_option('trace') 32 | def parse_logging(args): 33 | path = args.log_path 34 | level = args.log_level 35 | if args.trace: 36 | level = 'trace' 37 | elif args.debug: 38 | level = 'debug' 39 | if path or level: 40 | import lixian_logging 41 | level = level or 'info' 42 | lixian_logging.init_logger(use_colors=args.colors, level=level, path=path) 43 | logger = lixian_logging.get_logger() 44 | import lixian 45 | # inject logger to lixian (this makes lixian.py zero-dependency) 46 | lixian.logger = logger 47 | 48 | @command_line_option('size', default=get_config('size')) 49 | @command_line_option('format-size', default=get_config('format-size')) 50 | def parse_size(args): 51 | pass 52 | 53 | def create_client(args): 54 | from lixian import XunleiClient 55 | import lixian_verification_code 56 | verification_code_reader = 
lixian_verification_code.default_verification_code_reader(args) 57 | client = XunleiClient(args.username, args.password, args.cookies, verification_code_reader=verification_code_reader) 58 | if args.page_size: 59 | client.page_size = int(args.page_size) 60 | return client 61 | 62 | def output_tasks(tasks, columns, args, top=True): 63 | for i, t in enumerate(tasks): 64 | status_colors = { 65 | 'waiting': 'yellow', 66 | 'downloading': 'magenta', 67 | 'completed':'green', 68 | 'pending':'cyan', 69 | 'failed':'red', 70 | } 71 | c = status_colors[t['status_text']] 72 | with colors(args.colors).ansi(c)(): 73 | for k in columns: 74 | if k == 'n': 75 | if top: 76 | print '#%d' % t['#'], 77 | elif k == 'id': 78 | print t.get('index', t['id']), 79 | elif k == 'name': 80 | print t['name'].encode(default_encoding), 81 | elif k == 'status': 82 | with colors(args.colors).bold(): 83 | print t['status_text'], 84 | elif k == 'size': 85 | if args.format_size: 86 | from lixian_util import format_size 87 | print format_size(t['size']), 88 | else: 89 | print t['size'], 90 | elif k == 'progress': 91 | print t['progress'], 92 | elif k == 'speed': 93 | print t['speed'], 94 | elif k == 'date': 95 | print t['date'], 96 | elif k == 'dcid': 97 | print t['dcid'], 98 | elif k == 'gcid': 99 | print t['gcid'], 100 | elif k == 'original-url': 101 | print t['original_url'], 102 | elif k == 'download-url': 103 | print t['xunlei_url'], 104 | else: 105 | raise NotImplementedError(k) 106 | print 107 | 108 | def usage(doc=lixian_help.usage, message=None): 109 | if hasattr(doc, '__call__'): 110 | doc = doc() 111 | if message: 112 | print to_native(message) 113 | print to_native(doc).strip() 114 | -------------------------------------------------------------------------------- /lixian_config.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | import os.path 4 | 5 | def get_config_path(filename): 6 | if os.path.exists(filename): 7 | return filename 8 | import sys 9 | local_path = os.path.join(sys.path[0], filename) 10 | if os.path.exists(local_path): 11 | return local_path 12 | user_home = os.getenv('USERPROFILE') or os.getenv('HOME') 13 | lixian_home = os.getenv('LIXIAN_HOME') or user_home 14 | return os.path.join(lixian_home, filename) 15 | 16 | LIXIAN_DEFAULT_CONFIG = get_config_path('.xunlei.lixian.config') 17 | LIXIAN_DEFAULT_COOKIES = get_config_path('.xunlei.lixian.cookies') 18 | 19 | def load_config(path): 20 | values = {} 21 | if os.path.exists(path): 22 | with open(path) as x: 23 | for line in x.readlines(): 24 | line = line.strip() 25 | if line: 26 | if line.startswith('--'): 27 | line = line.lstrip('-') 28 | if line.startswith('no-'): 29 | values[line[3:]] = False 30 | elif '=' in line: 31 | k, v = line.split('=', 1) 32 | values[k] = v 33 | else: 34 | values[line] = True 35 | else: 36 | raise NotImplementedError(line) 37 | return values 38 | 39 | def dump_config(path, values): 40 | with open(path, 'w') as x: 41 | for k in values: 42 | v = values[k] 43 | if v is True: 44 | x.write('--%s\n'%k) 45 | elif v is False: 46 | x.write('--no-%s\n'%k) 47 | else: 48 | x.write('--%s=%s\n'%(k, v)) 49 | 50 | class Config: 51 | def __init__(self, path=LIXIAN_DEFAULT_CONFIG): 52 | self.path = path 53 | self.values = load_config(path) 54 | def put(self, k, v=True): 55 | self.values[k] = v 56 | dump_config(self.path, self.values) 57 | def get(self, k, v=None): 58 | return self.values.get(k, v) 59 | def delete(self, k): 60 | if k in self.values: 61 | del self.values[k] 62 | 
dump_config(self.path, self.values) 63 | def source(self): 64 | if os.path.exists(self.path): 65 | with open(self.path) as x: 66 | return x.read() 67 | def __str__(self): 68 | return '<Config %s>' % self.values 69 | 70 | global_config = Config() 71 | 72 | def put_config(k, v=True): 73 | if k.startswith('no-') and v is True: 74 | k = k[3:] 75 | v = False 76 | global_config.put(k, v) 77 | 78 | def get_config(k, v=None): 79 | return global_config.get(k, v) 80 | 81 | def delete_config(k): 82 | return global_config.delete(k) 83 | 84 | def source_config(): 85 | return global_config.source() 86 | 87 | -------------------------------------------------------------------------------- /lixian_download_asyn.py: -------------------------------------------------------------------------------- 1 | 2 | import asyncore 3 | import asynchat 4 | import socket 5 | import re 6 | #from cStringIO import StringIO 7 | from time import time, sleep 8 | import sys 9 | import os 10 | 11 | #asynchat.async_chat.ac_out_buffer_size = 1024*1024 12 | 13 | class http_client(asynchat.async_chat): 14 | 15 | def __init__(self, url, headers=None, start_from=0): 16 | asynchat.async_chat.__init__(self) 17 | 18 | self.args = {'headers': headers, 'start_from': start_from} 19 | 20 | m = re.match(r'http://([^/:]+)(?::(\d+))?(/.*)?$', url) 21 | assert m, 'Invalid url: %s' % url 22 | host, port, path = m.groups() 23 | port = int(port or 80) 24 | path = path or '/' 25 | 26 | def resolve_host(host): 27 | try: 28 | return socket.gethostbyname(host) 29 | except: 30 | pass 31 | host_ip = resolve_host(host) 32 | if not host_ip: 33 | self.log_error("host can't be resolved: " + host) 34 | self.size = None 35 | return 36 | if host_ip == '180.168.41.175': 37 | # fuck shanghai dian DNS 38 | self.log_error('gethostbyname failed') 39 | self.size = None 40 | return 41 | 42 | 43 | request_headers = {'host': host, 'connection': 'close'} 44 | if start_from: 45 | request_headers['RANGE'] = 'bytes=%d-' % start_from 46 | if headers: 47 | request_headers.update(headers) 48 | headers = request_headers 49 | self.request = 'GET %s HTTP/1.1\r\n%s\r\n\r\n' % (path, '\r\n'.join('%s: %s' % (k, headers[k]) for k in headers)) 50 | self.op = 'GET' 51 | 52 | self.headers = {} # for response headers 53 | 54 | #self.buffer = StringIO() 55 | self.buffer = [] 56 | self.buffer_size = 0 57 | self.cache_size = 1024*1024 58 | self.size = None 59 | self.completed = 0 60 | self.set_terminator("\r\n\r\n") 61 | self.reading_headers = True 62 | 63 | self.create_socket(socket.AF_INET, socket.SOCK_STREAM) 64 | try: 65 | self.connect((host, port)) 66 | except: 67 | self.close() 68 | self.log_error('connect_failed') 69 | 70 | def handle_connect(self): 71 | self.start_time = time() 72 | self.push(self.request) 73 | 74 | def handle_close(self): 75 | asynchat.async_chat.handle_close(self) 76 | self.flush_data() 77 | if self.reading_headers: 78 | self.log_error('incomplete http response') 79 | return 80 | self.handle_status_update(self.size, self.completed, force_update=True) 81 | self.handle_speed_update(self.completed, self.start_time, force_update=True) 82 | if self.size is not None and self.completed < self.size: 83 | self.log_error('incomplete download') 84 | 85 | def handle_connection_error(self): 86 | self.handle_error() 87 | 88 | def handle_error(self): 89 | self.close() 90 | self.flush_data() 91 | error_message = sys.exc_info()[1] 92 | self.log_error('there is some error: %s' % error_message) 93 | #raise 94 | 95 | def collect_incoming_data(self, data): 96 | if self.reading_headers: 97 | 
#self.buffer.write(data) 98 | self.buffer.append(data) 99 | self.buffer_size += len(data) 100 | return 101 | elif self.cache_size: 102 | #self.buffer.write(data) 103 | self.buffer.append(data) 104 | self.buffer_size += len(data) 105 | #if self.buffer.tell() > self.cache_size: 106 | if self.buffer_size > self.cache_size: 107 | #self.handle_data(self.buffer.getvalue()) 108 | self.handle_data(''.join(self.buffer)) 109 | #self.buffer.truncate(0) 110 | #self.buffer.clear() 111 | del self.buffer[:] 112 | self.buffer_size = 0 113 | else: 114 | self.handle_data(data) 115 | 116 | self.completed += len(data) 117 | self.handle_status_update(self.size, self.completed) 118 | self.handle_speed_update(self.completed, self.start_time) 119 | if self.size == self.completed: 120 | self.close() 121 | self.flush_data() 122 | self.handle_status_update(self.size, self.completed, force_update=True) 123 | self.handle_speed_update(self.completed, self.start_time, force_update=True) 124 | 125 | def handle_data(self, data): 126 | print len(data) 127 | pass 128 | 129 | def flush_data(self): 130 | #if self.buffer.tell(): 131 | if self.buffer_size: 132 | #self.handle_data(self.buffer.getvalue()) 133 | self.handle_data(''.join(self.buffer)) 134 | #self.buffer.truncate(0) 135 | del self.buffer[:] 136 | self.buffer_size = 0 137 | 138 | def parse_headers(self, header): 139 | lines = header.split('\r\n') 140 | status_line = lines.pop(0) 141 | #print status_line 142 | protocal, status_code, status_text = re.match(r'^HTTP/([\d.]+) (\d+) (.+)$', status_line).groups() 143 | status_code = int(status_code) 144 | self.status_code = status_code 145 | self.status_text = status_text 146 | #headers = dict(h.split(': ', 1) for h in lines) 147 | for k, v in (h.split(': ', 1) for h in lines): 148 | self.headers[k.lower()] = v 149 | 150 | if status_code in (200, 206): 151 | pass 152 | elif status_code == 302: 153 | return self.handle_http_relocate(self.headers['location']) 154 | else: 155 | return self.handle_http_status_error() 156 | 157 | self.size = self.headers.get('content-length', None) 158 | if self.size is not None: 159 | self.size = int(self.size) 160 | self.handle_http_headers() 161 | 162 | def found_terminator(self): 163 | if self.reading_headers: 164 | self.reading_headers = False 165 | #self.parse_headers("".join(self.buffer.getvalue())) 166 | self.parse_headers("".join(self.buffer)) 167 | #self.buffer.truncate(0) 168 | del self.buffer[:] 169 | self.buffer_size = 0 170 | self.set_terminator(None) 171 | else: 172 | raise NotImplementedError() 173 | 174 | def handle_http_headers(self): 175 | pass 176 | 177 | def handle_http_status_error(self): 178 | self.close() 179 | 180 | def handle_http_relocate(self, location): 181 | self.close() 182 | relocate_times = getattr(self, 'relocate_times', 0) 183 | max_relocate_times = getattr(self, 'max_relocate_times', 2) 184 | if relocate_times >= max_relocate_times: 185 | raise Exception('too many relocate times') 186 | new_client = self.__class__(location, **self.args) 187 | new_client.relocate_times = relocate_times + 1 188 | new_client.max_relocate_times = max_relocate_times 189 | self.next_client = new_client 190 | 191 | def handle_status_update(self, total, completed, force_update=False): 192 | pass 193 | 194 | def handle_speed_update(self, completed, start_time, force_update=False): 195 | pass 196 | 197 | def log_error(self, message): 198 | print 'log_error', message 199 | self.error_message = message 200 | 201 | class ProgressBar: 202 | def __init__(self, total=0): 203 | self.total = 
total 204 | self.completed = 0 205 | self.start = time() 206 | self.speed = 0 207 | self.bar_width = 0 208 | self.displayed = False 209 | def update(self): 210 | self.displayed = True 211 | bar_size = 40 212 | if self.total: 213 | percent = self.completed * 100.0 / self.total 214 | if percent > 100: 215 | percent = 100.0 216 | dots = int(bar_size * percent / 100) 217 | plus = percent / 100 * bar_size - dots 218 | if plus > 0.8: 219 | plus = '=' 220 | elif plus > 0.4: 221 | plus = '-' 222 | else: 223 | plus = '' 224 | bar = '=' * dots + plus 225 | percent = int(percent) 226 | else: 227 | percent = 0 228 | bar = '-' 229 | speed = self.speed 230 | if speed < 1000: 231 | speed = '%sB/s' % int(speed) 232 | elif speed < 1000*10: 233 | speed = '%.1fK/s' % (speed/1000.0) 234 | elif speed < 1000*1000: 235 | speed = '%dK/s' % int(speed/1000) 236 | elif speed < 1000*1000*100: 237 | speed = '%.1fM/s' % (speed/1000.0/1000.0) 238 | else: 239 | speed = '%dM/s' % int(speed/1000/1000) 240 | seconds = time() - self.start 241 | if seconds < 10: 242 | seconds = '%.1fs' % seconds 243 | elif seconds < 60: 244 | seconds = '%ds' % int(seconds) 245 | elif seconds < 60*60: 246 | seconds = '%dm%ds' % (int(seconds/60), int(seconds)%60) 247 | elif seconds < 60*60*24: 248 | seconds = '%dh%dm%ds' % (int(seconds)/60/60, (int(seconds)/60)%60, int(seconds)%60) 249 | else: 250 | seconds = int(seconds) 251 | days = seconds/60/60/24 252 | seconds -= days*60*60*24 253 | hours = seconds/60/60 254 | seconds -= hours*60*60 255 | minutes = seconds/60 256 | seconds -= minutes*60 257 | seconds = '%dd%dh%dm%ds' % (days, hours, minutes, seconds) 258 | completed = ','.join((x[::-1] for x in reversed(re.findall('..?.?', str(self.completed)[::-1])))) 259 | bar = '{0:>3}%[{1:<40}] {2:<12} {3:>4} in {4:>6s}'.format(percent, bar, completed, speed, seconds) 260 | new_bar_width = len(bar) 261 | bar = bar.ljust(self.bar_width) 262 | self.bar_width = new_bar_width 263 | sys.stdout.write('\r'+bar) 264 | sys.stdout.flush() 265 | def update_status(self, total, completed): 266 | self.total = total 267 | self.completed = completed 268 | self.update() 269 | def update_speed(self, start, speed): 270 | self.start = start 271 | self.speed = speed 272 | self.update() 273 | def done(self): 274 | if self.displayed: 275 | print 276 | self.displayed = False 277 | 278 | def download(url, path, headers=None, resuming=False): 279 | class download_client(http_client): 280 | def __init__(self, url, headers=headers, start_from=0): 281 | self.output = None 282 | self.bar = ProgressBar() 283 | http_client.__init__(self, url, headers=headers, start_from=start_from) 284 | self.start_from = start_from 285 | self.last_status_time = time() 286 | self.last_speed_time = time() 287 | self.last_size = 0 288 | self.path = path 289 | def handle_close(self): 290 | http_client.handle_close(self) 291 | if self.output: 292 | self.output.close() 293 | self.output = None 294 | def handle_http_status_error(self): 295 | http_client.handle_http_status_error(self) 296 | self.log_error('http status error: %s, %s' % (self.status_code, self.status_text)) 297 | def handle_data(self, data): 298 | if not self.output: 299 | if self.start_from: 300 | self.output = open(path, 'ab') 301 | else: 302 | self.output = open(path, 'wb') 303 | self.output.write(data) 304 | def handle_status_update(self, total, completed, force_update=False): 305 | if total is None: 306 | return 307 | if time() - self.last_status_time > 1 or force_update: 308 | #print '%.02f' % (completed*100.0/total) 309 | 
self.bar.update_status(total+start_from, completed+start_from) 310 | self.last_status_time = time() 311 | def handle_speed_update(self, completed, start_time, force_update=False): 312 | now = time() 313 | period = now - self.last_speed_time 314 | if period > 1 or force_update: 315 | #print '%.02f, %.02f' % ((completed-self.last_size)/period, completed/(now-start_time)) 316 | self.bar.update_speed(start_time, (completed-self.last_size)/period) 317 | self.last_speed_time = time() 318 | self.last_size = completed 319 | def log_error(self, message): 320 | self.bar.done() 321 | http_client.log_error(self, message) 322 | def __del__(self): # XXX: sometimes handle_close() is not called, don't know why... 323 | #http_client.__del__(self) 324 | if self.output: 325 | self.output.close() 326 | self.output = None 327 | 328 | max_retry_times = 25 329 | retry_times = 0 330 | start_from = 0 331 | if resuming and os.path.exists(path): 332 | start_from = os.path.getsize(path) 333 | # TODO: fix status bar for resuming 334 | while True: 335 | client = download_client(url, start_from=start_from) 336 | asyncore.loop() 337 | while hasattr(client, 'next_client'): 338 | client = client.next_client 339 | client.bar.done() 340 | if getattr(client, 'error_message', None): 341 | retry_times += 1 342 | if retry_times >= max_retry_times: 343 | raise Exception(client.error_message) 344 | if client.size and client.completed: 345 | start_from = os.path.getsize(path) 346 | print 'retry', retry_times 347 | sleep(retry_times) 348 | else: 349 | break 350 | 351 | 352 | def main(): 353 | url, path = sys.argv[1:] 354 | download(url, path) 355 | 356 | if __name__ == '__main__': 357 | main() 358 | 359 | -------------------------------------------------------------------------------- /lixian_download_tools.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['download_tool', 'get_tool'] 3 | 4 | from lixian_config import * 5 | import subprocess 6 | import urllib2 7 | import os.path 8 | 9 | download_tools = {} 10 | 11 | def download_tool(name): 12 | def register(tool): 13 | download_tools[name] = tool_adaptor(tool) 14 | return tool 15 | return register 16 | 17 | class DownloadToolAdaptor: 18 | def __init__(self, tool, **kwargs): 19 | self.tool = tool 20 | self.client = kwargs['client'] 21 | self.url = kwargs['url'] 22 | self.path = kwargs['path'] 23 | self.resuming = kwargs.get('resuming') 24 | self.size = kwargs['size'] 25 | def finished(self): 26 | assert os.path.getsize(self.path) <= self.size, 'existing file (%s) bigger than expected (%s)' % (os.path.getsize(self.path), self.size) 27 | return os.path.getsize(self.path) == self.size 28 | def __call__(self): 29 | self.tool(self.client, self.url, self.path, self.resuming) 30 | 31 | def tool_adaptor(tool): 32 | import types 33 | if type(tool) == types.FunctionType: 34 | def adaptor(**kwargs): 35 | return DownloadToolAdaptor(tool, **kwargs) 36 | return adaptor 37 | else: 38 | return tool 39 | 40 | 41 | def check_bin(bin): 42 | import distutils.spawn 43 | assert distutils.spawn.find_executable(bin), "Can't find %s" % bin 44 | 45 | @download_tool('urllib2') 46 | def urllib2_download(client, download_url, filename, resuming=False): 47 | '''In the case you don't even have wget...''' 48 | assert not resuming 49 | print 'Downloading', download_url, 'to', filename, '...' 
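# the gdriveid cookie of the logged-in client identifies the Xunlei session used for the download; every tool in this module sends the same "Cookie: gdriveid=..." header (urllib2 here; wget, curl, aria2 and axel below pass it on their command lines)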
50 | request = urllib2.Request(download_url, headers={'Cookie': 'gdriveid='+client.get_gdriveid()}) 51 | response = urllib2.urlopen(request) 52 | import shutil 53 | with open(filename, 'wb') as output: 54 | shutil.copyfileobj(response, output) 55 | 56 | @download_tool('asyn') 57 | def asyn_download(client, download_url, filename, resuming=False): 58 | import lixian_download_asyn 59 | lixian_download_asyn.download(download_url, filename, headers={'Cookie': 'gdriveid='+str(client.get_gdriveid())}, resuming=resuming) 60 | 61 | @download_tool('wget') 62 | def wget_download(client, download_url, filename, resuming=False): 63 | gdriveid = str(client.get_gdriveid()) 64 | wget_opts = ['wget', '--header=Cookie: gdriveid='+gdriveid, download_url, '-O', filename] 65 | if resuming: 66 | wget_opts.append('-c') 67 | wget_opts.extend(get_config('wget-opts', '').split()) 68 | check_bin(wget_opts[0]) 69 | exit_code = subprocess.call(wget_opts) 70 | if exit_code != 0: 71 | raise Exception('wget exited abnormally') 72 | 73 | @download_tool('curl') 74 | def curl_download(client, download_url, filename, resuming=False): 75 | gdriveid = str(client.get_gdriveid()) 76 | curl_opts = ['curl', '-L', download_url, '--cookie', 'gdriveid='+gdriveid, '--output', filename] 77 | if resuming: 78 | curl_opts += ['--continue-at', '-'] 79 | curl_opts.extend(get_config('curl-opts', '').split()) 80 | check_bin(curl_opts[0]) 81 | exit_code = subprocess.call(curl_opts) 82 | if exit_code != 0: 83 | raise Exception('curl exited abnormally') 84 | 85 | @download_tool('aria2') 86 | @download_tool('aria2c') 87 | class Aria2DownloadTool: 88 | def __init__(self, **kwargs): 89 | self.gdriveid = str(kwargs['client'].get_gdriveid()) 90 | self.url = kwargs['url'] 91 | self.path = kwargs['path'] 92 | self.size = kwargs['size'] 93 | self.resuming = kwargs.get('resuming') 94 | def finished(self): 95 | assert os.path.getsize(self.path) <= self.size, 'existing file (%s) bigger than expected (%s)' % (os.path.getsize(self.path), self.size) 96 | return os.path.getsize(self.path) == self.size and not os.path.exists(self.path + '.aria2') 97 | def __call__(self): 98 | gdriveid = self.gdriveid 99 | download_url = self.url 100 | path = self.path 101 | resuming = self.resuming 102 | dir = os.path.dirname(path) 103 | filename = os.path.basename(path) 104 | aria2_opts = ['aria2c', '--header=Cookie: gdriveid='+gdriveid, download_url, '--out', filename, '--file-allocation=none'] 105 | if dir: 106 | aria2_opts.extend(('--dir', dir)) 107 | if resuming: 108 | aria2_opts.append('-c') 109 | aria2_opts.extend(get_config('aria2-opts', '').split()) 110 | check_bin(aria2_opts[0]) 111 | exit_code = subprocess.call(aria2_opts) 112 | if exit_code != 0: 113 | raise Exception('aria2c exited abnormally') 114 | 115 | @download_tool('axel') 116 | def axel_download(client, download_url, path, resuming=False): 117 | gdriveid = str(client.get_gdriveid()) 118 | axel_opts = ['axel', '--header=Cookie: gdriveid='+gdriveid, download_url, '--output', path] 119 | axel_opts.extend(get_config('axel-opts', '').split()) 120 | check_bin(axel_opts[0]) 121 | exit_code = subprocess.call(axel_opts) 122 | if exit_code != 0: 123 | raise Exception('axel exited abnormally') 124 | 125 | def get_tool(name): 126 | return download_tools[name] 127 | 128 | 129 | -------------------------------------------------------------------------------- /lixian_encoding.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_config import get_config 3 | import sys 4 | 
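# encoding used for console and filesystem I/O: an explicit 'encoding' config entry wins, otherwise the filesystem encoding, falling back to UTF-8 when it is missing or plain ASCII; to_native()/from_native() below convert between unicode and byte strings with it, e.g. to_native(u'abc') == 'abc' under the UTF-8 default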
5 | default_encoding = get_config('encoding', sys.getfilesystemencoding()) 6 | if default_encoding is None or default_encoding.lower() == 'ascii': 7 | default_encoding = 'utf-8' 8 | 9 | 10 | def to_native(s): 11 | if type(s) == unicode: 12 | return s.encode(default_encoding) 13 | else: 14 | return s 15 | 16 | def from_native(s): 17 | if type(s) == str: 18 | return s.decode(default_encoding) 19 | else: 20 | return s 21 | 22 | def try_native_to_utf_8(url): 23 | try: 24 | return url.decode(default_encoding).encode('utf-8') 25 | except: 26 | return url 27 | 28 | -------------------------------------------------------------------------------- /lixian_filter_expr.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['filter_expr'] 3 | 4 | import re 5 | 6 | def get_name(x): 7 | assert isinstance(x, (basestring, dict)) 8 | if type(x) == dict: 9 | return x['name'] 10 | else: 11 | return x 12 | 13 | def filter_expr1(links, p): 14 | if not links: 15 | return links 16 | if re.match(r'^\[[^][]+\]$', p): 17 | matched = [] 18 | for p in re.split(r'\s*,\s*', p[1:-1]): 19 | assert re.match(r'^\d+(-\d+)?|\.\w+$', p), p 20 | if re.match(r'^\d+$', p): 21 | i = int(p) 22 | matched.append((i, links[i])) 23 | elif '-' in p: 24 | start, end = p.split('-') 25 | if not start: 26 | start = 0 27 | if not end: 28 | end = len(links) - 1 29 | start = int(start) 30 | end = int(end) 31 | assert 0 <= start < len(links) 32 | assert 0 <= end < len(links) 33 | if start <= end: 34 | matched += list(enumerate(links))[start:end+1] 35 | else: 36 | matched += reversed(list(enumerate(links))[end:start+1]) 37 | elif p.startswith('.'): 38 | matched += filter(lambda (i, x): get_name(x).lower().endswith(p.lower()), enumerate(links)) 39 | else: 40 | raise NotImplementedError(p) 41 | indexes = [] 42 | for i, _ in matched: 43 | if i not in indexes: 44 | indexes.append(i) 45 | return [links[x] for x in indexes] 46 | elif re.match(r'^\d+$', p): 47 | n = int(p) 48 | if 0 <= n < len(links): 49 | return [links[int(p)]] 50 | else: 51 | return filter(lambda x: re.search(p, get_name(x), re.I), links) 52 | elif p == '*': 53 | return links 54 | elif re.match(r'\.\w+$', p): 55 | return filter(lambda x: get_name(x).lower().endswith(p.lower()), links) 56 | else: 57 | import lixian_plugins.filters 58 | filter_results = lixian_plugins.filters.filter_things(links, p) 59 | if filter_results is None: 60 | return filter(lambda x: re.search(p, get_name(x), re.I), links) 61 | else: 62 | return filter_results 63 | 64 | def filter_expr(links, expr): 65 | for p in expr.split('/'): 66 | links = filter_expr1(links, p) 67 | return links 68 | 69 | 70 | -------------------------------------------------------------------------------- /lixian_hash.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import hashlib 4 | import lixian_hash_ed2k 5 | import lixian_hash_bt 6 | import os 7 | 8 | def lib_hash_file(h, path): 9 | with open(path, 'rb') as stream: 10 | while True: 11 | bytes = stream.read(1024*1024) 12 | if not bytes: 13 | break 14 | h.update(bytes) 15 | return h.hexdigest() 16 | 17 | def sha1_hash_file(path): 18 | return lib_hash_file(hashlib.sha1(), path) 19 | 20 | def verify_sha1(path, sha1): 21 | return sha1_hash_file(path).lower() == sha1.lower() 22 | 23 | def md5_hash_file(path): 24 | return lib_hash_file(hashlib.md5(), path) 25 | 26 | def verify_md5(path, md5): 27 | return md5_hash_file(path).lower() == md5.lower() 28 | 29 | def 
md4_hash_file(path): 30 | return lib_hash_file(hashlib.new('md4'), path) 31 | 32 | def verify_md4(path, md4): 33 | return md4_hash_file(path).lower() == md4.lower() 34 | 35 | def dcid_hash_file(path): 36 | h = hashlib.sha1() 37 | size = os.path.getsize(path) 38 | with open(path, 'rb') as stream: 39 | if size < 0xF000: 40 | h.update(stream.read()) 41 | else: 42 | h.update(stream.read(0x5000)) 43 | stream.seek(size/3) 44 | h.update(stream.read(0x5000)) 45 | stream.seek(size-0x5000) 46 | h.update(stream.read(0x5000)) 47 | return h.hexdigest() 48 | 49 | def verify_dcid(path, dcid): 50 | return dcid_hash_file(path).lower() == dcid.lower() 51 | 52 | def main(args): 53 | option = args.pop(0) 54 | def verify_bt(f, t): 55 | from lixian_progress import SimpleProgressBar 56 | bar = SimpleProgressBar() 57 | result = lixian_hash_bt.verify_bt_file(t, f, progress_callback=bar.update) 58 | bar.done() 59 | return result 60 | if option.startswith('--verify'): 61 | hash_fun = {'--verify-sha1':verify_sha1, 62 | '--verify-md5':verify_md5, 63 | '--verify-md4':verify_md4, 64 | '--verify-dcid':verify_dcid, 65 | '--verify-ed2k':lixian_hash_ed2k.verify_ed2k_link, 66 | '--verify-bt': verify_bt, 67 | }[option] 68 | assert len(args) == 2 69 | hash, path = args 70 | if hash_fun(path, hash): 71 | print 'looks good...' 72 | else: 73 | print 'failed...' 74 | else: 75 | hash_fun = {'--sha1':sha1_hash_file, 76 | '--md5':md5_hash_file, 77 | '--md4':md4_hash_file, 78 | '--dcid':dcid_hash_file, 79 | '--ed2k':lixian_hash_ed2k.generate_ed2k_link, 80 | '--info-hash':lixian_hash_bt.info_hash, 81 | }[option] 82 | for f in args: 83 | h = hash_fun(f) 84 | print '%s *%s' % (h, f) 85 | 86 | if __name__ == '__main__': 87 | import sys 88 | args = sys.argv[1:] 89 | main(args) 90 | 91 | -------------------------------------------------------------------------------- /lixian_hash_bt.py: -------------------------------------------------------------------------------- 1 | 2 | import os.path 3 | import sys 4 | import hashlib 5 | from cStringIO import StringIO 6 | import re 7 | 8 | from lixian_encoding import default_encoding 9 | 10 | def magnet_to_infohash(magnet): 11 | import re 12 | import base64 13 | m = re.match(r'magnet:\?xt=urn:btih:(\w+)', magnet) 14 | assert m, magnet 15 | code = m.group(1) 16 | if re.match(r'^[a-zA-Z0-9]{40}$', code): 17 | return code.decode('hex') 18 | else: 19 | return base64.b32decode(code) 20 | 21 | class decoder: 22 | def __init__(self, bytes): 23 | self.bytes = bytes 24 | self.i = 0 25 | def decode_value(self): 26 | x = self.bytes[self.i] 27 | if x.isdigit(): 28 | return self.decode_string() 29 | self.i += 1 30 | if x == 'd': 31 | v = {} 32 | while self.peek() != 'e': 33 | k = self.decode_string() 34 | v[k] = self.decode_value() 35 | self.i += 1 36 | return v 37 | elif x == 'l': 38 | v = [] 39 | while self.peek() != 'e': 40 | v.append(self.decode_value()) 41 | self.i += 1 42 | return v 43 | elif x == 'i': 44 | return self.decode_int() 45 | else: 46 | raise NotImplementedError(x) 47 | def decode_string(self): 48 | i = self.bytes.index(':', self.i) 49 | n = int(self.bytes[self.i:i]) 50 | s = self.bytes[i+1:i+1+n] 51 | self.i = i + 1 + n 52 | return s 53 | def decode_int(self): 54 | e = self.bytes.index('e', self.i) 55 | n = int(self.bytes[self.i:e]) 56 | self.i = e + 1 57 | return n 58 | def peek(self): 59 | return self.bytes[self.i] 60 | 61 | class encoder: 62 | def __init__(self, stream): 63 | self.stream = stream 64 | def encode(self, v): 65 | if type(v) == str: 66 | self.stream.write(str(len(v))) 67 | 
self.stream.write(':') 68 | self.stream.write(v) 69 | elif type(v) == dict: 70 | self.stream.write('d') 71 | for k in sorted(v): 72 | self.encode(k) 73 | self.encode(v[k]) 74 | self.stream.write('e') 75 | elif type(v) == list: 76 | self.stream.write('l') 77 | for x in v: 78 | self.encode(x) 79 | self.stream.write('e') 80 | elif type(v) in (int, long): 81 | self.stream.write('i') 82 | self.stream.write(str(v)) 83 | self.stream.write('e') 84 | else: 85 | raise NotImplementedError(type(v)) 86 | 87 | def bdecode(bytes): 88 | return decoder(bytes).decode_value() 89 | 90 | def bencode(v): 91 | from cStringIO import StringIO 92 | stream = StringIO() 93 | encoder(stream).encode(v) 94 | return stream.getvalue() 95 | 96 | def assert_content(content): 97 | assert re.match(r'd\d+:', content), 'Probably not a valid content file [%s...]' % repr(content[:17]) 98 | 99 | def info_hash_from_content(content): 100 | assert_content(content) 101 | return hashlib.sha1(bencode(bdecode(content)['info'])).hexdigest() 102 | 103 | def info_hash(path): 104 | if not path.lower().endswith('.torrent'): 105 | print '[WARN] Is it really a .torrent file? '+path 106 | if os.path.getsize(path) > 3*1000*1000: 107 | raise NotImplementedError('Torrent file too big') 108 | with open(path, 'rb') as stream: 109 | return info_hash_from_content(stream.read()) 110 | 111 | def encode_path(path): 112 | return path.decode('utf-8').encode(default_encoding) 113 | 114 | class sha1_reader: 115 | def __init__(self, pieces, progress_callback=None): 116 | assert pieces 117 | assert len(pieces) % 20 == 0 118 | self.total = len(pieces)/20 119 | self.processed = 0 120 | self.stream = StringIO(pieces) 121 | self.progress_callback = progress_callback 122 | def next_sha1(self): 123 | self.processed += 1 124 | if self.progress_callback: 125 | self.progress_callback(float(self.processed)/self.total) 126 | return self.stream.read(20) 127 | 128 | def sha1_update_stream(sha1, stream, n): 129 | while n > 0: 130 | readn = min(n, 1024*1024) 131 | bytes = stream.read(readn) 132 | assert len(bytes) == readn 133 | n -= readn 134 | sha1.update(bytes) 135 | assert n == 0 136 | 137 | def verify_bt_single_file(path, info, progress_callback=None): 138 | # TODO: check md5sum if available 139 | if os.path.getsize(path) != info['length']: 140 | return False 141 | piece_length = info['piece length'] 142 | assert piece_length > 0 143 | sha1_stream = sha1_reader(info['pieces'], progress_callback=progress_callback) 144 | size = info['length'] 145 | with open(path, 'rb') as stream: 146 | while size > 0: 147 | n = min(size, piece_length) 148 | size -= n 149 | sha1sum = hashlib.sha1() 150 | sha1_update_stream(sha1sum, stream, n) 151 | if sha1sum.digest() != sha1_stream.next_sha1(): 152 | return False 153 | assert size == 0 154 | assert stream.read(1) == '' 155 | assert sha1_stream.next_sha1() == '' 156 | return True 157 | 158 | def verify_bt_multiple(folder, info, file_set=None, progress_callback=None): 159 | # TODO: check md5sum if available 160 | piece_length = info['piece length'] 161 | assert piece_length > 0 162 | 163 | path_encoding = info.get('encoding', 'utf-8') 164 | files = [] 165 | for x in info['files']: 166 | if 'path.utf-8' in x: 167 | unicode_path = [p.decode('utf-8') for p in x['path.utf-8']] 168 | else: 169 | unicode_path = [p.decode(path_encoding) for p in x['path']] 170 | native_path = [p.encode(default_encoding) for p in unicode_path] 171 | utf8_path = [p.encode('utf-8') for p in unicode_path] 172 | files.append({'path':os.path.join(folder, 
apply(os.path.join, native_path)), 'length':x['length'], 'file':utf8_path}) 173 | 174 | sha1_stream = sha1_reader(info['pieces'], progress_callback=progress_callback) 175 | sha1sum = hashlib.sha1() 176 | 177 | piece_left = piece_length 178 | complete_piece = True 179 | 180 | while files: 181 | f = files.pop(0) 182 | path = f['path'] 183 | size = f['length'] 184 | if os.path.exists(path) and ((not file_set) or (f['file'] in file_set)): 185 | if os.path.getsize(path) != size: 186 | return False 187 | if size <= piece_left: 188 | with open(path, 'rb') as stream: 189 | sha1_update_stream(sha1sum, stream, size) 190 | assert stream.read(1) == '' 191 | piece_left -= size 192 | if not piece_left: 193 | if sha1sum.digest() != sha1_stream.next_sha1() and complete_piece: 194 | return False 195 | complete_piece = True 196 | sha1sum = hashlib.sha1() 197 | piece_left = piece_length 198 | else: 199 | with open(path, 'rb') as stream: 200 | while size >= piece_left: 201 | size -= piece_left 202 | sha1_update_stream(sha1sum, stream, piece_left) 203 | if sha1sum.digest() != sha1_stream.next_sha1() and complete_piece: 204 | return False 205 | complete_piece = True 206 | sha1sum = hashlib.sha1() 207 | piece_left = piece_length 208 | if size: 209 | sha1_update_stream(sha1sum, stream, size) 210 | piece_left -= size 211 | else: 212 | if size: 213 | while size >= piece_left: 214 | size -= piece_left 215 | sha1_stream.next_sha1() 216 | sha1sum = hashlib.sha1() 217 | piece_left = piece_length 218 | if size: 219 | complete_piece = False 220 | piece_left -= size 221 | else: 222 | complete_piece = True 223 | 224 | if piece_left < piece_length: 225 | if complete_piece: 226 | if sha1sum.digest() != sha1_stream.next_sha1(): 227 | return False 228 | else: 229 | sha1_stream.next_sha1() 230 | assert sha1_stream.next_sha1() == '' 231 | 232 | return True 233 | 234 | def verify_bt(path, info, file_set=None, progress_callback=None): 235 | if not os.path.exists(path): 236 | raise Exception("File doesn't exist: %s" % path) 237 | if 'files' not in info: 238 | if os.path.isfile(path): 239 | return verify_bt_single_file(path, info, progress_callback=progress_callback) 240 | else: 241 | path = os.path.join(path, encode_path(info['name'])) 242 | return verify_bt_single_file(path, info, progress_callback=progress_callback) 243 | else: 244 | return verify_bt_multiple(path, info, file_set=file_set, progress_callback=progress_callback) 245 | 246 | def verify_bt_file(path, torrent_path, file_set=None, progress_callback=None): 247 | with open(torrent_path, 'rb') as x: 248 | return verify_bt(path, bdecode(x.read())['info'], file_set, progress_callback) 249 | 250 | -------------------------------------------------------------------------------- /lixian_hash_ed2k.py: -------------------------------------------------------------------------------- 1 | 2 | import hashlib 3 | 4 | chunk_size = 9728000 5 | buffer_size = 1024*1024 6 | 7 | def md4(): 8 | return hashlib.new('md4') 9 | 10 | def hash_stream(stream): 11 | total_md4 = None 12 | while True: 13 | chunk_md4 = md4() 14 | chunk_left = chunk_size 15 | while chunk_left: 16 | n = min(chunk_left, buffer_size) 17 | part = stream.read(n) 18 | chunk_md4.update(part) 19 | if len(part) < n: 20 | if total_md4: 21 | total_md4.update(chunk_md4.digest()) 22 | return total_md4.hexdigest() 23 | else: 24 | return chunk_md4.hexdigest() 25 | chunk_left -= n 26 | if total_md4 is None: 27 | total_md4 = md4() 28 | total_md4.update(chunk_md4.digest()) 29 | raise NotImplementedError() 30 | 31 | def hash_string(s): 32 
| from cStringIO import StringIO 33 | return hash_stream(StringIO(s)) 34 | 35 | def hash_file(path): 36 | with open(path, 'rb') as stream: 37 | return hash_stream(stream) 38 | 39 | def parse_ed2k_link(link): 40 | import re, urllib 41 | ed2k_re = r'ed2k://\|file\|([^|]*)\|(\d+)\|([a-fA-F0-9]{32})\|' 42 | m = re.match(ed2k_re, link) or re.match(ed2k_re, urllib.unquote(link)) 43 | if not m: 44 | raise Exception('not an acceptable ed2k link: '+link) 45 | name, file_size, hash_hex = m.groups() 46 | from lixian_url import unquote_url 47 | return unquote_url(name), hash_hex.lower(), int(file_size) 48 | 49 | def parse_ed2k_id(link): 50 | return parse_ed2k_link(link)[1:] 51 | 52 | def parse_ed2k_file(link): 53 | return parse_ed2k_link(link)[0] 54 | 55 | def verify_ed2k_link(path, link): 56 | hash_hex, file_size = parse_ed2k_id(link) 57 | import os.path 58 | if os.path.getsize(path) != file_size: 59 | return False 60 | return hash_file(path).lower() == hash_hex.lower() 61 | 62 | def generate_ed2k_link(path): 63 | import sys, os.path, urllib 64 | filename = os.path.basename(path) 65 | encoding = sys.getfilesystemencoding() 66 | if encoding.lower() != 'ascii': 67 | filename = filename.decode(encoding).encode('utf-8') 68 | return 'ed2k://|file|%s|%d|%s|/' % (urllib.quote(filename), os.path.getsize(path), hash_file(path)) 69 | 70 | def test_md4(): 71 | assert hash_string("") == '31d6cfe0d16ae931b73c59d7e0c089c0' 72 | assert hash_string("a") == 'bde52cb31de33e46245e05fbdbd6fb24' 73 | assert hash_string("abc") == 'a448017aaf21d8525fc10ae87aa6729d' 74 | assert hash_string("message digest") == 'd9130a8164549fe818874806e1c7014b' 75 | assert hash_string("abcdefghijklmnopqrstuvwxyz") == 'd79e1c308aa5bbcdeea8ed63df412da9' 76 | assert hash_string("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789") == '043f8582f241db351ce627e153e7f0e4' 77 | assert hash_string("12345678901234567890123456789012345678901234567890123456789012345678901234567890") == 'e33b4ddc9c38f2199c3e7b164fcc0536' 78 | 79 | 80 | -------------------------------------------------------------------------------- /lixian_help.py: -------------------------------------------------------------------------------- 1 | 2 | basic_commands = [ 3 | ('help', "try this help..."), 4 | ('login', "login Xunlei cloud"), 5 | ('download', "download tasks from Xunlei cloud"), 6 | ('list', "list tasks on Xunlei cloud"), 7 | ('add', "add tasks to Xunlei cloud"), 8 | ('delete', "delete tasks from Xunlei cloud"), 9 | ('pause', "pause tasks on Xunlei cloud"), 10 | ('restart', "restart tasks on Xunlei cloud"), 11 | ('rename', "rename task"), 12 | ('readd', "re-add tasks"), 13 | ('config', "save configuration so you don't have to repeat it"), 14 | ('info', "print user id, internal user id, and gdriveid"), 15 | ('logout', "logout from Xunlei cloud"), 16 | ] 17 | 18 | def join_commands(commands): 19 | n = max(len(x[0]) for x in commands) 20 | n = max(n, 10) 21 | return ''.join(' %%-%ds %%s\n' % n % (k, h) for (k, h) in commands) 22 | 23 | basic_usage = '''python lixian_cli.py <command> [<args>] 24 | 25 | Basic commands: 26 | ''' + join_commands(basic_commands) 27 | 28 | extended_usage = '' 29 | 30 | # lx 31 | def usage(): 32 | return basic_usage + ''' 33 | Use 'python lixian_cli.py help' for details. 34 | Use 'python lixian_cli.py help <command>' for more information on a specific command. 
35 | Check https://github.com/iambus/xunlei-lixian for detailed (and Chinese) doc.''' 36 | 37 | # lx xxx 38 | # lx help help 39 | help_help = '''Get helps: 40 | python lixian_cli.py help help 41 | python lixian_cli.py help examples 42 | python lixian_cli.py help readme 43 | python lixian_cli.py help <command>''' 44 | 45 | # lx xxx 46 | # lx help help 47 | help = help_help 48 | 49 | # lx help 50 | # lx -h 51 | def welcome_help(): 52 | return '''Python script for Xunlei cloud. 53 | 54 | Basic usage: 55 | ''' + basic_usage + extended_usage + '\n' + help_help 56 | 57 | def examples(): 58 | return '''python lixian_cli.py login "Your Xunlei account" "Your password" 59 | python lixian_cli.py login "Your password" 60 | python lixian_cli.py login 61 | 62 | python lixian_cli.py config username "Your Xunlei account" 63 | python lixian_cli.py config password "Your password" 64 | 65 | python lixian_cli.py list 66 | python lixian_cli.py list --completed 67 | python lixian_cli.py list --completed --name --original-url --download-url --no-status --no-id 68 | python lixian_cli.py list --deleted 69 | python lixian_cli.py list --expired 70 | python lixian_cli.py list id1 id2 71 | python lixian_cli.py list zip rar 72 | python lixian_cli.py list 2012.04.04 2012.04.05 73 | 74 | python lixian_cli.py download task-id 75 | python lixian_cli.py download ed2k-url 76 | python lixian_cli.py download --tool=wget ed2k-url 77 | python lixian_cli.py download --tool=asyn ed2k-url 78 | python lixian_cli.py download ed2k-url --output "file to save" 79 | python lixian_cli.py download id1 id2 id3 80 | python lixian_cli.py download url1 url2 url3 81 | python lixian_cli.py download --input download-urls-file 82 | python lixian_cli.py download --input download-urls-file --delete 83 | python lixian_cli.py download --input download-urls-file --output-dir root-dir-to-save-files 84 | python lixian_cli.py download bt://torrent-info-hash 85 | python lixian_cli.py download 1.torrent 86 | python lixian_cli.py download torrent-info-hash 87 | python lixian_cli.py download --bt http://xxx/xxx.torrent 88 | python lixian_cli.py download bt-task-id/file-id 89 | python lixian_cli.py download --all 90 | python lixian_cli.py download mkv 91 | python lixian_cli.py download 2012.04.04 92 | python lixian_cli.py download 0 1 2 93 | python lixian_cli.py download 0-2 94 | 95 | python lixian_cli.py add url 96 | python lixian_cli.py add 1.torrent 97 | python lixian_cli.py add torrent-info-hash 98 | python lixian_cli.py add --bt http://xxx/xxx.torrent 99 | 100 | python lixian_cli.py delete task-id 101 | python lixian_cli.py delete url 102 | python lixian_cli.py delete file-name-on-cloud-to-delete 103 | 104 | python lixian_cli.py pause id 105 | 106 | python lixian_cli.py restart id 107 | 108 | python lixian_cli.py rename id name 109 | 110 | python lixian_cli.py logout 111 | 112 | Please check https://github.com/iambus/xunlei-lixian for detailed (and Chinese) doc. 113 | ''' 114 | 115 | def readme(): 116 | import sys 117 | import os.path 118 | doc = os.path.join(sys.path[0], 'README.md') 119 | with open(doc) as txt: 120 | return txt.read().decode('utf-8') 121 | 122 | 123 | login = '''python lixian_cli.py login 124 | 125 | login Xunlei cloud 126 | 127 | Examples: 128 | python lixian_cli.py login "Your Xunlei account" "Your password" 129 | python lixian_cli.py login "Your password" 130 | python lixian_cli.py login 131 | ''' 132 | 133 | download = '''python lixian_cli.py download [options] [id|url]... 
134 | 135 | download tasks from Xunlei cloud 136 | 137 | Options: 138 | --input=[file] -i Download URLs found in file. 139 | --output=[file] -o Download task to file. 140 | --output-dir=[dir] Download task to dir. 141 | --tool=[wget|asyn|aria2|curl] Choose download tool. 142 | Default: wget 143 | --continue -c Continue downloading a partially downloaded file. 144 | Default: false. 145 | --overwrite Overwrite partially downloaded file. 146 | Default: false. 147 | --delete Delete task from Xunlei cloud after download is finished. 148 | Default: false. 149 | --torrent --bt Treat URLs as torrent files 150 | Default: false. 151 | --all Download all tasks. This option will be ignored if specific download URLs or task ids can be found. 152 | Default: false. 153 | --hash When this option is false (--no-hash), never do full hash, but a minimal hash will be performed (supposed to be very fast). 154 | Default: true. 155 | --mini-hash If the target file already exists, and the file size is complete, do a minimal hash (instead of full hash, which would be much more expensive). This is useful when you are resuming a batch download, in this case the previously downloaded and verified files won't be re-verified. 156 | Default: false. 157 | 158 | Examples: 159 | python lixian_cli.py download task-id 160 | python lixian_cli.py download ed2k-url 161 | python lixian_cli.py download --tool=wget ed2k-url 162 | python lixian_cli.py download --tool=asyn ed2k-url 163 | python lixian_cli.py download ed2k-url --output "file to save" 164 | python lixian_cli.py download id1 id2 id3 165 | python lixian_cli.py download url1 url2 url3 166 | python lixian_cli.py download --input download-urls-file 167 | python lixian_cli.py download --input download-urls-file --delete 168 | python lixian_cli.py download --input download-urls-file --output-dir root-dir-to-save-files 169 | python lixian_cli.py download bt://torrent-info-hash 170 | python lixian_cli.py download 1.torrent 171 | python lixian_cli.py download torrent-info-hash 172 | python lixian_cli.py download --bt http://xxx/xxx.torrent 173 | python lixian_cli.py download bt-task-id/file-id 174 | python lixian_cli.py download --all 175 | python lixian_cli.py download mkv 176 | python lixian_cli.py download 2012.04.04 177 | python lixian_cli.py download 0 1 2 178 | python lixian_cli.py download 0-2 179 | ''' 180 | 181 | list = '''python lixian_cli.py list 182 | 183 | list tasks on Xunlei cloud 184 | 185 | Options: 186 | --completed Print only completed tasks. Default: no 187 | --deleted Print only deleted tasks. Default: no 188 | --expired Print only expired tasks. Default: no 189 | --[no]-n Print task sequence number. Default: no 190 | --[no]-id Print task id. Default: yes 191 | --[no]-name Print task name. Default: yes 192 | --[no]-status Print task status. Default: yes 193 | --[no]-size Print task size. Default: no 194 | --[no]-progress Print task progress (in percent). Default: no 195 | --[no]-speed Print task speed. Default: no 196 | --[no]-date Print the date task added. Default: no 197 | --[no]-original-url Print the original URL. Default: no 198 | --[no]-download-url Print the download URL used to download from Xunlei cloud. Default: no 199 | --[no]-format-size Print file size in human readable format. Default: no 200 | --[no]-colors Colorful output. 
Default: yes 201 | 202 | Examples: 203 | python lixian_cli.py list 204 | python lixian_cli.py list id 205 | python lixian_cli.py list bt-task-id/ 206 | python lixian_cli.py list --completed 207 | python lixian_cli.py list --completed --name --original-url --download-url --no-status --no-id 208 | python lixian_cli.py list --deleted 209 | python lixian_cli.py list --expired 210 | python lixian_cli.py list id1 id2 211 | python lixian_cli.py list zip rar 212 | python lixian_cli.py list 2012.04.04 2012.04.05 213 | ''' 214 | 215 | add = '''python lixian_cli.py add [options] url... 216 | 217 | add tasks to Xunlei cloud 218 | 219 | Options: 220 | --input=[file] Download URLs found in file. 221 | --torrent --bt Treat all arguments as torrent files (e.g. local torrent file, torrent http url, torrent info hash) 222 | Default: false. 223 | 224 | Examples: 225 | python lixian_cli.py add url 226 | python lixian_cli.py add 1.torrent 227 | python lixian_cli.py add torrent-info-hash 228 | python lixian_cli.py add --bt http://xxx/xxx.torrent 229 | ''' 230 | 231 | delete = '''python lixian_cli.py delete [options] [id|url|filename|keyword|date]... 232 | 233 | delete tasks from Xunlei cloud 234 | 235 | Options: 236 | -i prompt before delete 237 | --all delete all tasks if there are multiple matches 238 | 239 | Examples: 240 | python lixian_cli.py delete task-id 241 | python lixian_cli.py delete url 242 | python lixian_cli.py delete file-name-on-cloud-to-delete 243 | ''' 244 | 245 | pause = '''python lixian_cli.py pause [options] [id|url|filename|keyword|date]... 246 | 247 | pause tasks on Xunlei cloud 248 | 249 | Options: 250 | -i prompt before pausing tasks 251 | --all pause all tasks if there are multiple matches 252 | ''' 253 | 254 | restart = '''python lixian_cli.py restart [id|url|filename|keyword|date]... 255 | 256 | restart tasks on Xunlei cloud 257 | 258 | Options: 259 | -i prompt before restart 260 | --all restart all tasks if there are multiple matches 261 | ''' 262 | 263 | rename = '''python lixian_cli.py rename task-id task-name 264 | 265 | rename task 266 | ''' 267 | 268 | readd = '''python lixian_cli.py readd [--deleted|--expired] task-id... 
269 | 270 | re-add deleted/expired tasks 271 | 272 | Options: 273 | --deleted re-add deleted tasks 274 | --expired re-add expired tasks 275 | ''' 276 | 277 | config = '''python lixian_cli.py config key [value] 278 | 279 | save configuration so you don't have to repeat it 280 | 281 | Examples: 282 | python lixian_cli.py config username "your xunlei id" 283 | python lixian_cli.py config password "your xunlei password" 284 | python lixian_cli.py config continue 285 | ''' 286 | 287 | info = '''python lixian_cli.py info 288 | 289 | print user id, internal user id, and gdriveid 290 | 291 | Options: 292 | --id -i print user id only 293 | ''' 294 | 295 | logout = '''python lixian_cli.py logout 296 | 297 | logout from Xunlei cloud 298 | ''' 299 | 300 | 301 | -------------------------------------------------------------------------------- /lixian_logging.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['init_logger', 'get_logger'] 3 | 4 | import logging 5 | 6 | INFO = logging.INFO 7 | DEBUG = logging.DEBUG 8 | TRACE = 1 9 | 10 | def file_logger(path, level): 11 | import os.path 12 | path = os.path.expanduser(path) 13 | 14 | logger = logging.getLogger('lixian') 15 | logger.setLevel(min(level, DEBUG)) # if file log is enabled, always log debug message 16 | 17 | handler = logging.FileHandler(filename=path, ) 18 | handler.setFormatter(logging.Formatter('%(asctime)s %(message)s')) 19 | 20 | logger.addHandler(handler) 21 | 22 | return logger 23 | 24 | class ConsoleLogger: 25 | def __init__(self, level=INFO): 26 | self.level = level 27 | def stdout(self, message): 28 | print message 29 | def info(self, message): 30 | if self.level <= INFO: 31 | print message 32 | def debug(self, message): 33 | if self.level <= DEBUG: 34 | print message 35 | def trace(self, message): 36 | pass 37 | 38 | class FileLogger: 39 | def __init__(self, path, level=INFO, file_level=None, console_level=None): 40 | console_level = console_level or level 41 | file_level = file_level or level 42 | self.console = ConsoleLogger(console_level) 43 | self.logger = file_logger(path, file_level) 44 | def stdout(self, message): 45 | self.console.stdout(message) 46 | def info(self, message): 47 | self.console.info(message) 48 | self.logger.info(message) 49 | def debug(self, message): 50 | self.console.debug(message) 51 | self.logger.debug(message) 52 | def trace(self, message): 53 | self.logger.log(level=TRACE, msg=message) 54 | 55 | default_logger = None 56 | 57 | def init_logger(use_colors=True, level=INFO, path=None): 58 | global default_logger 59 | if not default_logger: 60 | if isinstance(level, int): 61 | assert level in (INFO, DEBUG, TRACE) 62 | console_level = level 63 | file_level = level 64 | elif isinstance(level, basestring): 65 | level = level.lower() 66 | if level in ('info', 'debug', 'trace'): 67 | level = {'info': INFO, 'debug': DEBUG, 'trace': TRACE}[level] 68 | console_level = level 69 | file_level = level 70 | else: 71 | console_level = INFO 72 | file_level = DEBUG 73 | for level in level.split(','): 74 | device, level = level.split(':') 75 | if device == 'console': 76 | console_level = {'info': INFO, 'debug': DEBUG, 'trace': TRACE}[level] 77 | elif device == 'file': 78 | file_level = {'info': INFO, 'debug': DEBUG, 'trace': TRACE}[level] 79 | else: 80 | raise NotImplementedError('Invalid logging level: ' + device) 81 | else: 82 | raise NotImplementedError(type(level)) 83 | if path: 84 | default_logger = FileLogger(path, console_level=console_level, file_level=file_level) 
85 | else: 86 | default_logger = ConsoleLogger(console_level) 87 | 88 | def get_logger(): 89 | init_logger() 90 | return default_logger 91 | 92 | -------------------------------------------------------------------------------- /lixian_nodes.py: -------------------------------------------------------------------------------- 1 | 2 | import lixian_logging 3 | 4 | import urllib2 5 | import re 6 | 7 | VOD_RANGE = '0-50' 8 | 9 | def resolve_node_url(url, gdriveid, timeout=60): 10 | request = urllib2.Request(url, headers={'Cookie': 'gdriveid=' + gdriveid}) 11 | response = urllib2.urlopen(request, timeout=timeout) 12 | response.close() 13 | return response.geturl() 14 | 15 | def switch_node_in_url(node_url, node): 16 | return re.sub(r'(http://)(vod\d+)(\.t\d+\.lixian\.vip\.xunlei\.com)', r'\1%s\3' % node, node_url) 17 | 18 | 19 | def switch_node(url, node, gdriveid): 20 | assert re.match(r'^vod\d+$', node) 21 | logger = lixian_logging.get_logger() 22 | logger.debug('Download URL: ' + url) 23 | try: 24 | url = resolve_node_url(url, gdriveid, timeout=60) 25 | logger.debug('Resolved URL: ' + url) 26 | except: 27 | import traceback 28 | logger.debug(traceback.format_exc()) 29 | return url 30 | url = switch_node_in_url(url, node) 31 | logger.debug('Switch to node URL: ' + url) 32 | return url 33 | 34 | def test_response_speed(response, max_size, max_duration): 35 | import time 36 | current_duration = 0 37 | current_size = 0 38 | start = time.clock() 39 | while current_duration < max_duration and current_size < max_size: 40 | data = response.read(max_size - current_size) 41 | if not data: 42 | # print "End of file" 43 | break 44 | current_size += len(data) 45 | end = time.clock() 46 | current_duration = end - start 47 | if current_size < 1024: 48 | raise Exception("Sample too small: %d" % current_size) 49 | return current_size / current_duration, current_size, current_duration 50 | 51 | 52 | def get_node_url_speed(url, gdriveid): 53 | request = urllib2.Request(url, headers={'Cookie': 'gdriveid=' + gdriveid}) 54 | response = urllib2.urlopen(request, timeout=3) 55 | speed, size, duration = test_response_speed(response, 2*1000*1000, 3) 56 | response.close() 57 | return speed 58 | 59 | 60 | def parse_vod_nodes(vod_nodes): 61 | if vod_nodes == 'all' or not vod_nodes: 62 | vod_nodes = VOD_RANGE 63 | nodes = [] 64 | # remove duplicate nodes 65 | seen = set() 66 | def add(node): 67 | if node not in seen: 68 | nodes.append(node) 69 | seen.add(node) 70 | for expr in re.split(r'\s*,\s*', vod_nodes): 71 | if re.match(r'^\d+-\d+$', expr): 72 | start, end = map(int, expr.split('-')) 73 | if start <= end: 74 | for i in range(start, end + 1): 75 | add("vod%d" % i) 76 | else: 77 | for i in range(start, end - 1, -1): 78 | add("vod%d" % i) 79 | elif re.match(r'^\d+$', expr): 80 | add('vod'+expr) 81 | else: 82 | raise Exception("Invalid vod expr: " + expr) 83 | return nodes 84 | 85 | def get_best_node_url_from(node_url, nodes, gdriveid): 86 | best = None 87 | best_speed = 0 88 | logger = lixian_logging.get_logger() 89 | for node in nodes: 90 | url = switch_node_in_url(node_url, node) 91 | try: 92 | speed = get_node_url_speed(url, gdriveid) 93 | logger.debug("%s speed: %s" % (node, speed)) 94 | if speed > best_speed: 95 | best_speed = speed 96 | best = url 97 | except Exception, e: 98 | logger.debug("%s error: %s" % (node, e)) 99 | return best 100 | 101 | def get_good_node_url_from(node_url, nodes, acceptable_speed, gdriveid): 102 | best = None 103 | best_speed = 0 104 | logger = lixian_logging.get_logger() 105 | for node 
in nodes: 106 | url = switch_node_in_url(node_url, node) 107 | try: 108 | speed = get_node_url_speed(url, gdriveid) 109 | logger.debug("%s speed: %s" % (node, speed)) 110 | if speed > acceptable_speed: 111 | return url 112 | elif speed > best_speed: 113 | best_speed = speed 114 | best = url 115 | except Exception, e: 116 | logger.debug("%s error: %s" % (node, e)) 117 | return best 118 | 119 | def use_node_by_policy(url, vod_nodes, gdriveid, policy): 120 | nodes = parse_vod_nodes(vod_nodes) 121 | assert nodes 122 | logger = lixian_logging.get_logger() 123 | logger.debug('Download URL: ' + url) 124 | try: 125 | node_url = resolve_node_url(url, gdriveid, timeout=60) 126 | logger.debug('Resolved URL: ' + node_url) 127 | except: 128 | import traceback 129 | logger.debug(traceback.format_exc()) 130 | return url 131 | default_node = re.match(r'http://(vod\d+)\.', node_url).group(1) 132 | if default_node not in nodes: 133 | nodes.insert(0, default_node) 134 | chosen = policy(node_url, nodes, gdriveid) 135 | if chosen: 136 | logger.debug('Switch to URL: ' + chosen) 137 | return chosen 138 | else: 139 | return node_url 140 | 141 | 142 | def use_fastest_node(url, vod_nodes, gdriveid): 143 | return use_node_by_policy(url, vod_nodes, gdriveid, get_best_node_url_from) 144 | 145 | def use_fast_node(url, vod_nodes, acceptable_speed, gdriveid): 146 | def policy(url, vod_nodes, gdriveid): 147 | return get_good_node_url_from(url, vod_nodes, acceptable_speed, gdriveid) 148 | return use_node_by_policy(url, vod_nodes, gdriveid, policy) 149 | 150 | -------------------------------------------------------------------------------- /lixian_plugins/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | def load_plugins_at(dir): 3 | import os 4 | import os.path 5 | import re 6 | home = os.path.dirname(os.path.dirname(__file__)) 7 | plugin_dir = os.path.join(home, dir.replace('.', '/')) 8 | plugins = os.listdir(plugin_dir) 9 | plugins = [re.sub(r'\.py$', '', p) for p in plugins if re.match(r'^[a-zA-Z]\w*\.py$', p)] 10 | for p in plugins: 11 | __import__(dir + '.' 
+ p) 12 | 13 | def load_plugins(): 14 | load_plugins_at('lixian_plugins.commands') 15 | load_plugins_at('lixian_plugins.queries') 16 | load_plugins_at('lixian_plugins.filters') 17 | load_plugins_at('lixian_plugins.parsers') 18 | load_plugins_at('lixian_plugins') 19 | 20 | load_plugins() 21 | -------------------------------------------------------------------------------- /lixian_plugins/api/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['command', 'register_alias', 3 | 'user_query', 'extract_info_hash_from_url', 'download_torrent_from_url', 4 | 'task_filter', 'name_filter', 5 | 'page_parser'] 6 | 7 | ################################################## 8 | # commands 9 | ################################################## 10 | 11 | from lixian_plugins.commands import command 12 | 13 | ################################################## 14 | # commands 15 | ################################################## 16 | 17 | from lixian_alias import register_alias 18 | 19 | ################################################## 20 | # queries 21 | ################################################## 22 | 23 | from lixian_query import user_query 24 | 25 | def extract_info_hash_from_url(regexp): 26 | import lixian_queries 27 | import re 28 | @user_query 29 | def processor(base, x): 30 | m = re.match(regexp, x) 31 | if m: 32 | return lixian_queries.BtHashQuery(base, m.group(1)) 33 | 34 | def download_torrent_from_url(regexp): 35 | import lixian_queries 36 | import re 37 | @user_query 38 | def processor(base, x): 39 | if re.match(regexp, x): 40 | return lixian_queries.bt_url_processor(base, x) 41 | 42 | ################################################## 43 | # filters 44 | ################################################## 45 | 46 | from lixian_plugins.filters import task_filter 47 | from lixian_plugins.filters import name_filter 48 | 49 | ################################################## 50 | # parsers 51 | ################################################## 52 | 53 | def page_parser(pattern): 54 | def f(extend_links): 55 | import lixian_plugins.parsers 56 | patterns = pattern if type(pattern) is list else [pattern] 57 | for p in patterns: 58 | lixian_plugins.parsers.register_parser(p, extend_links) 59 | return f 60 | 61 | 62 | ################################################## 63 | # download tools 64 | ################################################## 65 | 66 | from lixian_download_tools import download_tool 67 | 68 | 69 | -------------------------------------------------------------------------------- /lixian_plugins/commands/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = [] 3 | 4 | commands = {} 5 | 6 | extended_commands = [] 7 | 8 | def update_helps(commands): 9 | if commands: 10 | import lixian_help 11 | lixian_help.extended_usage = '''\nExtended commands: 12 | ''' + lixian_help.join_commands([(x[0], x[1]) for x in commands]) 13 | 14 | for name, usage, doc in commands: 15 | setattr(lixian_help, name, doc) 16 | 17 | def register_command(command): 18 | extended_commands.append(command) 19 | global commands 20 | commands = dict((x.command_name, x) for x in extended_commands) 21 | update_helps(sorted((x.command_name, x.command_usage, x.command_help) for x in extended_commands)) 22 | 23 | 24 | def command(name='', usage='', help=''): 25 | def as_command(f): 26 | assert usage, 'missing command usage: ' + f.func_name 27 | f.command_name = name or f.func_name.replace('_', '-') 28 | 
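# command_name falls back to the function name with underscores turned into hyphens
# (so a plugin function named list_torrent is exposed as the list-torrent command);
# usage is mandatory (asserted above), and command_help falls back to the docstring below.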
f.command_usage = usage 29 | f.command_help = help or f.func_doc 30 | import textwrap 31 | if f.command_help: 32 | f.command_help = textwrap.dedent(f.command_help) 33 | register_command(f) 34 | return f 35 | return as_command 36 | 37 | -------------------------------------------------------------------------------- /lixian_plugins/commands/aria2.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | from lixian_config import * 5 | from lixian_encoding import default_encoding 6 | from lixian_cli_parser import command_line_parser 7 | from lixian_cli_parser import with_parser 8 | from lixian_cli_parser import command_line_option, command_line_value 9 | from lixian_commands.util import parse_login, create_client 10 | 11 | def export_aria2_conf(args): 12 | client = create_client(args) 13 | import lixian_query 14 | tasks = lixian_query.search_tasks(client, args) 15 | files = [] 16 | for task in tasks: 17 | if task['type'] == 'bt': 18 | subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task) 19 | if not subs: 20 | continue 21 | if single_file: 22 | files.append((subs[0]['xunlei_url'], subs[0]['name'], None)) 23 | else: 24 | for f in subs: 25 | files.append((f['xunlei_url'], f['name'], task['name'])) 26 | else: 27 | files.append((task['xunlei_url'], task['name'], None)) 28 | output = '' 29 | for url, name, dir in files: 30 | if type(url) == unicode: 31 | url = url.encode(default_encoding) 32 | output += url + '\n' 33 | output += ' out=' + name.encode(default_encoding) + '\n' 34 | if dir: 35 | output += ' dir=' + dir.encode(default_encoding) + '\n' 36 | output += ' header=Cookie: gdriveid=' + client.get_gdriveid() + '\n' 37 | return output 38 | 39 | @command(usage='export task download urls as aria2 format') 40 | @command_line_parser() 41 | @with_parser(parse_login) 42 | @command_line_option('all') 43 | def export_aria2(args): 44 | ''' 45 | usage: lx export-aria2 [id|name]... 46 | ''' 47 | print export_aria2_conf(args) 48 | 49 | def download_aria2_stdin(aria2_conf, j): 50 | aria2_opts = ['aria2c', '-i', '-', '-j', j] 51 | aria2_opts.extend(get_config('aria2-opts', '').split()) 52 | from subprocess import Popen, PIPE 53 | sub = Popen(aria2_opts, stdin=PIPE, bufsize=1, shell=True) 54 | sub.communicate(aria2_conf) 55 | sub.stdin.close() 56 | exit_code = sub.wait() 57 | if exit_code != 0: 58 | raise Exception('aria2c exited abnormaly') 59 | 60 | def download_aria2_temp(aria2_conf, j): 61 | import tempfile 62 | temp = tempfile.NamedTemporaryFile('w', delete=False) 63 | temp.file.write(aria2_conf) 64 | temp.file.close() 65 | try: 66 | aria2_opts = ['aria2c', '-i', temp.name, '-j', j] 67 | aria2_opts.extend(get_config('aria2-opts', '').split()) 68 | import subprocess 69 | exit_code = subprocess.call(aria2_opts) 70 | finally: 71 | import os 72 | os.unlink(temp.name) 73 | if exit_code != 0: 74 | raise Exception('aria2c exited abnormaly') 75 | 76 | @command(usage='concurrently download tasks in aria2') 77 | @command_line_parser() 78 | @with_parser(parse_login) 79 | @command_line_option('all') 80 | @command_line_value('max-concurrent-downloads', alias='j', default=get_config('aria2-j', '5')) 81 | def download_aria2(args): 82 | ''' 83 | usage: lx download-aria2 -j 5 [id|name]... 
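the -j default comes from the 'aria2-j' config key, and extra aria2c options can be
stored (whitespace-separated) under 'aria2-opts'; both are read via get_config above, e.g.:
  lx config aria2-j 10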
84 | ''' 85 | aria2_conf = export_aria2_conf(args) 86 | import platform 87 | if platform.system() == 'Windows': 88 | download_aria2_temp(aria2_conf, args.max_concurrent_downloads) 89 | else: 90 | download_aria2_stdin(aria2_conf, args.max_concurrent_downloads) 91 | 92 | -------------------------------------------------------------------------------- /lixian_plugins/commands/decode_url.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | @command(usage='convert thunder:// (and more) to normal url') 5 | def decode_url(args): 6 | ''' 7 | usage: lx decode-url thunder://... 8 | ''' 9 | from lixian_url import url_unmask 10 | for x in args: 11 | print url_unmask(x) 12 | 13 | -------------------------------------------------------------------------------- /lixian_plugins/commands/diagnostics.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | @command(name='diagnostics', usage='print helpful information for diagnostics') 5 | def lx_diagnostics(args): 6 | ''' 7 | usage: lx diagnostics 8 | ''' 9 | from lixian_encoding import default_encoding 10 | print 'default_encoding ->', default_encoding 11 | import sys 12 | print 'sys.getdefaultencoding() ->', sys.getdefaultencoding() 13 | print 'sys.getfilesystemencoding() ->', sys.getfilesystemencoding() 14 | print r"print u'\u4e2d\u6587'.encode('utf-8') ->", u'\u4e2d\u6587'.encode('utf-8') 15 | print r"print u'\u4e2d\u6587'.encode('gbk') ->", u'\u4e2d\u6587'.encode('gbk') 16 | 17 | -------------------------------------------------------------------------------- /lixian_plugins/commands/echo.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | @command(usage='echo arguments') 5 | def echo(args): 6 | ''' 7 | lx echo ... 8 | ''' 9 | import lixian_cli_parser 10 | print ' '.join(lixian_cli_parser.expand_command_line(args)) 11 | 12 | -------------------------------------------------------------------------------- /lixian_plugins/commands/export_download_urls.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | 5 | from lixian_cli_parser import command_line_parser 6 | from lixian_cli_parser import with_parser 7 | from lixian_cli_parser import command_line_option, command_line_value 8 | from lixian_commands.util import parse_login, create_client 9 | 10 | @command(usage='export task download urls') 11 | @command_line_parser() 12 | @with_parser(parse_login) 13 | @command_line_option('all') 14 | @command_line_value('category') 15 | def export_download_urls(args): 16 | ''' 17 | usage: lx export-download-urls [id|name]... 
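at least one selector is required (see the assert just below); besides ids and names,
--all and --category are accepted:
  lx export-download-urls --all
  lx export-download-urls --category category-name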
18 | ''' 19 | assert len(args) or args.all or args.category, 'Not enough arguments' 20 | client = create_client(args) 21 | import lixian_query 22 | tasks = lixian_query.search_tasks(client, args) 23 | urls = [] 24 | for task in tasks: 25 | if task['type'] == 'bt': 26 | subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task) 27 | if not subs: 28 | continue 29 | if single_file: 30 | urls.append((subs[0]['xunlei_url'], subs[0]['name'], None)) 31 | else: 32 | for f in subs: 33 | urls.append((f['xunlei_url'], f['name'], task['name'])) 34 | else: 35 | urls.append((task['xunlei_url'], task['name'], None)) 36 | for url, _, _ in urls: 37 | print url 38 | -------------------------------------------------------------------------------- /lixian_plugins/commands/extend_links.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | @command(usage='parse links') 5 | def extend_links(args): 6 | ''' 7 | usage: lx extend-links http://kuai.xunlei.com/d/... http://www.verycd.com/topics/... 8 | 9 | parse and print links from pages 10 | 11 | lx extend-links urls... 12 | lx extend-links --name urls... 13 | ''' 14 | 15 | from lixian_cli_parser import parse_command_line 16 | from lixian_encoding import default_encoding 17 | 18 | args = parse_command_line(args, [], ['name']) 19 | import lixian_plugins.parsers 20 | if args.name: 21 | for x in lixian_plugins.parsers.extend_links_name(args): 22 | print x.encode(default_encoding) 23 | else: 24 | for x in lixian_plugins.parsers.extend_links(args): 25 | print x 26 | 27 | -------------------------------------------------------------------------------- /lixian_plugins/commands/get_torrent.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | from lixian_cli_parser import command_line_parser, command_line_option 5 | from lixian_cli_parser import with_parser 6 | from lixian_cli import parse_login 7 | from lixian_commands.util import create_client 8 | 9 | @command(name='get-torrent', usage='get .torrent by task id or info hash') 10 | @command_line_parser() 11 | @with_parser(parse_login) 12 | @command_line_option('rename', default=True) 13 | def get_torrent(args): 14 | ''' 15 | usage: lx get-torrent [info-hash|task-id]... 
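the .torrent file is written to the current directory and its path is printed;
by default (--rename) it is named after the torrent's internal name, with characters
unsafe in filenames replaced by '-', otherwise it is named after the info hash.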
16 | ''' 17 | client = create_client(args) 18 | for id in args: 19 | id = id.lower() 20 | import re 21 | if re.match(r'[a-fA-F0-9]{40}$', id): 22 | torrent = client.get_torrent_file_by_info_hash(id) 23 | elif re.match(r'\d+$', id): 24 | import lixian_query 25 | task = lixian_query.get_task_by_id(client, id) 26 | id = task['bt_hash'] 27 | id = id.lower() 28 | torrent = client.get_torrent_file_by_info_hash(id) 29 | else: 30 | raise NotImplementedError() 31 | if args.rename: 32 | import lixian_hash_bt 33 | from lixian_encoding import default_encoding 34 | info = lixian_hash_bt.bdecode(torrent)['info'] 35 | name = info['name'].decode(info.get('encoding', 'utf-8')).encode(default_encoding) 36 | import re 37 | name = re.sub(r'[\\/:*?"<>|]', '-', name) 38 | else: 39 | name = id 40 | path = name + '.torrent' 41 | print path 42 | with open(path, 'wb') as output: 43 | output.write(torrent) 44 | 45 | -------------------------------------------------------------------------------- /lixian_plugins/commands/hash.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | @command(name='hash', usage='compute hashes') 5 | def print_hash(args): 6 | ''' 7 | lx hash --sha1 file... 8 | lx hash --md5 file... 9 | lx hash --md4 file... 10 | lx hash --dcid file... 11 | lx hash --ed2k file... 12 | lx hash --info-hash xxx.torrent... 13 | lx hash --verify-sha1 file hash 14 | lx hash --verify-md5 file hash 15 | lx hash --verify-md4 file hash 16 | lx hash --verify-dcid file hash 17 | lx hash --verify-ed2k file ed2k://... 18 | lx hash --verify-bt file xxx.torrent 19 | ''' 20 | #assert len(args) == 1 21 | import lixian_hash 22 | #import lixian_hash_ed2k 23 | #print 'ed2k:', lixian_hash_ed2k.hash_file(args[0]) 24 | #print 'dcid:', lixian_hash.dcid_hash_file(args[0]) 25 | import lixian_cli_parser 26 | lixian_hash.main(lixian_cli_parser.expand_command_line(args)) 27 | 28 | -------------------------------------------------------------------------------- /lixian_plugins/commands/kuai.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | @command(usage='parse links from kuai.xunlei.com') 5 | def kuai(args): 6 | ''' 7 | usage: lx kuai http://kuai.xunlei.com/d/xxx... 8 | 9 | Note that you can simply use: 10 | lx add http://kuai.xunlei.com/d/xxx... 11 | or: 12 | lx download http://kuai.xunlei.com/d/xxx... 
13 | ''' 14 | import lixian_kuai 15 | lixian_kuai.main(args) 16 | 17 | -------------------------------------------------------------------------------- /lixian_plugins/commands/list_torrent.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import command 3 | 4 | from lixian_cli_parser import parse_command_line 5 | from lixian_config import get_config 6 | from lixian_encoding import default_encoding 7 | 8 | def b_encoding(b): 9 | if 'encoding' in b: 10 | return b['encoding'] 11 | if 'codepage' in b: 12 | return 'cp' + str(b['codepage']) 13 | return 'utf-8' 14 | 15 | def b_name(info, encoding='utf-8'): 16 | if 'name.utf-8' in info: 17 | return info['name.utf-8'].decode('utf-8') 18 | return info['name'].decode(encoding) 19 | 20 | def b_path(f, encoding='utf-8'): 21 | if 'path.utf-8' in f: 22 | return [p.decode('utf-8') for p in f['path.utf-8']] 23 | return [p.decode(encoding) for p in f['path']] 24 | 25 | @command(usage='list files in local .torrent') 26 | def list_torrent(args): 27 | ''' 28 | usage: lx list-torrent [--size] xxx.torrent... 29 | ''' 30 | args = parse_command_line(args, [], ['size'], default={'size':get_config('size')}) 31 | torrents = args 32 | if not torrents: 33 | from glob import glob 34 | torrents = glob('*.torrent') 35 | if not torrents: 36 | raise Exception('No .torrent file found') 37 | for p in torrents: 38 | with open(p, 'rb') as stream: 39 | from lixian_hash_bt import bdecode 40 | b = bdecode(stream.read()) 41 | encoding = b_encoding(b) 42 | info = b['info'] 43 | from lixian_util import format_size 44 | if args.size: 45 | size = sum(f['length'] for f in info['files']) if 'files' in info else info['length'] 46 | print '*', b_name(info, encoding).encode(default_encoding), format_size(size) 47 | else: 48 | print '*', b_name(info, encoding).encode(default_encoding) 49 | if 'files' in info: 50 | for f in info['files']: 51 | if f['path'][0].startswith('_____padding_file_'): 52 | continue 53 | path = '/'.join(b_path(f, encoding)).encode(default_encoding) 54 | if args.size: 55 | print '%s (%s)' % (path, format_size(f['length'])) 56 | else: 57 | print path 58 | else: 59 | path = b_name(info, encoding).encode(default_encoding) 60 | if args.size: 61 | from lixian_util import format_size 62 | print '%s (%s)' % (path, format_size(info['length'])) 63 | else: 64 | print path 65 | 66 | -------------------------------------------------------------------------------- /lixian_plugins/commands/speed_test.py: -------------------------------------------------------------------------------- 1 | from lixian_plugins.api import command 2 | 3 | 4 | from lixian_cli_parser import command_line_parser 5 | from lixian_cli_parser import with_parser 6 | from lixian_cli_parser import command_line_option, command_line_value 7 | from lixian_commands.util import parse_login, parse_colors, create_client 8 | from lixian_config import get_config 9 | 10 | from lixian_encoding import default_encoding 11 | from lixian_colors import colors 12 | 13 | import lixian_nodes 14 | 15 | @command(usage='test download speed from multiple vod nodes') 16 | @command_line_parser() 17 | @with_parser(parse_login) 18 | @with_parser(parse_colors) 19 | @command_line_value('vod-nodes', default=get_config('vod-nodes', lixian_nodes.VOD_RANGE)) 20 | def speed_test(args): 21 | ''' 22 | usage: lx speed_test [--vod-nodes=0-50] [id|name] 23 | ''' 24 | assert len(args) 25 | client = create_client(args) 26 | import lixian_query 27 | tasks = lixian_query.search_tasks(client, 
args) 28 | if not tasks: 29 | raise Exception('No task found') 30 | task = tasks[0] 31 | urls = [] 32 | if task['type'] == 'bt': 33 | subs, skipped, single_file = lixian_query.expand_bt_sub_tasks(task) 34 | if not subs: 35 | raise Exception('No files found') 36 | subs = [f for f in subs if f['size'] > 1000*1000] or subs # skip files with length < 1M 37 | if single_file: 38 | urls.append((subs[0]['xunlei_url'], subs[0]['name'], None)) 39 | else: 40 | for f in subs: 41 | urls.append((f['xunlei_url'], f['name'], task['name'])) 42 | else: 43 | urls.append((task['xunlei_url'], task['name'], None)) 44 | url, filename, dirname = urls[0] 45 | name = dirname + '/' + filename if dirname else filename 46 | test_file(client, url, name, args) 47 | 48 | def test_file(client, url, name, options): 49 | with colors(options.colors).cyan(): 50 | print name.encode(default_encoding) 51 | # print 'File:', name.encode(default_encoding) 52 | # print 'Address:', url 53 | node_url = lixian_nodes.resolve_node_url(url, client.get_gdriveid(), timeout=3) 54 | # print 'Node:', node_url 55 | test_nodes(node_url, client.get_gdriveid(), options) 56 | 57 | def test_nodes(node_url, gdriveid, options): 58 | nodes = lixian_nodes.parse_vod_nodes(options.vod_nodes) 59 | best = None 60 | best_speed = 0 61 | for node in nodes: 62 | # print 'Node:', node 63 | url = lixian_nodes.switch_node_in_url(node_url, node) 64 | try: 65 | speed = lixian_nodes.get_node_url_speed(url, gdriveid) 66 | if best_speed < speed: 67 | best = node 68 | best_speed = speed 69 | kb = int(speed/1000) 70 | # print 'Speed: %dKB/s' % kb, '.' * (kb /100) 71 | show_node_speed(node, kb, options) 72 | except Exception, e: 73 | show_node_error(node, e, options) 74 | if best: 75 | with colors(options.colors).green(): 76 | print best, 77 | print "is the fastest node!" 78 | 79 | def show_node_speed(node, kb, options): 80 | node = "%-5s " % node 81 | speed = '%dKB/s' % kb 82 | bar = '.' 
* (kb /100) 83 | whitespaces = ' ' * (79 - len(node) - len(bar) - len(speed)) 84 | if kb >= 1000: 85 | with colors(options.colors).green(): 86 | # print node + bar + whitespaces + speed 87 | with colors(options.colors).bold(): 88 | print node[:-1], 89 | print bar + whitespaces + speed 90 | else: 91 | print node + bar + whitespaces + speed 92 | 93 | def show_node_error(node, e, options): 94 | with colors(options.colors).red(): 95 | print "%-5s %s" % (node, e) 96 | 97 | -------------------------------------------------------------------------------- /lixian_plugins/filters/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | import re 3 | 4 | name_filters = {} 5 | task_filters = {} 6 | 7 | def find_matcher(keyword, filters): 8 | for p in filters: 9 | if re.search(p, keyword): 10 | return filters[p] 11 | 12 | def has_task_filter(keyword): 13 | return bool(find_matcher(keyword, task_filters)) 14 | 15 | def filter_tasks_with_matcher(tasks, keyword, (mode, m)): 16 | if mode == 'single': 17 | return filter(lambda x: m(keyword, x), tasks) 18 | elif mode == 'batch': 19 | return m(keyword, tasks) 20 | else: 21 | raise NotImplementedError(mode) 22 | 23 | def filter_tasks(tasks, keyword): 24 | m = find_matcher(keyword, task_filters) 25 | if m: 26 | return filter_tasks_with_matcher(tasks, keyword, m) 27 | 28 | def filter_things(things, keyword): 29 | if not things: 30 | # XXX: neither None or things should be OK 31 | return things 32 | assert len(set(map(type, things))) == 1 33 | filters = task_filters if type(things[0]) == dict else name_filters 34 | m = find_matcher(keyword, filters) 35 | if m: 36 | return filter_tasks_with_matcher(things, keyword, m) 37 | 38 | def define_task_filter(pattern, matcher, batch=False): 39 | task_filters[pattern] = ('batch' if batch else 'single', matcher) 40 | 41 | def define_name_filter(pattern, matcher): 42 | name_filters[pattern] = ('single', matcher) 43 | task_filters[pattern] = ('single', lambda k, x: matcher(k, x['name'])) 44 | 45 | def task_filter(pattern=None, protocol=None, batch=False): 46 | assert bool(pattern) ^ bool(protocol) 47 | def define_filter(matcher): 48 | if pattern: 49 | define_task_filter(pattern, matcher, batch) 50 | else: 51 | assert re.match(r'^[\w-]+$', protocol), protocol 52 | define_task_filter(r'^%s:' % protocol, lambda k, x: matcher(re.sub(r'^[\w-]+:', '', k), x), batch) 53 | return matcher 54 | return define_filter 55 | 56 | def name_filter(pattern=None, protocol=None): 57 | # FIXME: duplicate code 58 | assert bool(pattern) ^ bool(protocol) 59 | def define_filter(matcher): 60 | if pattern: 61 | define_name_filter(pattern, matcher) 62 | else: 63 | assert re.match(r'^\w+$', protocol), protocol 64 | define_name_filter(r'^%s:' % protocol, lambda k, x: matcher(re.sub(r'^\w+:', '', k), x)) 65 | return matcher 66 | return define_filter 67 | 68 | -------------------------------------------------------------------------------- /lixian_plugins/filters/date.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import task_filter 3 | 4 | @task_filter(pattern=r'^\d{4}[-.]\d{2}[-.]\d{2}$') 5 | def filter_by_date(keyword, task): 6 | return task['date'] == keyword.replace('-', '.') 7 | 8 | -------------------------------------------------------------------------------- /lixian_plugins/filters/name.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import name_filter 3 | 4 | 
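# The name: prefix forces a plain, case-insensitive substring match on the task name.
# It is useful when the bare keyword would be interpreted as something else first
# (a task id, a date, ...), e.g.:
#   lx list name:2012
#   lx download name:mkv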
@name_filter(protocol='name') 5 | def filter_by_raw_text(keyword, name): 6 | return keyword.lower() in name.lower() 7 | 8 | -------------------------------------------------------------------------------- /lixian_plugins/filters/raw.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import name_filter 3 | 4 | @name_filter(protocol='raw') 5 | def filter_by_raw_text(keyword, name): 6 | return keyword.lower() in name.lower() 7 | 8 | -------------------------------------------------------------------------------- /lixian_plugins/filters/regexp.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import name_filter 3 | 4 | import re 5 | 6 | @name_filter(protocol='regexp') 7 | def filter_by_regexp(keyword, name): 8 | return re.search(keyword, name) 9 | -------------------------------------------------------------------------------- /lixian_plugins/filters/size.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import task_filter 3 | 4 | import re 5 | 6 | @task_filter(protocol='size') 7 | def filter_by_size(keyword, task): 8 | ''' 9 | Example: 10 | lx download size:10m- 11 | lx download size:1G+ 12 | lx download 0/size:1g- 13 | ''' 14 | m = re.match(r'^([<>])?(\d+(?:\.\d+)?)([GM])?([+-])?$', keyword, flags=re.I) 15 | assert m, keyword 16 | less_or_great, n, u, less_or_more = m.groups() 17 | assert bool(less_or_great) ^ bool(less_or_more), 'must bt size, size-, or size+' 18 | size = float(n) * {None: 1, 'G': 1000**3, 'g': 1000**3, 'M': 1000**2, 'm': 1000**2}[u] 19 | if less_or_great == '<' or less_or_more == '-': 20 | return task['size'] < size 21 | else: 22 | return task['size'] > size 23 | 24 | -------------------------------------------------------------------------------- /lixian_plugins/filters/sort.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import task_filter 3 | 4 | @task_filter(protocol='sort', batch=True) 5 | def sort_by_name(keyword, tasks): 6 | ''' 7 | Example: 8 | lx list sort: 9 | lx download 0/sort:/[0-1] 10 | ''' 11 | return sorted(tasks, key=lambda x: x['name']) 12 | -------------------------------------------------------------------------------- /lixian_plugins/filters/total_size.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import task_filter 3 | 4 | import re 5 | 6 | @task_filter(protocol='total-size', batch=True) 7 | def fetch_by_total_size(keyword, tasks): 8 | ''' 9 | Example: 10 | lx download total_size:1g 11 | lx download 0/total_size:1g 12 | lx list total_size:1g 13 | ''' 14 | m = re.match(r'^(\d+(?:\.\d+)?)([GM])?$', keyword, flags=re.I) 15 | assert m, keyword 16 | n, u = m.groups() 17 | limit = float(n) * {None: 1, 'G': 1000**3, 'g': 1000**3, 'M': 1000**2, 'm': 1000**2}[u] 18 | total = 0 19 | results = [] 20 | for t in tasks: 21 | total += t['size'] 22 | if total <= limit: 23 | results.append(t) 24 | else: 25 | return results 26 | return results 27 | 28 | -------------------------------------------------------------------------------- /lixian_plugins/parsers/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | import re 3 | 4 | page_parsers = {} 5 | 6 | def register_parser(site, extend_link): 7 | page_parsers[site] = extend_link 8 | 9 | 10 | def in_site(url, site): 11 | if 
url.startswith(site): 12 | return True 13 | if '*' in site: 14 | import fnmatch 15 | p = fnmatch.translate(site) 16 | return re.match(p, url) 17 | 18 | def find_parser(link): 19 | for p in page_parsers: 20 | if in_site(link, p): 21 | return page_parsers[p] 22 | 23 | 24 | def to_name(x): 25 | if type(x) == dict: 26 | return x['name'] 27 | else: 28 | return x 29 | 30 | def to_url(x): 31 | if type(x) == dict: 32 | return x['url'] 33 | else: 34 | return x 35 | 36 | def parse_pattern(link): 37 | m = re.search(r'[^:]//', link) 38 | if m: 39 | u = link[:m.start()+1] 40 | p = link[m.start()+3:] 41 | assert '//' not in p, link 42 | if p.endswith('/'): 43 | u += '/' 44 | p = p[:-1] 45 | return u, p 46 | 47 | def try_to_extend_link(link): 48 | parser = find_parser(link) 49 | if parser: 50 | x = parse_pattern(link) 51 | if x: 52 | links = parser(x[0]) 53 | import lixian_filter_expr 54 | return lixian_filter_expr.filter_expr(links, x[1]) 55 | else: 56 | return parser(link) 57 | 58 | def extend_link(link): 59 | return try_to_extend_link(link) or [link] 60 | 61 | def extend_links_rich(links): 62 | return sum(map(extend_link, links), []) 63 | 64 | def extend_links(links): 65 | return map(to_url, extend_links_rich(links)) 66 | 67 | def extend_links_name(links): 68 | return map(to_name, extend_links_rich(links)) 69 | 70 | -------------------------------------------------------------------------------- /lixian_plugins/parsers/icili.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import page_parser 3 | 4 | import urllib2 5 | import re 6 | 7 | def icili_links(url): 8 | assert url.startswith('http://www.icili.com/emule/download/'), url 9 | html = urllib2.urlopen(url).read() 10 | table = re.search(r'.*?
', html, flags=re.S).group() 11 | links = re.findall(r'value="(ed2k://[^"]+)"', table) 12 | return links 13 | 14 | @page_parser('http://www.icili.com/emule/download/') 15 | def extend_link(url): 16 | links = icili_links(url) 17 | from lixian_hash_ed2k import parse_ed2k_file 18 | return [{'url':x, 'name':parse_ed2k_file(x)} for x in links] 19 | 20 | -------------------------------------------------------------------------------- /lixian_plugins/parsers/kuai.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import page_parser 3 | 4 | import urllib 5 | import re 6 | 7 | def generate_lixian_url(info): 8 | print info['url'] 9 | info = dict(info) 10 | info['namehex'] = '0102' 11 | info['fid'] = re.search(r'fid=([^&]+)', info['url']).group(1) 12 | info['tid'] = re.search(r'tid=([^&]+)', info['url']).group(1) 13 | info['internalid'] = '111' 14 | info['taskid'] = 'xxx' 15 | return 'http://gdl.lixian.vip.xunlei.com/download?fid=%(fid)s&mid=666&threshold=150&tid=%(tid)s&srcid=4&verno=1&g=%(gcid)s&scn=t16&i=%(gcid)s&t=1&ui=%(internalid)s&ti=%(taskid)s&s=%(size)s&m=0&n=%(namehex)s' % info 16 | 17 | def parse_link(html): 18 | attrs = dict(re.findall(r'(\w+)="([^"]+)"', html)) 19 | if 'file_url' not in attrs: 20 | return 21 | keys = {'url': 'file_url', 'name':'file_name', 'size':'file_size', 'gcid':'gcid', 'cid':'cid', 'gcid_resid':'gcid_resid'} 22 | info = {} 23 | for k in keys: 24 | info[k] = attrs[keys[k]] 25 | #info['name'] = urllib.unquote(info['name']) 26 | return info 27 | 28 | @page_parser('http://kuai.xunlei.com/d/') 29 | def kuai_links(url): 30 | assert url.startswith('http://kuai.xunlei.com/d/'), url 31 | html = urllib.urlopen(url).read().decode('utf-8') 32 | #return re.findall(r'file_url="([^"]+)"', html) 33 | #return map(parse_link, re.findall(r'', html, flags=re.S)) 34 | return filter(bool, map(parse_link, re.findall(r'.*?', html, flags=re.S))) 35 | 36 | extend_link = kuai_links 37 | 38 | def main(args): 39 | from lixian_cli_parser import parse_command_line 40 | args = parse_command_line(args, [], ['name']) 41 | for x in args: 42 | for v in kuai_links(x): 43 | if args.name: 44 | print v['name'] 45 | else: 46 | print v['url'] 47 | 48 | 49 | if __name__ == '__main__': 50 | import sys 51 | main(sys.argv[1:]) 52 | 53 | -------------------------------------------------------------------------------- /lixian_plugins/parsers/qjwm.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import page_parser 3 | 4 | import urllib2 5 | import re 6 | 7 | def qjwm_link(url): 8 | assert re.match(r'http://.*\.qjwm\.com/down(load)?_\d+.html', url) 9 | url = url.replace('/down_', '/download_') 10 | html = urllib2.urlopen(url).read() 11 | m = re.search(r'var thunder_url = "([^"]+)";', html) 12 | if m: 13 | url = m.group(1) 14 | url = url.decode('gbk') 15 | return url 16 | 17 | 18 | @page_parser('http://*.qjwm.com/*') 19 | def extend_link(url): 20 | url = qjwm_link(url) 21 | return url and [url] or [] 22 | 23 | -------------------------------------------------------------------------------- /lixian_plugins/parsers/simplecd.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import page_parser 3 | 4 | import urllib2 5 | import re 6 | 7 | 8 | def simplecd_links(url): 9 | m = re.match(r'(http://(?:www\.)?s[ia]mplecd\.\w+/)(id|entry)/', url) 10 | assert m, url 11 | site = m.group(1) 12 | html = urllib2.urlopen(url).read() 13 
| ids = re.findall(r'value="(\w+)"\s+name="selectemule"', html) 14 | form = '&'.join('rid=' + id for id in ids) 15 | q = 'mode=copy&' + form 16 | html = urllib2.urlopen(site + 'download/?' + q).read() 17 | table = re.search(r'', html, flags=re.S).group() 18 | links = re.findall(r'ed2k://[^\s<>]+', table) 19 | import lixian_url 20 | return map(lixian_url.normalize_unicode_link, links) 21 | 22 | @page_parser(['http://simplecd.*/', 23 | 'http://www.simplecd.*/', 24 | 'http://samplecd.*/', 25 | 'http://www.samplecd.*/']) 26 | def extend_link(url): 27 | links = simplecd_links(url) 28 | from lixian_hash_ed2k import parse_ed2k_file 29 | return [{'url':x, 'name':parse_ed2k_file(x)} for x in links] 30 | 31 | -------------------------------------------------------------------------------- /lixian_plugins/parsers/verycd.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import page_parser 3 | 4 | import urllib2 5 | import re 6 | 7 | def parse_links(html): 8 | html = re.search(r'.*?', html, re.S).group() 9 | links = re.findall(r'value="([^"]+)"', html) 10 | return [x for x in links if x.startswith('ed2k://')] 11 | 12 | def verycd_links(url): 13 | assert url.startswith('http://www.verycd.com/topics/'), url 14 | return parse_links(urllib2.urlopen(url).read()) 15 | 16 | @page_parser('http://www.verycd.com/topics/') 17 | def extend_link(url): 18 | links = verycd_links(url) 19 | from lixian_hash_ed2k import parse_ed2k_file 20 | return [{'url':x, 'name':parse_ed2k_file(x)} for x in links] 21 | 22 | -------------------------------------------------------------------------------- /lixian_plugins/queries/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /lixian_plugins/queries/torrentz.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_plugins.api import extract_info_hash_from_url 3 | 4 | extract_info_hash_from_url(r'^http://torrentz.eu/([0-9a-f]{40})$') 5 | 6 | -------------------------------------------------------------------------------- /lixian_progress.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | 4 | class SimpleProgressBar: 5 | def __init__(self): 6 | self.displayed = False 7 | def update(self, percent): 8 | self.displayed = True 9 | bar_size = 40 10 | percent *= 100.0 11 | if percent > 100: 12 | percent = 100.0 13 | dots = int(bar_size * percent / 100) 14 | plus = percent / 100 * bar_size - dots 15 | if plus > 0.8: 16 | plus = '=' 17 | elif plus > 0.4: 18 | plus = '-' 19 | else: 20 | plus = '' 21 | percent = int(percent) 22 | bar = '=' * dots + plus 23 | bar = '{0:>3}%[{1:<40}]'.format(percent, bar) 24 | sys.stdout.write('\r'+bar) 25 | sys.stdout.flush() 26 | def done(self): 27 | if self.displayed: 28 | print 29 | self.displayed = False 30 | 31 | -------------------------------------------------------------------------------- /lixian_queries.py: -------------------------------------------------------------------------------- 1 | 2 | from lixian_query import ExactQuery 3 | from lixian_query import SearchQuery 4 | from lixian_query import query 5 | from lixian_query import bt_query 6 | 7 | import lixian_hash_bt 8 | import lixian_url 9 | import lixian_encoding 10 | 11 | import re 12 | 13 | ################################################## 14 | # queries 15 | 
################################################## 16 | 17 | class SingleTaskQuery(ExactQuery): 18 | def __init__(self, base, t): 19 | super(SingleTaskQuery, self).__init__(base) 20 | self.id = t['id'] 21 | 22 | def query_once(self): 23 | return [self.base.get_task_by_id(self.id)] 24 | 25 | def query_search(self): 26 | t = self.base.find_task_by_id(self.id) 27 | return [t] if t else [] 28 | 29 | 30 | @query(priority=1) 31 | @bt_query(priority=1) 32 | def single_id_processor(base, x): 33 | if not re.match(r'^\d+/?$', x): 34 | return 35 | n = x.rstrip('/') 36 | t = base.find_task_by_id(n) 37 | if t: 38 | return SingleTaskQuery(base, t) 39 | 40 | ################################################## 41 | 42 | class MultipleTasksQuery(ExactQuery): 43 | def __init__(self, base, tasks): 44 | super(MultipleTasksQuery, self).__init__(base) 45 | self.tasks = tasks 46 | 47 | def query_once(self): 48 | return map(self.base.get_task_by_id, (t['id'] for t in self.tasks)) 49 | 50 | def query_search(self): 51 | return filter(bool, map(self.base.find_task_by_id, (t['id'] for t in self.tasks))) 52 | 53 | @query(priority=1) 54 | @bt_query(priority=1) 55 | def range_id_processor(base, x): 56 | m = re.match(r'^(\d+)-(\d+)$', x) 57 | if not m: 58 | return 59 | begin = int(m.group(1)) 60 | end = int(m.group(2)) 61 | tasks = base.get_tasks() 62 | if begin <= end: 63 | found = filter(lambda x: begin <= x['#'] <= end, tasks) 64 | else: 65 | found = reversed(filter(lambda x: end <= x['#'] <= begin, tasks)) 66 | if found: 67 | return MultipleTasksQuery(base, found) 68 | 69 | ################################################## 70 | 71 | class SubTaskQuery(ExactQuery): 72 | def __init__(self, base, t, subs): 73 | super(SubTaskQuery, self).__init__(base) 74 | self.task = t 75 | self.subs = subs 76 | 77 | def query_once(self): 78 | task = dict(self.base.get_task_by_id(self.task['id'])) 79 | files = self.base.get_files(task) 80 | task['files'] = self.subs 81 | return [task] 82 | 83 | def query_search(self): 84 | task = self.base.find_task_by_id(self.task['id']) 85 | if not task: 86 | return [] 87 | task = dict(task) 88 | files = self.base.get_files(task) 89 | task['files'] = self.subs 90 | return [task] 91 | 92 | @query(priority=2) 93 | @bt_query(priority=2) 94 | def sub_id_processor(base, x): 95 | x = lixian_encoding.from_native(x) 96 | 97 | m = re.match(r'^(\d+)/(.+)$', x) 98 | if not m: 99 | return 100 | task_id, sub_id = m.groups() 101 | task = base.find_task_by_id(task_id) 102 | if not task: 103 | return 104 | 105 | assert task['type'] == 'bt', 'task %s is not a bt task' % lixian_encoding.to_native(task['name']) 106 | files = base.get_files(task) 107 | import lixian_filter_expr 108 | files = lixian_filter_expr.filter_expr(files, sub_id) 109 | subs = [x for x in files] 110 | return SubTaskQuery(base, task, subs) 111 | 112 | ################################################## 113 | 114 | class BtHashQuery(ExactQuery): 115 | def __init__(self, base, x): 116 | super(BtHashQuery, self).__init__(base) 117 | self.hash = re.match(r'^(?:bt://)?([0-9a-f]{40})$', x, flags=re.I).group(1).lower() 118 | self.task = self.base.find_task_by_hash(self.hash) 119 | 120 | def prepare(self): 121 | if not self.task: 122 | self.base.add_bt_task_by_hash(self.hash) 123 | 124 | def query_once(self): 125 | t = self.base.find_task_by_hash(self.hash) 126 | assert t, 'Task not found: bt://' + self.hash 127 | return [t] 128 | 129 | def query_search(self): 130 | t = self.base.find_task_by_hash(self.hash) 131 | return [t] if t else [] 132 | 133 | 
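# A bare 40-hex info hash, with or without the bt:// prefix, is handled by BtHashQuery:
# if no task with that hash exists yet, prepare() first submits it via add_bt_task_by_hash(),
# so a not-yet-added hash can be fetched in one step, e.g.:
#   lx download bt://torrent-info-hash
#   lx download torrent-info-hash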
@query(priority=1) 134 | @bt_query(priority=1) 135 | def bt_hash_processor(base, x): 136 | if re.match(r'^(bt://)?[0-9a-f]{40}$', x, flags=re.I): 137 | return BtHashQuery(base, x) 138 | 139 | ################################################## 140 | 141 | class LocalBtQuery(ExactQuery): 142 | def __init__(self, base, x): 143 | super(LocalBtQuery, self).__init__(base) 144 | self.path = x 145 | self.hash = lixian_hash_bt.info_hash(self.path) 146 | self.task = self.base.find_task_by_hash(self.hash) 147 | with open(self.path, 'rb') as stream: 148 | self.torrent = stream.read() 149 | 150 | def prepare(self): 151 | if not self.task: 152 | self.base.add_bt_task_by_content(self.torrent, self.path) 153 | 154 | def query_once(self): 155 | t = self.base.find_task_by_hash(self.hash) 156 | assert t, 'Task not found: bt://' + self.hash 157 | return [t] 158 | 159 | def query_search(self): 160 | t = self.base.find_task_by_hash(self.hash) 161 | return [t] if t else [] 162 | 163 | @query(priority=1) 164 | @bt_query(priority=1) 165 | def local_bt_processor(base, x): 166 | import os.path 167 | if x.lower().endswith('.torrent') and os.path.exists(x): 168 | return LocalBtQuery(base, x) 169 | 170 | ################################################## 171 | 172 | class MagnetQuery(ExactQuery): 173 | def __init__(self, base, x): 174 | super(MagnetQuery, self).__init__(base) 175 | self.url = x 176 | self.hash = lixian_hash_bt.magnet_to_infohash(x).encode('hex').lower() 177 | self.task = self.base.find_task_by_hash(self.hash) 178 | 179 | def prepare(self): 180 | if not self.task: 181 | self.base.add_magnet_task(self.url) 182 | 183 | def query_once(self): 184 | t = self.base.find_task_by_hash(self.hash) 185 | assert t, 'Task not found: bt://' + self.hash 186 | return [t] 187 | 188 | def query_search(self): 189 | t = self.base.find_task_by_hash(self.hash) 190 | return [t] if t else [] 191 | 192 | @query(priority=4) 193 | @bt_query(priority=4) 194 | def magnet_processor(base, url): 195 | if re.match(r'magnet:', url): 196 | return MagnetQuery(base, url) 197 | 198 | ################################################## 199 | 200 | class BatchUrlsQuery(ExactQuery): 201 | def __init__(self, base, urls): 202 | super(BatchUrlsQuery, self).__init__(base) 203 | self.urls = urls 204 | 205 | def prepare(self): 206 | for url in self.urls: 207 | if not self.base.find_task_by_url(url): 208 | self.base.add_url_task(url) 209 | 210 | def query_once(self): 211 | return map(self.base.get_task_by_url, self.urls) 212 | 213 | def query_search(self): 214 | return filter(bool, map(self.base.find_task_by_url, self.urls)) 215 | 216 | @query(priority=6) 217 | @bt_query(priority=6) 218 | def url_extend_processor(base, url): 219 | import lixian_plugins.parsers 220 | extended = lixian_plugins.parsers.try_to_extend_link(url) 221 | if extended: 222 | extended = map(lixian_plugins.parsers.to_url, extended) 223 | return BatchUrlsQuery(base, extended) 224 | 225 | ################################################## 226 | 227 | class UrlQuery(ExactQuery): 228 | def __init__(self, base, x): 229 | super(UrlQuery, self).__init__(base) 230 | self.url = lixian_url.url_unmask(x) 231 | self.task = self.base.find_task_by_url(self.url) 232 | 233 | def prepare(self): 234 | if not self.task: 235 | self.base.add_url_task(self.url) 236 | 237 | def query_once(self): 238 | t = self.base.find_task_by_url(self.url) 239 | assert t, 'Task not found: ' + self.url 240 | return [t] 241 | 242 | def query_search(self): 243 | t = self.base.find_task_by_url(self.url) 244 | return [t] if 
t else [] 245 | 246 | @query(priority=7) 247 | def url_processor(base, url): 248 | if re.match(r'\w+://', url): 249 | return UrlQuery(base, url) 250 | 251 | ################################################## 252 | 253 | class BtUrlQuery(ExactQuery): 254 | def __init__(self, base, url, torrent): 255 | super(BtUrlQuery, self).__init__(base) 256 | self.url = url 257 | self.torrent = torrent 258 | self.hash = lixian_hash_bt.info_hash_from_content(self.torrent) 259 | self.task = self.base.find_task_by_hash(self.hash) 260 | 261 | def prepare(self): 262 | if not self.task: 263 | self.base.add_bt_task_by_content(self.torrent, self.url) 264 | 265 | def query_once(self): 266 | t = self.base.find_task_by_hash(self.hash) 267 | assert t, 'Task not found: bt://' + self.hash 268 | return [t] 269 | 270 | def query_search(self): 271 | t = self.base.find_task_by_hash(self.hash) 272 | return [t] if t else [] 273 | 274 | @bt_query(priority=7) 275 | def bt_url_processor(base, url): 276 | if not re.match(r'http://', url): 277 | return 278 | print 'Downloading torrent file from', url 279 | import urllib2 280 | response = urllib2.urlopen(url, timeout=60) 281 | torrent = response.read() 282 | if response.info().get('Content-Encoding') == 'gzip': 283 | def ungzip(s): 284 | from StringIO import StringIO 285 | import gzip 286 | buffer = StringIO(s) 287 | f = gzip.GzipFile(fileobj=buffer) 288 | return f.read() 289 | torrent = ungzip(torrent) 290 | return BtUrlQuery(base, url, torrent) 291 | 292 | ################################################## 293 | 294 | class FilterQuery(SearchQuery): 295 | def __init__(self, base, x): 296 | super(FilterQuery, self).__init__(base) 297 | self.keyword = x 298 | 299 | def query_search(self): 300 | import lixian_plugins.filters 301 | tasks = lixian_plugins.filters.filter_tasks(self.base.get_tasks(), self.keyword) 302 | assert tasks is not None 303 | return tasks 304 | 305 | @query(priority=8) 306 | @bt_query(priority=8) 307 | def filter_processor(base, x): 308 | import lixian_plugins.filters 309 | if lixian_plugins.filters.has_task_filter(x): 310 | return FilterQuery(base, x) 311 | 312 | ################################################## 313 | 314 | class DefaultQuery(SearchQuery): 315 | def __init__(self, base, x): 316 | super(DefaultQuery, self).__init__(base) 317 | self.text = lixian_encoding.from_native(x) 318 | 319 | def query_search(self): 320 | return filter(lambda t: t['name'].lower().find(self.text.lower()) != -1, self.base.get_tasks()) 321 | 322 | @query(priority=9) 323 | @bt_query(priority=9) 324 | def default_processor(base, x): 325 | return DefaultQuery(base, x) 326 | 327 | -------------------------------------------------------------------------------- /lixian_query.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['query', 'bt_query', 'user_query', 'Query', 'ExactQuery', 'SearchQuery', 3 | 'build_query', 'find_tasks_to_download', 'search_tasks', 'expand_bt_sub_tasks'] 4 | 5 | import lixian_hash_bt 6 | import lixian_hash_ed2k 7 | import lixian_encoding 8 | 9 | 10 | def link_normalize(url): 11 | from lixian_url import url_unmask, normalize_unicode_link 12 | url = url_unmask(url) 13 | if url.startswith('magnet:'): 14 | return 'bt://'+lixian_hash_bt.magnet_to_infohash(url).encode('hex') 15 | elif url.startswith('ed2k://'): 16 | return lixian_hash_ed2k.parse_ed2k_id(url) 17 | elif url.startswith('bt://'): 18 | return url.lower() 19 | elif url.startswith('http://') or url.startswith('ftp://'): 20 | return 
normalize_unicode_link(url) 21 | return url 22 | 23 | def link_equals(x1, x2): 24 | return link_normalize(x1) == link_normalize(x2) 25 | 26 | 27 | class TaskBase(object): 28 | def __init__(self, client, list_tasks, limit=None): 29 | self.client = client 30 | self.fetch_tasks_unlimited = list_tasks 31 | self.limit = limit 32 | 33 | self.queries = [] 34 | 35 | self.tasks = None 36 | self.files = {} 37 | 38 | self.commit_jobs = [[], []] 39 | 40 | self.download_jobs = [] 41 | 42 | def fetch_tasks(self): 43 | if self.limit: 44 | with self.client.attr(limit=self.limit): 45 | return self.fetch_tasks_unlimited() 46 | else: 47 | return self.fetch_tasks_unlimited() 48 | 49 | def register_queries(self, queries): 50 | self.queries += queries 51 | 52 | def unregister_query(self, query): 53 | self.queries.remove(query) 54 | 55 | def get_tasks(self): 56 | if self.tasks is None: 57 | self.tasks = self.fetch_tasks() 58 | return self.tasks 59 | 60 | def refresh_tasks(self): 61 | self.tasks = self.fetch_tasks() 62 | return self.tasks 63 | 64 | def get_files(self, task): 65 | assert isinstance(task, dict), task 66 | id = task['id'] 67 | if id in self.files: 68 | return self.files[id] 69 | self.files[id] = self.client.list_bt(task) 70 | return self.files[id] 71 | 72 | def find_task_by_id(self, id): 73 | assert isinstance(id, basestring), repr(id) 74 | for t in self.get_tasks(): 75 | if t['id'] == str(id) or t['#'] == int(id): 76 | return t 77 | 78 | def get_task_by_id(self, id): 79 | t = self.find_task_by_id(id) 80 | if not t: 81 | raise Exception('No task found for id '+id) 82 | return t 83 | 84 | def find_task_by_hash(self, hash): 85 | for t in self.get_tasks(): 86 | if t['type'] == 'bt' and t['bt_hash'].lower() == hash: 87 | return t 88 | 89 | def find_task_by_url(self, url): 90 | for t in self.get_tasks(): 91 | if link_equals(t['original_url'], url): 92 | return t 93 | 94 | def get_task_by_url(self, url): 95 | t = self.find_task_by_url(url) 96 | if not t: 97 | raise Exception('No task found for ' + lixian_encoding.to_native(url)) 98 | return t 99 | 100 | def add_url_task(self, url): 101 | self.commit_jobs[0].append(url) 102 | 103 | def add_bt_task_by_hash(self, hash): 104 | self.commit_jobs[1].append(['hash', hash]) 105 | 106 | def add_bt_task_by_content(self, content, name): 107 | self.commit_jobs[1].append(['content', (content, name)]) 108 | 109 | def add_magnet_task(self, hash): 110 | self.commit_jobs[1].append(['magnet', hash]) 111 | 112 | def commit(self): 113 | urls, bts = self.commit_jobs 114 | if urls: 115 | self.client.add_batch_tasks(map(lixian_encoding.try_native_to_utf_8, urls)) 116 | for bt_type, value in bts: 117 | if bt_type == 'hash': 118 | print 'Adding bt task', value # TODO: print the thing user inputs (may be not hash) 119 | self.client.add_torrent_task_by_info_hash(value) 120 | elif bt_type == 'content': 121 | content, name = value 122 | print 'Adding bt task', name 123 | self.client.add_torrent_task_by_content(content) 124 | elif bt_type == 'magnet': 125 | print 'Adding magnet task', value # TODO: print the thing user inputs (may be not hash) 126 | self.client.add_task(value) 127 | else: 128 | raise NotImplementedError(bt_type) 129 | self.commit_jobs = [[], []] 130 | self.refresh_tasks() 131 | 132 | def prepare(self): 133 | # prepare actions (e.g. 
add tasks) 134 | for query in self.queries: 135 | query.prepare() 136 | # commit and refresh task list 137 | self.commit() 138 | 139 | def query_complete(self): 140 | for query in list(self.queries): 141 | query.query_complete() 142 | 143 | def merge_results(self): 144 | tasks = merge_tasks(self.download_jobs) 145 | for t in tasks: 146 | if t['type'] == 'bt': 147 | # XXX: a dirty trick to cache requests 148 | t['base'] = self 149 | self.download_jobs = tasks 150 | 151 | def query_once(self): 152 | self.prepare() 153 | # merge results 154 | for query in self.queries: 155 | self.download_jobs += query.query_once() 156 | self.query_complete() 157 | self.merge_results() 158 | 159 | def query_search(self): 160 | for query in self.queries: 161 | self.download_jobs += query.query_search() 162 | self.merge_results() 163 | 164 | def peek_download_jobs(self): 165 | return self.download_jobs 166 | 167 | def pull_completed(self): 168 | completed = [] 169 | waiting = [] 170 | for t in self.download_jobs: 171 | if t['status_text'] == 'completed': 172 | completed.append(t) 173 | elif t['type'] != 'bt': 174 | waiting.append(t) 175 | elif 'files' not in t: 176 | waiting.append(t) 177 | else: 178 | i_completed = [] 179 | i_waiting = [] 180 | for f in t['files']: 181 | if f['status_text'] == 'completed': 182 | i_completed.append(f) 183 | else: 184 | i_waiting.append(f) 185 | if i_completed: 186 | tt = dict(t) 187 | tt['files'] = i_completed 188 | completed.append(tt) 189 | if i_waiting: 190 | tt = dict(t) 191 | tt['files'] = i_waiting 192 | waiting.append(tt) 193 | self.download_jobs = waiting 194 | return completed 195 | 196 | def refresh_status(self): 197 | self.refresh_tasks() 198 | self.files = {} 199 | tasks = [] 200 | for old_task in self.download_jobs: 201 | new_task = dict(self.get_task_by_id(old_task['id'])) 202 | if 'files' in old_task: 203 | files = self.get_files(new_task) 204 | new_task['files'] = [files[f['index']] for f in old_task['files']] 205 | tasks.append(new_task) 206 | self.download_jobs = tasks 207 | 208 | class Query(object): 209 | def __init__(self, base): 210 | self.bind(base) 211 | 212 | def bind(self, base): 213 | self.base = base 214 | self.client = base.client 215 | return self 216 | 217 | def unregister(self): 218 | self.base.unregister_query(self) 219 | 220 | def prepare(self): 221 | pass 222 | 223 | def query_once(self): 224 | raise NotImplementedError() 225 | 226 | def query_complete(self): 227 | raise NotImplementedError() 228 | 229 | def query_search(self): 230 | raise NotImplementedError() 231 | 232 | class ExactQuery(Query): 233 | def __init__(self, base): 234 | super(ExactQuery, self).__init__(base) 235 | 236 | def query_once(self): 237 | raise NotImplementedError() 238 | 239 | def query_complete(self): 240 | self.unregister() 241 | 242 | def query_search(self): 243 | raise NotImplementedError() 244 | 245 | class SearchQuery(Query): 246 | def __init__(self, base): 247 | super(SearchQuery, self).__init__(base) 248 | 249 | def query_once(self): 250 | return self.query_search() 251 | 252 | def query_complete(self): 253 | pass 254 | 255 | def query_search(self): 256 | raise NotImplementedError() 257 | 258 | ################################################## 259 | # register 260 | ################################################## 261 | 262 | processors = [] 263 | 264 | bt_processors = [] 265 | 266 | # 0 267 | # 1 -- builtin -- most 268 | # 2 -- subs -- 0/[0-9] 269 | # 4 -- magnet 270 | # 5 -- user 271 | # 6 -- extend url 272 | # 7 -- plain url, bt url 273 | # 8 -- filter 
274 | # 9 -- default -- text search 275 | 276 | def query(priority): 277 | assert isinstance(priority, (int, float)) 278 | def register(processor): 279 | processors.append((priority, processor)) 280 | return processor 281 | return register 282 | 283 | def bt_query(priority): 284 | assert isinstance(priority, (int, float)) 285 | def register(processor): 286 | bt_processors.append((priority, processor)) 287 | return processor 288 | return register 289 | 290 | def user_query(processor): 291 | return query(priority=5)(processor) 292 | 293 | def load_default_queries(): 294 | import lixian_queries 295 | 296 | 297 | ################################################## 298 | # query 299 | ################################################## 300 | 301 | def to_list_tasks(client, args): 302 | if args.category: 303 | return lambda: client.read_all_tasks_by_category(args.category) 304 | elif args.deleted: 305 | return client.read_all_deleted 306 | elif args.expired: 307 | return client.read_all_expired 308 | elif args.completed: 309 | return client.read_all_tasks 310 | elif args.failed: 311 | return client.read_all_tasks 312 | elif args.all: 313 | return client.read_all_tasks 314 | else: 315 | return client.read_all_tasks 316 | 317 | def to_query(base, arg, processors): 318 | for _, process in sorted(processors): 319 | q = process(base, arg) 320 | if q: 321 | return q 322 | raise NotImplementedError('No proper query process found for: ' + arg) 323 | 324 | def merge_files(files1, files2): 325 | ids = [] 326 | files = [] 327 | for f in files1 + files2: 328 | if f['id'] not in ids: 329 | files.append(f) 330 | ids.append(f['id']) 331 | return files 332 | 333 | def merge_tasks(tasks): 334 | result_tasks = [] 335 | task_mapping = {} 336 | for task in tasks: 337 | assert type(task) == dict, repr(type) 338 | id = task['id'] 339 | assert 'index' not in task 340 | if id in task_mapping: 341 | if 'files' in task and 'files' in task_mapping[id]: 342 | task_mapping[id]['files'] = merge_files(task_mapping[id]['files'], task['files']) 343 | else: 344 | if 'files' in task: 345 | t = dict(task) 346 | result_tasks.append(t) 347 | task_mapping[id] = t 348 | else: 349 | result_tasks.append(task) 350 | task_mapping[id] = task 351 | return result_tasks 352 | 353 | class AllQuery(SearchQuery): 354 | def __init__(self, base): 355 | super(AllQuery, self).__init__(base) 356 | def query_search(self): 357 | return self.base.get_tasks() 358 | 359 | class CompletedQuery(SearchQuery): 360 | def __init__(self, base): 361 | super(CompletedQuery, self).__init__(base) 362 | def query_search(self): 363 | return filter(lambda x: x['status_text'] == 'completed', self.base.get_tasks()) 364 | 365 | class FailedQuery(SearchQuery): 366 | def __init__(self, base): 367 | super(FailedQuery, self).__init__(base) 368 | def query_search(self): 369 | return filter(lambda x: x['status_text'] == 'failed', self.base.get_tasks()) 370 | 371 | class NoneQuery(SearchQuery): 372 | def __init__(self, base): 373 | super(NoneQuery, self).__init__(base) 374 | def query_search(self): 375 | return [] 376 | 377 | def default_query(options): 378 | if options.category: 379 | return AllQuery 380 | elif options.deleted: 381 | return AllQuery 382 | elif options.expired: 383 | return AllQuery 384 | elif options.completed: 385 | return CompletedQuery 386 | elif options.failed: 387 | return FailedQuery 388 | elif options.all: 389 | return AllQuery 390 | else: 391 | return NoneQuery 392 | 393 | def parse_queries(base, args): 394 | return [to_query(base, arg, bt_processors if 
args.torrent else processors) for arg in args] or [default_query(args)(base)] 395 | 396 | def parse_limit(args): 397 | limit = args.limit 398 | if limit: 399 | limit = int(limit) 400 | ids = [] 401 | for x in args: 402 | import re 403 | if re.match(r'^\d+$', x): 404 | ids.append(int(x)) 405 | elif re.match(r'^(\d+)/', x): 406 | ids.append(int(x.split('/')[0])) 407 | elif re.match(r'^(\d+)-(\d+)$', x): 408 | ids.extend(map(int, x.split('-'))) 409 | else: 410 | return limit 411 | if ids and limit: 412 | return min(max(ids)+1, limit) 413 | elif ids: 414 | return max(ids)+1 415 | else: 416 | return limit 417 | 418 | def build_query(client, args): 419 | if args.input: 420 | import fileinput 421 | args._left.extend(line.strip() for line in fileinput.input(args.input) if line.strip()) 422 | load_default_queries() # IMPORTANT: init default queries 423 | limit = parse_limit(args) 424 | base = TaskBase(client, to_list_tasks(client, args), limit) 425 | base.register_queries(parse_queries(base, args)) 426 | return base 427 | 428 | ################################################## 429 | # compatible APIs 430 | ################################################## 431 | 432 | def find_tasks_to_download(client, args): 433 | base = build_query(client, args) 434 | base.query_once() 435 | return base.peek_download_jobs() 436 | 437 | def search_tasks(client, args): 438 | base = build_query(client, args) 439 | base.query_search() 440 | return base.peek_download_jobs() 441 | 442 | def expand_bt_sub_tasks(task): 443 | files = task['base'].get_files(task) # XXX: a dirty trick to cache requests 444 | not_ready = [] 445 | single_file = False 446 | if len(files) == 1 and files[0]['name'] == task['name']: 447 | single_file = True 448 | if 'files' in task: 449 | ordered_files = [] 450 | for t in task['files']: 451 | assert isinstance(t, dict) 452 | if t['status_text'] != 'completed': 453 | not_ready.append(t) 454 | else: 455 | ordered_files.append(t) 456 | files = ordered_files 457 | return files, not_ready, single_file 458 | 459 | 460 | ################################################## 461 | # simple helpers 462 | ################################################## 463 | 464 | def get_task_by_id(client, id): 465 | base = TaskBase(client, client.read_all_tasks) 466 | return base.get_task_by_id(id) 467 | 468 | def get_task_by_any(client, arg): 469 | import lixian_cli_parser 470 | tasks = search_tasks(client, lixian_cli_parser.parse_command_line([arg])) 471 | if not tasks: 472 | raise LookupError(arg) 473 | if len(tasks) > 1: 474 | raise LookupError('Too many results for ' + arg) 475 | return tasks[0] 476 | 477 | -------------------------------------------------------------------------------- /lixian_url.py: -------------------------------------------------------------------------------- 1 | 2 | import base64 3 | import urllib 4 | 5 | def xunlei_url_encode(url): 6 | return 'thunder://'+base64.encodestring('AA'+url+'ZZ').replace('\n', '') 7 | 8 | def xunlei_url_decode(url): 9 | assert url.startswith('thunder://') 10 | url = base64.decodestring(url[10:]) 11 | assert url.startswith('AA') and url.endswith('ZZ') 12 | return url[2:-2] 13 | 14 | def flashget_url_encode(url): 15 | return 'Flashget://'+base64.encodestring('[FLASHGET]'+url+'[FLASHGET]').replace('\n', '') 16 | 17 | def flashget_url_decode(url): 18 | assert url.startswith('Flashget://') 19 | url = base64.decodestring(url[11:]) 20 | assert url.startswith('[FLASHGET]') and url.endswith('[FLASHGET]') 21 | return url.replace('[FLASHGET]', '') 22 | 23 | def 
flashgetx_url_decode(url): 24 | assert url.startswith('flashgetx://|mhts|') 25 | name, size, hash, end = url.split('|')[2:] 26 | assert end == '/' 27 | return 'ed2k://|file|'+base64.decodestring(name)+'|'+size+'|'+hash+'/' 28 | 29 | def qqdl_url_encode(url): 30 | return 'qqdl://' + base64.encodestring(url).replace('\n', '') 31 | 32 | def qqdl_url_decode(url): 33 | assert url.startswith('qqdl://') 34 | return base64.decodestring(url[7:]) 35 | 36 | def url_unmask(url): 37 | if url.startswith('thunder://'): 38 | return normalize_unicode_link(xunlei_url_decode(url)) 39 | elif url.startswith('Flashget://'): 40 | return flashget_url_decode(url) 41 | elif url.startswith('flashgetx://'): 42 | return flashgetx_url_decode(url) 43 | elif url.startswith('qqdl://'): 44 | return qqdl_url_decode(url) 45 | else: 46 | return url 47 | 48 | def normalize_unicode_link(url): 49 | import re 50 | def escape_unicode(m): 51 | c = m.group() 52 | if ord(c) < 0x80: 53 | return c 54 | else: 55 | return urllib.quote(c.encode('utf-8')) 56 | def escape_str(m): 57 | c = m.group() 58 | if ord(c) < 0x80: 59 | return c 60 | else: 61 | return urllib.quote(c) 62 | if type(url) == unicode: 63 | return re.sub(r'.', escape_unicode, url) 64 | else: 65 | return re.sub(r'.', escape_str, url) 66 | 67 | def unquote_url(x): 68 | x = urllib.unquote(x) 69 | if type(x) != str: 70 | return x 71 | try: 72 | return x.decode('utf-8') 73 | except UnicodeDecodeError: 74 | return x.decode('gbk') # can't decode in utf-8 and gbk 75 | 76 | -------------------------------------------------------------------------------- /lixian_util.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = [] 3 | 4 | import re 5 | 6 | def format_1d(n): 7 | return re.sub(r'\.0*$', '', '%.1f' % n) 8 | 9 | def format_size(n): 10 | if n < 1000: 11 | return '%sB' % n 12 | elif n < 1000**2: 13 | return '%sK' % format_1d(n/1000.) 14 | elif n < 1000**3: 15 | return '%sM' % format_1d(n/1000.**2) 16 | elif n < 1000**4: 17 | return '%sG' % format_1d(n/1000.**3) 18 | 19 | 20 | def parse_size(size): 21 | size = str(size) 22 | if re.match('^\d+$', size): 23 | return int(size) 24 | m = re.match(r'^(\d+(?:\.\d+)?)(K|M|G)B?$', size, flags=re.I) 25 | if not m: 26 | raise Exception("Invalid size format: %s" % size) 27 | return int(float(m.group(1)) * {'K': 1000, 'M': 1000*1000, 'G': 1000*1000*1000}[m.group(2).upper()]) 28 | 29 | 30 | -------------------------------------------------------------------------------- /lixian_verification_code.py: -------------------------------------------------------------------------------- 1 | 2 | def file_path_verification_code_reader(path): 3 | def reader(image): 4 | with open(path, 'wb') as output: 5 | output.write(image) 6 | print 'Verification code picture is saved to %s, please open it manually and enter what you see.' 
% path 7 | code = raw_input('Verification code: ') 8 | return code 9 | return reader 10 | 11 | def ascii_verification_code_reader(image_data): 12 | import ascii_verification_code 13 | print ascii_verification_code.convert_to_ascii(image_data) 14 | code = raw_input('Verification code: ') 15 | return code 16 | 17 | def default_verification_code_reader(args): 18 | if args.verification_code_handler == 'ascii': 19 | return ascii_verification_code_reader 20 | elif args.verification_code_path: 21 | return file_path_verification_code_reader(args.verification_code_path) 22 | 23 | -------------------------------------------------------------------------------- /tests/123.txt: -------------------------------------------------------------------------------- 1 | 123 -------------------------------------------------------------------------------- /tests/123456.txt: -------------------------------------------------------------------------------- 1 | 123456 -------------------------------------------------------------------------------- /tests/The-quick-brown-fox-jumps-over-the-lazy-dog.txt: -------------------------------------------------------------------------------- 1 | The quick brown fox jumps over the lazy dog -------------------------------------------------------------------------------- /tests/a.txt: -------------------------------------------------------------------------------- 1 | a -------------------------------------------------------------------------------- /tests/abc.txt: -------------------------------------------------------------------------------- 1 | abc -------------------------------------------------------------------------------- /tests/empty.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iambus/xunlei-lixian/c9ef30dd5ea8da71bbbd2faf9e14b0629a6de4e5/tests/empty.txt --------------------------------------------------------------------------------
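
Note on extending the query machinery above: lixian_query.py resolves each command-line argument by asking the registered processors in priority order (see the priority comment in its "register" section), and user-supplied processors hook in at priority 5 through the user_query decorator. The following Python 2 snippet is a minimal illustrative sketch, not part of the repository: it registers a hypothetical processor that claims the literal argument "odd:" and resolves it to every task with an odd task number. The module name, the OddNumberQuery class, and the "odd:" syntax are invented for this example, and the module would have to be imported (for instance from a plugin) before build_query() assembles the queries.

    # custom_odd_query.py -- hypothetical sketch, not part of xunlei-lixian
    # Registers a user-priority (5) query processor via lixian_query.user_query.
    # Import it (e.g. from a plugin module) before build_query() runs, so the
    # decorator has already appended it to the global `processors` list.

    from lixian_query import SearchQuery, user_query

    class OddNumberQuery(SearchQuery):
        def __init__(self, base):
            super(OddNumberQuery, self).__init__(base)

        def query_search(self):
            # t['#'] is the ordinal task number, the same field find_task_by_id matches
            return [t for t in self.base.get_tasks() if t['#'] % 2 == 1]

    @user_query
    def odd_number_processor(base, x):
        # Claim only the literal argument 'odd:'; returning None lets the
        # lower-priority processors (URL, filter, default text search) try it.
        if x == 'odd:':
            return OddNumberQuery(base)

Once registered, any command that resolves its arguments through build_query / find_tasks_to_download / search_tasks (e.g. download or list) would route the argument "odd:" through odd_number_processor before falling back to the default text search at priority 9.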