├── .gitignore ├── README.md ├── auto_del.py ├── auto_magic_seeds.py ├── catch_magic.py ├── download_new_torrents.py ├── find_torrent.py ├── give_sugar.py ├── my_bencoder.py ├── qb_del.py ├── rename_torrents.py ├── u2_auxseed.py └── u2_magic.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | .idea 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | pip-wheel-metadata/ 25 | share/python-wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .nox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | #### auto_del.py 2 | deluge 客户端自动删种 3 | #### auto_magic_seeds.py 4 | 给有上传速度的种子放魔法(支持各种 bt 客户端) 5 | #### catch_magic.py 6 | 追魔/搭桥(不限客户端) 7 | #### download_new_torrents.py 8 | 自动下载新种 9 | #### find_torrent.py 10 | 根据文件名反查种子并添加到 qb 客户端 11 | #### give_sugar.py 12 | 发糖 13 | #### my_bencoder.py 14 | 自用 bencode 格式编码与解码 15 | #### qb_del.py 16 | 从 qb 客户端删除赚分效率不高的种子 17 | #### rename_torrents.py 18 | 按种子标题重命名种子文件(仅支持 qb) 19 | #### u2_auxseed.py 20 | u2辅种(仅支持 qb) 21 | #### u2_magic.py 22 | 放魔法/限速(支持客户端 qb 和 de) 23 | 24 | ### ubuntu/debian 系统下编译安装 python3.11 25 | 输入以下命令 26 | ``` 27 | sudo apt -y install build-essential zlib1g zlib1g-dev libffi-dev libc6-dev libssl-dev libbz2-dev libncurses5-dev libgdbm-dev libgdbm-compat-dev liblzma-dev uuid-dev sqlite3 libsqlite3-dev libssl-dev tcl8.6-dev tk8.6-dev libreadline-dev zlib1g-dev 28 | wget https://www.python.org/ftp/python/3.11.0/Python-3.11.0.tgz 29 | tar zxvf Python-3.11.0.tgz && cd Python-3.11.0 30 | ./configure --prefix=/usr/local/python3.11 --enable-optimizations --enable-shared # 不指定安装目录,会覆盖系统的 python3 31 | make -j$(nproc) # 注意查看是否有模块报错,如果有一般是缺少依赖项,网上搜一下 32 | sudo make install 33 | sudo ln -s /usr/local/python3.11/lib/libpython3.11.so.1.0 /usr/lib/libpython3.11.so.1.0 34 | sudo ln -s /usr/local/python3.11/bin/python3.11 /usr/bin/python3.11 35 | sudo ln -s /usr/local/python3.11/bin/pip3.11 /usr/bin/pip3.11 36 | ``` 37 | 然后就可以使用命令 python3.11 和 pip3.11 38 | -------------------------------------------------------------------------------- /auto_del.py: -------------------------------------------------------------------------------- 1 | """ 2 | deluge 删种脚本,优先保留体积大、下载人数多、上传速度高、做种时间少的种子。 3 | 用过一些删种工具,逻辑都比较粗暴,所以自己写了一个, 4 | 每个种子综合考虑各项情况,分配加权把各项加起来,从低到高删除直到剩余空间大于指定值。 5 | 40% 取当前速度和平均速度的平均值,20% 取做种中种子的上传速度按做种时间权重分配的值, 6 | 剩下的 40% 中其中一部分为取做种中种子的上传速度按下载上传人数权重分配的值,另一部分为按做种人数分配的值 7 | 比例取决于参数 KS,同时有考虑体积,体积越大加权越高(大约是 0.2 次方) 8 | """ 9 | 10 | from time import sleep 11 | from deluge_client import LocalDelugeRPCClient, FailedToReconnectException 12 | from loguru import logger 13 | from ssl import SSLError 14 | from collections import deque 15 | from typing import Union, Any, Tuple, List 16 | import os 17 | 18 | MIN_FREE_SPACE = 3725 # type: Union[int, float] 19 | '最小剩余空间(GiB),当下载速度未超过临界值 MAX_DR 时小于这个值删种' 20 | MIN_FREE_SPACE_LOWER = 3725 / 3 # type: Union[int, float] 21 | '''当下载速度超过临界值 MAX_DR 时小于这个值删种。 22 | 硬盘空间足够的话建议两个值的差 1024(1TB) 以上''' 23 | MAX_DR = 10 * 1024 ** 2 # type: Union[int, float] 24 | '下载速度临界值' 25 | MODE = 1 # type: Any 26 | '为 1 时先删除做种中的种子,删完后再删下载中的种子;否则综合考虑一起删' 27 | KS = 0.5 # type: Union[int, float] 28 | '按做种人数分配的权重占 40% 的比例,取值范围 [0, 1],为 0 代表不考虑做种人数,这个参数的目的在于延长孤种的保种时间' 29 | INTERVAL = 600 # type: Union[int, float] 30 | '删种的时间间隔' 31 | MIN_DOWN_TIME = 3600 # type: Union[int, float] 32 | '下载时间小于这个值不删' 
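# A quick reference for the scoring implemented in weight() below (comments only,
# summarising the code further down).  For every seeding torrent:
#     time_weight = (1 + (seeding_hours / (size / (S0 GiB)))**2) ** -0.5
#     peer_weight = (total_peers*(1-KS) + avg_peers*KS) / max(total_seeds, 1) * total_size
#     ur_e        = 0.4*upload_rate + 0.4*av_ur*peer_weight/sum(peer_weights)
#                                   + 0.2*av_ur*time_weight/sum(time_weights)
#     indicator   = ur_e * done_GiB ** -0.8   # torrents with the lowest indicator are deleted first
# where av_ur is the recent average of the total seeding upload rate.  S0 below is
# the reference size (GiB) of the time-weight curve: the larger S0 is, the faster
# the time weight of a torrent of a given size decays.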
33 | S0 = 300 # type: Union[int, float] 34 | LOG_PATH = '' # type: str 35 | EXCLUDE_LABELS = ['seed', 'public'] # type: Union[Tuple[Any, ...], List[str]] 36 | '如果种子有这些标签,删种时会跳过' 37 | 38 | 39 | class Deluge(LocalDelugeRPCClient): 40 | timeout = 10 41 | 42 | def __init__(self, 43 | host: str = '127.0.0.1', 44 | port: int = 58846, 45 | username: str = '', 46 | password: str = '', 47 | decode_utf8: bool = True, 48 | automatic_reconnect: bool = True, 49 | ): 50 | super().__init__(host, port, username, password, decode_utf8, automatic_reconnect) 51 | 52 | def call(self, method, *args, **kwargs): 53 | if not self.connected and method != 'daemon.login': 54 | for i in range(5): 55 | try: 56 | self.reconnect() 57 | logger.debug(f'Connected to deluge client on {self.host}') 58 | break 59 | except SSLError: 60 | sleep(0.3 * 2 ** i) 61 | try: 62 | return super().call(method, *args, **kwargs) 63 | except FailedToReconnectException: 64 | logger.error(f'Failed to reconnect to deluge client on {self.host}') 65 | except: 66 | raise 67 | 68 | 69 | class AutoDel: 70 | def __init__(self, client: Deluge): 71 | self.client = client 72 | self.sur = deque(maxlen=100) 73 | self.free_space = MIN_FREE_SPACE * 1024 ** 3 74 | self.torrent_status = {} 75 | self.ses_dr = 0 76 | self.torrent_keys = ['active_time', 'download_payload_rate', 'name', 'state', 77 | 'seeding_time', 'total_peers', 'total_seeds', 'total_size', 78 | 'total_done', 'total_uploaded', 'upload_payload_rate', 'label' 79 | ] 80 | 81 | def update_session(self): 82 | self.free_space = self.client.core.get_free_space() 83 | if not isinstance(self.free_space, int): 84 | raise 85 | 86 | seed_ur = 0 87 | up_status = self.client.core.get_torrents_status({'state': 'Seeding'}, ['upload_payload_rate']) 88 | if not isinstance(up_status, dict): 89 | raise 90 | for _id, data in up_status.items(): 91 | seed_ur += data['upload_payload_rate'] 92 | self.sur.append(seed_ur) 93 | 94 | self.ses_dr = self.client.core.get_session_status(['download_rate'])['download_rate'] 95 | 96 | if self.free_space < MIN_FREE_SPACE * 1024 ** 3: 97 | self.torrent_status = self.client.core.get_torrents_status({}, self.torrent_keys) 98 | if not isinstance(self.torrent_status, dict): 99 | raise 100 | 101 | def run(self): 102 | while True: 103 | try: 104 | while True: 105 | try: 106 | self.update_session() 107 | break 108 | except: 109 | pass 110 | min_space = (MIN_FREE_SPACE if self.ses_dr < MAX_DR else MIN_FREE_SPACE_LOWER) * 1024 ** 3 111 | if self.free_space >= min_space: 112 | logger.debug(f'There is free space {self.free_space / 1024 ** 3:.3f} GiB. ' 113 | f'No need to del any torrents.') 114 | else: 115 | indicator, info = self.weight() 116 | while self.free_space < min_space: 117 | if not indicator: 118 | break 119 | i = indicator.index(min(indicator)) 120 | state = 'Failed to delete' 121 | try: 122 | self.client.core.remove_torrent(info[i]['_id'], True) 123 | state = 'Successfully deleted' 124 | except TimeoutError as e: 125 | # 正常操作,一般实际上是已经删了 126 | logger.error(f'{e.__class__.__name__}: {e}') 127 | except Exception as e: 128 | if e.__class__.__name__ == 'InvalidTorrentError': 129 | # 正常操作,基本上也是删了 130 | logger.error(f"{e.__module__}.{e.__class__.__name__}: " 131 | f"Torrent_id {info[i]['_id']} not in session") 132 | else: 133 | logger.exception(e) 134 | self.free_space += info[i]['done'] 135 | logger.warning(f"{state} {info[i]['state'].lower()} torrent {info[i]['_id']}, " 136 | f"name | {info[i]['name']}. 
") 137 | if state == 'Successfully deleted': 138 | logger.info(f"{info[i]['done'] / 1024 ** 3:.3f} GiB space released. " 139 | f"Free space {self.free_space / 1024 ** 3:.3f} GiB.") 140 | sleep(info[i]['done'] / 1024 ** 3 / 10) 141 | del indicator[i] 142 | del info[i] 143 | except Exception as e: 144 | logger.exception(e) 145 | finally: 146 | sleep(INTERVAL) 147 | 148 | @staticmethod 149 | def torrent_filter(state): 150 | return lambda tup: tup[1]['label'] not in EXCLUDE_LABELS and tup[1]['state'] == state 151 | 152 | def weight(self): 153 | total_peer_weight = 0 154 | total_time_weight = 0 155 | total_peers = 0 156 | num = 0 157 | indicator = [] 158 | info = [] 159 | e_m = 0.0 160 | av_ur = sum(self.sur) / len(self.sur) 161 | if av_ur == 0: 162 | av_ur = 1048576 163 | 164 | for _id, data in filter(self.torrent_filter('Seeding'), self.torrent_status.items()): 165 | total_peers += data['total_peers'] 166 | num += 1 167 | 168 | av_peer_num = total_peers / num if num > 0 else 0 169 | 170 | for _id, data in filter(self.torrent_filter('Seeding'), self.torrent_status.items()): 171 | data['peer_weight'] = (data['total_peers'] * (1 - KS) + av_peer_num * KS) / ( 172 | data['total_seeds'] if data['total_seeds'] > 0 else 1) * data['total_size'] 173 | total_peer_weight += data['peer_weight'] 174 | k_time = data['seeding_time'] / 3600 175 | k_size = data['total_size'] / (S0 * 1024 ** 3) 176 | data['time_weight'] = pow(1 + pow((k_time / k_size), 2), -0.5) 177 | total_time_weight += data['time_weight'] 178 | 179 | if total_time_weight > 0: 180 | for _id, data in filter(self.torrent_filter('Seeding'), self.torrent_status.items()): 181 | ur_e = data['upload_payload_rate'] * 0.4 182 | ur_tm_p = av_ur * data['time_weight'] / total_time_weight 183 | if total_peer_weight > 0: 184 | ur_pr_p = av_ur * data['peer_weight'] / total_peer_weight 185 | ur_e += ur_pr_p * 0.4 + ur_tm_p * 0.2 186 | else: 187 | ur_e += ur_tm_p * 0.6 188 | sz_e = data['total_done'] / 1024 ** 3 189 | e = ur_e * pow(sz_e, -0.8) 190 | indicator.append(e) 191 | info.append({'_id': _id, 'name': data['name'], 'done': data['total_done'], 'state': data['state']}) 192 | if MODE == 1 or av_ur == 0: 193 | e_m = max(indicator) + 1 194 | 195 | for _id, data in filter(self.torrent_filter('Downloading'), self.torrent_status.items()): 196 | if data['active_time'] < MIN_DOWN_TIME: 197 | continue 198 | au = data['total_uploaded'] / (data['active_time'] + 1) 199 | ur = data['upload_payload_rate'] 200 | ur_e = au * 0.5 + ur * 0.5 201 | sz_a = data['download_payload_rate'] * INTERVAL / 2 202 | if sz_a < data['total_size'] - data['total_done']: 203 | sz_e = data['total_size'] / 1024 ** 3 204 | else: 205 | sz_e = (sz_a + data['total_done']) / 1024 ** 3 206 | if sz_e == 0: 207 | continue 208 | e = ur_e * pow(sz_e, -0.8) + e_m 209 | indicator.append(e) 210 | info.append({'_id': _id, 'name': data['name'], 'done': data['total_done'], 'state': data['state']}) 211 | 212 | return indicator, info 213 | 214 | 215 | log_path = LOG_PATH or f'{os.path.splitext(__file__)[0]}.log' 216 | logger.add(level='DEBUG', sink=log_path, encoding='utf-8', rotation="5 MB") 217 | 218 | AutoDel(Deluge()).run() 219 | -------------------------------------------------------------------------------- /auto_magic_seeds.py: -------------------------------------------------------------------------------- 1 | """python3.7及以上 2 | 脚本有两个功能,一个是给自己有上传速度的种子放魔法,一个是给孤种放地图炮吸引别人下载 3 | 支持客户端 deluge, qbittorrent, transmission, rutorrent 和 utorrent 4 | 依赖:pip3 install PyYAML requests bs4 lxml deluge-client 
qbittorrent-api transmission-rpc loguru pytz 5 | Azusa 大佬的 api,见 https://github.com/kysdm/u2_api,自动获取 token: https://greasyfork.org/zh-CN/scripts/428545 6 | 因为使用了异步,放魔法速度很快,不会有反应时间,请使用前仔细检查配置 7 | """ 8 | 9 | import asyncio 10 | import gc 11 | import json 12 | import os 13 | import random 14 | import re 15 | import sys 16 | 17 | import aiohttp 18 | import pytz 19 | import requests 20 | import qbittorrentapi 21 | import transmission_rpc 22 | 23 | from abc import abstractmethod, ABCMeta 24 | from collections import UserDict 25 | from concurrent.futures import ThreadPoolExecutor 26 | from ssl import SSLError 27 | from datetime import datetime 28 | from time import time, sleep 29 | from typing import Dict, List 30 | 31 | from bs4 import BeautifulSoup 32 | from loguru import logger 33 | 34 | from deluge_client import FailedToReconnectException, LocalDelugeRPCClient 35 | from qbittorrentapi.exceptions import APIConnectionError, HTTPError 36 | from transmission_rpc.error import TransmissionTimeoutError, TransmissionConnectError 37 | 38 | CONFIG = { # 应该跟 json 差不多,放到 ide 里方便能看出错误 39 | 'clients_info': [ 40 | { 41 | 'type': 'deluge', # 'de', 'Deluge', 'deluge' 42 | 'host': '127.0.0.1', # IP 43 | 'port': 58846, # daemon 端口 44 | 'username': '', # 本地客户端可以不填用户名和密码 45 | 'password': '' # cat ~/.config/deluge/auth 46 | }, # 多个用逗号隔开 47 | { 48 | 'type': 'qbittorrent', # 'qb', 'QB', 'qbittorrent', 'qBittorrent' 49 | 'host': 'http://127.0.0.1', # host,最好带上 http 或者 https 50 | 'port': 8080, # webui 端口 51 | 'username': '', # web 用户名 52 | 'password': '', # web 密码 53 | # 'verify': True # 验证 https 证书 54 | }, 55 | { 56 | 'type': 'transmission', # 'tr', 'Transmission', 'transmission' 57 | 'host': '127.0.0.1', # IP 58 | 'port': 9091, # webui 端口 59 | 'username': '', # web 用户名 60 | 'password': '' # web 密码 61 | }, 62 | { 63 | 'type': 'rutorrent', # 'ru', 'RuTorrent', 'rutorrent' 64 | 'url': 'https://127.0.0.1/rutorrent', # rtinst 安装完是这样的 65 | 'username': '', # web 用户名 66 | 'password': '', # web 密码 67 | # 'verify': False # 验证 https 证书 68 | }, 69 | { 70 | 'type': 'utorrent', # 'ut', 'UT', 'utorrent', 'uTorrent', 'µTorrent' 71 | 'host': 'http://127.0.0.1', # host,最好带上 http 或者 https 72 | 'port': 8080, # webui 端口 73 | 'username': '', # web 用户名 74 | 'password': '', # web 密码 75 | # 'verify': True # 验证 https 证书 76 | }, 77 | ], 78 | 'requests_args': { 79 | 'cookies': {'nexusphp_u2': ''}, # 网站 cookie 80 | 'headers': { 81 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' 82 | 'AppleWebKit/537.36 (KHTML, like Gecko) ' 83 | 'Chrome/104.0.5112.102 Safari/537.36 Edg/104.0.1293.70' 84 | }, 85 | 'proxy': '', # 'http://127.0.0.1:10809' 86 | 'timeout': 10 87 | }, 88 | 'magic_for_self': { # 做种中的种子,上传速度大于一定值,给自己放 2.33x 魔法 89 | 'enable': True, # 是否开启 90 | 'interval': 60, # 检查的间隔 91 | 'magic_downloading': True, # 是否下载中的种子放 2.33x 魔法 92 | 'min_rate': 1024, # 最小上传速度(KiB/s) 93 | 'min_size': 5, # 最小体积(GiB) 94 | 'min_d': 180, # 种子最小生存天数 95 | }, 96 | 'magic_for_all': { # 做种中的种子,做种人数小于一定值,放地图炮 free,吸引别人下载 97 | 'enable': False, # 是否开启 98 | 'interval': 86400, # 检查的间隔 99 | 'torrent_num': 5, # 一次放魔法的种子个数 100 | 'max_seeder_num': 5, # 做种人数最大值 101 | '233_all': True, # 为真时给所有人放 2.33x↑0x↓,否则给所有人 0x↓,自己放 2.33x↑ 102 | 'hours': 24, # 魔法持续时间 103 | 'min_rm_hr': 0 # 2.33x 剩余时间小于这个值(小时)还是会放 2.33x 魔法 104 | }, 105 | 'uc_max': 30000, # 单个魔法最大 uc 使用量 106 | 'total_uc_max': 200000, # 24h 内 uc 最大使用量 107 | 'api_token': '', # api 的 token,填了将默认使用 api 查询种子信息,不填就直接从 u2 网页获取信息 108 | 'uid': 50096, # 如果填了 api_token,则需要 uid 109 | 'data_path': 
f'{os.path.splitext(__file__)[0]}.data.txt', # 数据保存路径 110 | 'log_path': f'{os.path.splitext(__file__)[0]}.log', # 日志路径 111 | } 112 | 113 | 114 | class BtClient(metaclass=ABCMeta): 115 | """BT 客户端基类""" 116 | all_keys = ('name', # str 类型,文件名 (外层文件夹名) 117 | 'peers', # 可迭代对象,每项为一个字典,字典为单个 peer 的信息,其中必须包含 progress 项,代表进度,类型为 float(0~1) 118 | 'total_size', # int 类型,种子体积 (B) 119 | 'state', # str 类型,种子当前状态 120 | 'total_seeds', # int 类型,种子当前做种数 121 | 'tracker', # str 类型,种子当前 tracker 122 | 'upload_payload_rate', # int 类型,上传速度 (B / s) 123 | ) 124 | str_keys = ('name', 'tracker', 'state') 125 | int_keys = ('total_uploaded', 'upload_payload_rate', 'total_seeds') 126 | 127 | @classmethod 128 | def check_keys(cls, func): 129 | def wrapper(self, keys): 130 | res = func(self, keys) 131 | if 'checked' not in self.__dict__: 132 | self.checked = False 133 | if self.checked: 134 | return res 135 | 136 | unsupported_keys = [key for key in keys if key not in cls.all_keys] 137 | if unsupported_keys: 138 | raise ValueError(f'{unsupported_keys} not supported. ' 139 | f'These are the all available keys: \n{cls.all_keys}') 140 | if not isinstance(res, dict) and res is not None: 141 | raise TypeError(f'Return value of function {func.__name__} ' 142 | f'of class {self.__class__} should be dict type') 143 | if res: 144 | for _id, data in res.items(): 145 | for key, val in data.items(): 146 | if key in cls.int_keys and not isinstance(val, int): 147 | raise TypeError(f'The value of "{key}" should be int type') 148 | elif key in cls.str_keys and not isinstance(val, str) and not (key == 'tracker' and not val): 149 | raise TypeError(f'The value of "{key}" should be str type') 150 | if key not in keys: 151 | raise TypeError(f'Key "{key}" is not supported. Check return value {res}') 152 | 153 | self.checked = True 154 | return res 155 | 156 | return wrapper 157 | 158 | def __init_subclass__(cls, **kwargs): 159 | for function in 'active_torrents_info', 'seeding_torrents_info': 160 | setattr(cls, function, cls.check_keys(cls.__dict__[function])) 161 | 162 | @abstractmethod 163 | def call(self, method, *args, **kwargs): 164 | """ 165 | :param method: 方法 166 | :type method: str 167 | """ 168 | 169 | @abstractmethod 170 | def active_torrents_info(self, keys): 171 | """获取所有活动的种子信息 172 | :param keys: 包含种子信息相关键的列表,取值在 all_keys 中 173 | :type keys: List[str] 174 | :return: 以种子 hash 为键,种子信息(一个字典,键为 keys 中的值)为值的字典。 175 | 如果使用 deluge 以外客户端,需要按照 all_keys 中的说明返回指定类型数据 176 | :rtype: Dict[str, Dict[str, Any]] 177 | """ 178 | 179 | @abstractmethod 180 | def seeding_torrents_info(self, keys): 181 | """获取所有做种中的种子信息 182 | :type keys: List[str] 183 | :rtype: Dict[str, Dict[str, Any]] 184 | """ 185 | 186 | 187 | class Deluge(LocalDelugeRPCClient, BtClient): 188 | timeout = 10 189 | 190 | def __init__(self, host='127.0.0.1', port=58846, username='', password=''): 191 | super().__init__(host, port, username, password, decode_utf8=True, automatic_reconnect=True) 192 | 193 | def call(self, method, *args, **kwargs): 194 | if not self.connected and method != 'daemon.login': 195 | for i in range(6): 196 | try: 197 | self.reconnect() 198 | break 199 | except SSLError: 200 | sleep(0.3 * 2 ** i) 201 | try: 202 | return super().call(method, *args, **kwargs) 203 | except FailedToReconnectException: 204 | logger.error(f'Failed to reconnect to deluge client on {self.host}:{self.port}') 205 | except TimeoutError: 206 | logger.error(f'Timeout when connecting to deluge client on {self.host}:{self.port}') 207 | except Exception as e: 208 | if e.__class__.__name__ == 
'BadLoginError': 209 | logger.error(f'Failed to connect to deluge client on {self.host}:{self.port}, Password does not match') 210 | else: 211 | raise 212 | 213 | def active_torrents_info(self, keys): 214 | return self.call('core.get_torrents_status', {'state': 'Active'}, keys) 215 | 216 | def seeding_torrents_info(self, keys): 217 | return self.call('core.get_torrents_status', {'state': 'Seeding'}, keys) 218 | 219 | 220 | class Qbittorrent(qbittorrentapi.Client, BtClient): 221 | de_key_to_qb = {'name': 'name', 'tracker': 'tracker', 'total_size': 'size', 222 | 'upload_payload_rate': 'upspeed', 'state': 'state', 'total_seeds': 'num_complete'} 223 | 224 | def __init__(self, host='http://127.0.0.1', port=8080, username='', password='', **kwargs): 225 | super().__init__(host=host, port=port, username=username, password=password, 226 | REQUESTS_ARGS={'timeout': 10}, FORCE_SCHEME_FROM_HOST=True, 227 | VERIFY_WEBUI_CERTIFICATE=True if 'verify' not in kwargs else kwargs['verify']) 228 | 229 | def call(self, method, *args, **kwargs): 230 | try: 231 | return self.__getattribute__(method)(*args, **kwargs, _retries=5) 232 | except HTTPError as e: 233 | logger.error(f'Failed to connect to qbittorrent on {self.host}:{self.port} due to http error: {e}') 234 | except APIConnectionError as e: 235 | logger.error(f'Failed to connect to qbittorrent on {self.host}:{self.port} due to ' 236 | f'qbittorrentapi.exceptions.APIConnectionError: {e}') 237 | 238 | def fix_return_value(self, lst, keys): 239 | torrents_info = {} 240 | for torrent in lst: 241 | _id = torrent['hash'] 242 | torrents_info[_id] = {} 243 | for key in keys: 244 | torrents_info[_id][key] = torrent.get(self.de_key_to_qb[key]) 245 | return torrents_info 246 | 247 | def active_torrents_info(self, keys): 248 | return self.fix_return_value(self.call('torrents_info', status_filter='active'), keys) 249 | 250 | def seeding_torrents_info(self, keys): 251 | return self.fix_return_value(self.call('torrents_info', status_filter='seeding'), keys) 252 | 253 | 254 | class Transmission(transmission_rpc.Client, BtClient): 255 | de_key_to_tr = {'name': 'name', 'total_size': 'total_size', 256 | 'upload_payload_rate': 'rate_upload', 'state': 'status'} 257 | 258 | def __init__(self, host='http://127.0.0.1', port=9091, username='', password=''): 259 | super().__init__(host=host, port=port, username=username, password=password, timeout=10) 260 | self.host = host 261 | self.port = port 262 | 263 | def call(self, method, *args, **kwargs): 264 | try: 265 | return self.__getattribute__(method)(*args, **kwargs) 266 | # 这个 rpc 模块自动尝试 10 次不好改 267 | except (TransmissionTimeoutError, TransmissionConnectError) as e: 268 | logger.error(f'Error when connect to transmission client on {self.host}:{self.port} | {e}') 269 | 270 | def keys_to_dict(self, keys: List[str], torrent: transmission_rpc.Torrent): 271 | res = {key: torrent.__getattribute__(self.de_key_to_tr[key]) for key in keys if key in self.de_key_to_tr} 272 | if 'tracker' in keys: 273 | res['tracker'] = torrent.trackers[0].announce if torrent.trackers else None 274 | if 'total_seeds' in keys: 275 | res['total_seeds'] = torrent.tracker_stats[0].seeder_count if torrent.tracker_stats else 99999 276 | return res 277 | 278 | def active_torrents_info(self, keys: List[str]): 279 | return {torrent.hashString: self.keys_to_dict(keys, torrent) 280 | for torrent in self.call('get_torrents') if torrent.rate_upload > 0} 281 | 282 | def seeding_torrents_info(self, keys: List[str]): 283 | return {torrent.hashString: 
self.keys_to_dict(keys, torrent) 284 | for torrent in self.call('get_torrents') if torrent.status == 'seeding'} 285 | 286 | 287 | class RuTorrent(BtClient): 288 | tr_keys = {'tracker', 'total_seeds'} 289 | 290 | def __init__(self, url, username, password, **kwargs): 291 | self.url = url 292 | self.auth = (username, password) 293 | self.verify = False if 'verify' not in kwargs else kwargs['verify'] 294 | 295 | def call(self, method, *args, **kwargs): 296 | data = {'mode': method} 297 | data.update(kwargs) 298 | res = '' 299 | for i in range(6): 300 | try: 301 | res = requests.post(f"{self.url.rstrip('/')}/plugins/httprpc/action.php", 302 | auth=self.auth, data=data, verify=self.verify).text 303 | return json.loads(res) 304 | except json.JSONDecodeError as e: 305 | if 'Authorization Required' in res: 306 | logger.error(f'Failed to connect to rutorrent instance via {self.url}, ' 307 | f'check your username and password.') 308 | return 309 | else: 310 | logger.error(e) 311 | sleep(0.3 * 2 ** i) 312 | 313 | @staticmethod 314 | def info_from_list(keys, lst): 315 | res = {} 316 | if 'name' in keys: 317 | res['name'] = lst[4] 318 | if 'total_size' in keys: 319 | res['total_size'] = int(lst[5]) 320 | if 'state' in keys: 321 | res['state'] = 'seeding' if int(lst[19]) == 0 else 'downloading' 322 | if 'upload_payload_rate' in keys: 323 | res['upload_payload_rate'] = int(lst[11]) 324 | return res 325 | 326 | def update_tracker_info(self, info, lst): 327 | for _id, data in self.call('trkall').items(): 328 | if _id.lower() in info: 329 | update = ({'tracker': None, 'total_seeds': 99999} if not data 330 | else {'tracker': data[0][0], 'total_seeds': int(data[0][4])}) 331 | info[_id.lower()].update({key: update[key] for key in lst}) 332 | 333 | def torrents_info(self, status, keys): 334 | res = {} 335 | for _id, data in self.call('list')['t'].items(): 336 | if status == 'active' and int(data[11]) <= 0: 337 | continue 338 | if status == 'seeding' and int(data[19]) != 0: 339 | continue 340 | res[_id.lower()] = self.info_from_list(keys, data) 341 | if set(keys) & self.tr_keys: 342 | self.update_tracker_info(res, set(keys) & self.tr_keys) 343 | return res 344 | 345 | def active_torrents_info(self, keys): 346 | return self.torrents_info('active', keys) 347 | 348 | def seeding_torrents_info(self, keys): 349 | return self.torrents_info('seeding', keys) 350 | 351 | 352 | class UTorrent(BtClient): 353 | key_to_index = {'name': 2, 'total_size': 3, 'total_seeds': 15, 'upload_payload_rate': 8} 354 | 355 | def __init__(self, host='127.0.0.1', port=8080, username='', password='', **kwargs): 356 | if not any(host.startswith(pre) for pre in ('http://', 'https://')): 357 | host = f'http://{host}' 358 | self.url = f'{host}:{port}/gui' 359 | self.verify = True if 'verif' not in kwargs else kwargs['verify'] 360 | self.auth = (username, password) 361 | self.token, self.cookies = None, None 362 | self.err_msg = f'Failed to get utorrent web-api token via {self.url}, Check your username and password' 363 | self.get_token() 364 | 365 | def get_token(self): 366 | resp = requests.get(f'{self.url}/token.html', auth=self.auth, verify=self.verify) 367 | if resp: 368 | self.token = BeautifulSoup(resp.text, 'lxml').div.text 369 | self.cookies = {'GUID': resp.cookies['GUID']} 370 | else: 371 | logger.error(self.err_msg) 372 | 373 | def call(self, method, *args, **kwargs): 374 | if method == 'list': 375 | url = f'{self.url}/?list=1' 376 | else: 377 | url = f'{self.url}/?action={method}' 378 | params = {'token': self.token} 379 | 
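        # µTorrent's WebUI needs the token fetched from /gui/token.html plus the GUID
        # cookie on every request; if the token has expired the response contains
        # 'invalid request' and get_token() is called again below before retrying.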
params.update(kwargs) 380 | for i in range(6): 381 | resp = '' 382 | try: 383 | resp = requests.get(url, auth=self.auth, cookies=self.cookies, params=params, verify=self.verify).text 384 | if not resp: 385 | logger.error(self.err_msg) 386 | return 387 | else: 388 | return json.loads(resp) 389 | except Exception as e: 390 | if 'invalid request' in resp: 391 | self.get_token() 392 | else: 393 | logger.error(e) 394 | sleep(0.3 * 2 ** i) 395 | 396 | @staticmethod 397 | def get_state(num, rem): 398 | """状态有很多种,这里敷衍一下和 rutorrent 一样""" 399 | if num % 2 == 0: 400 | return 'paused' 401 | return 'seeding' if rem == 0 else 'downloading' 402 | 403 | def get_tracker(self, info): 404 | for torrent in self.call('getprops', hash=list(info.keys()))['props']: 405 | info[torrent['hash'].lower()]['tracker'] = torrent['trackers'].split('\r\n')[0] 406 | 407 | def active_torrents_info(self, keys): 408 | return self.torrents_info('active', keys) 409 | 410 | def seeding_torrents_info(self, keys): 411 | return self.torrents_info('seeding', keys) 412 | 413 | def torrents_info(self, status, keys): 414 | res = {} 415 | for torrent in self.call('list')['torrents']: 416 | if status == 'active' and torrent[8] <= 0: 417 | continue 418 | if status == 'seeding' and self.get_state(torrent[1], torrent[18]) != 'seeding': 419 | continue 420 | res[torrent[0].lower()] = {} 421 | for key in keys: 422 | if key in self.key_to_index: 423 | res[torrent[0].lower()][key] = torrent[self.key_to_index[key]] 424 | if 'state' in keys: 425 | res[torrent[0].lower()]['state'] = self.get_state(torrent[1], torrent[18]) 426 | if 'tracker' in keys: 427 | self.get_tracker(res) 428 | return res 429 | 430 | 431 | class MagicInfo(UserDict): 432 | def __init__(self, dic=None, **kwargs): 433 | super(MagicInfo, self).__init__(dic, **kwargs) 434 | self.c = False 435 | 436 | def __setitem__(self, key, value): 437 | if key in self.data: 438 | if 'uc' in value and 'uc' in self.data[key]: 439 | value['uc'] += self.data[key]['uc'] 440 | self.data[key].update(value) 441 | else: 442 | self.data[key] = value 443 | self.c = True 444 | 445 | def __set__(self, instance, value): 446 | self.data = value 447 | 448 | def del_unused(self): 449 | for _id in list(self.data.keys()): 450 | if 'ts' not in self.data[_id] or int(time()) - self.data[_id]['ts'] > 86400: 451 | del self.data[_id] 452 | self.c = True 453 | 454 | def cost(self): 455 | uc_cost = 0 456 | for _id, val in self.data.items(): 457 | uc_cost += val.get('uc') or 0 458 | return uc_cost 459 | 460 | def save(self): 461 | if self.c: 462 | with open(CONFIG['data_path'], 'w', encoding='utf-8') as fp: 463 | json.dump(self.data, fp) 464 | self.c = False 465 | 466 | def min_secs(self): 467 | total = self.cost() 468 | for _id, data in self.data.items(): 469 | total -= data.get('uc') or 0 470 | if total <= CONFIG['total_uc_max']: 471 | if 'ts' in data: 472 | return data['ts'] + 86400 - int(time()) 473 | 474 | 475 | class Request: 476 | u2_args = CONFIG['requests_args'] 477 | api_args = {'timeout': CONFIG['requests_args'].get('timeout'), 478 | 'proxy': CONFIG['requests_args'].get('proxy')} 479 | 480 | def __init__(self): 481 | self.session = None 482 | 483 | async def request(self, url, method='get', retries=5, **kwargs): 484 | """异步 http 请求 485 | :rtype: str | Dict[str, str | Dict[str, List[Dict[str, str | int | None]]]] 486 | 487 | Examples 488 | -------- 489 | >>> async with aiohttp.ClientSession() as self.session: 490 | ... tasks = [] 491 | ... for index in range(3): 492 | ... 
tasks.append(self.request(f"https://u2.dmhy.org/torrents.php?page={index}")) 493 | ... await asyncio.gather(*tasks) 494 | """ 495 | if url.startswith('https://u2.dmhy.org'): 496 | [kwargs.setdefault(key, val) for key, val in self.u2_args.items()] 497 | else: 498 | [kwargs.setdefault(key, val) for key, val in self.api_args.items()] 499 | kwargs.setdefault('timeout', 10) 500 | 501 | for i in range(retries + 1): 502 | try: 503 | async with self.session.request(method, url, **kwargs) as resp: 504 | if resp.status < 300: 505 | if url.startswith('https://u2.dmhy.org') and method == 'get': 506 | logger.debug(f'Downloaded page: {url}') 507 | return await (resp.text() if url.startswith('https://u2.dmhy.org') else resp.json()) 508 | else: 509 | logger.error(f'Incorrect status code <{resp.status}> | {url}') 510 | await asyncio.sleep(3) 511 | except Exception as e: 512 | if i == retries: 513 | logger.error(e) 514 | elif isinstance(e, asyncio.TimeoutError): 515 | kwargs['timeout'] += 20 516 | 517 | 518 | class MagicSeed(Request): 519 | magic_info = MagicInfo({}) 520 | instances = [] 521 | 522 | def __new__(cls, *args, **kwargs): 523 | _instance = super().__new__(cls) 524 | if cls == MagicSeed: 525 | cls.instances.append(_instance) 526 | return _instance 527 | 528 | def __init__(self, client): 529 | super(MagicSeed, self).__init__() 530 | self.client = client 531 | 532 | async def main(self): 533 | tasks = [] 534 | 535 | for _id, data in self.client.active_torrents_info( 536 | ['name', 'tracker', 'total_size', 'upload_payload_rate', 'state']).items(): 537 | 538 | if (_id not in self.magic_info # 魔法还在有效期内则不加入 539 | or 'ts' in self.magic_info[_id] and int(time()) - self.magic_info[_id]['ts'] >= 86400): 540 | if data['tracker'] and ('daydream.dmhy.best' in data['tracker'] 541 | or 'tracker.dmhy.org' in data['tracker']): # 过滤不是 U2 的种子 542 | magic_downloading = CONFIG['magic_for_self']['magic_downloading'] 543 | if magic_downloading or data['state'] not in ['Downloading', 'downloading']: # 过滤下载中的种子 544 | if data['upload_payload_rate'] >= CONFIG['magic_for_self']['min_rate'] * 1024: 545 | if data['total_size'] >= CONFIG['magic_for_self']['min_size'] * 1024 ** 3: 546 | tasks.append(self.check_torrent(_id, data['name'])) 547 | 548 | async with aiohttp.ClientSession() as self.session: 549 | res = await asyncio.gather(*tasks) 550 | self.magic_info.del_unused() 551 | 552 | magic_tasks = [self.send_magic(__id, _tid, {'user': 'SELF', 'hours': 24, 'ur': 2.33, 'dr': 1}) 553 | for __id, _tid, ur_233 in res if _tid and __id not in self.magic_info] 554 | async with aiohttp.ClientSession() as self.session: 555 | await asyncio.gather(*magic_tasks) 556 | self.magic_info.save() 557 | 558 | async def check_torrent(self, _id, name): 559 | if not CONFIG['api_token']: 560 | return await self.info_from_u2(_id, name) 561 | else: 562 | try: 563 | return await self.info_from_api(_id, name) 564 | except Exception as e: 565 | logger.exception(e) 566 | return await self.info_from_u2(_id, name) 567 | 568 | async def info_from_u2(self, _id, name): 569 | url = f'https://u2.dmhy.org/torrents.php' 570 | params = {'search': _id, 'search_area': 5} 571 | page = await self.request(url, params=params) 572 | soup = BeautifulSoup(page.replace('\n', ''), 'lxml') 573 | 574 | '''获取时区''' 575 | tz_info = soup.find('a', {'href': 'usercp.php?action=tracker#timezone'})['title'] 576 | pre_suf = [['时区', ',点击修改。'], ['時區', ',點擊修改。'], ['Current timezone is ', ', click to change.']] 577 | tz = [tz_info[len(pre):-len(suf)].strip() for pre, suf in pre_suf if 
tz_info.startswith(pre)][0] 578 | timezone = pytz.timezone(tz) 579 | 580 | table = soup.select('table.torrents') 581 | tid = None 582 | if table: 583 | cont = table[0].contents[1].contents 584 | tid = int(cont[1].a['href'][15:-6]) 585 | 586 | # 判断种子是否已有 2.33x 优惠 587 | for img in cont[1].select('tr')[1].td.select('img') or []: 588 | if img.get('class') == ['arrowup'] and float(img.next_element.text[:-1].replace(',', '.')) >= 2.33: 589 | logger.info(f'Torrent {_id}, id: {tid}: 2.33x upload magic existed!') 590 | time_tag = cont[1].time 591 | if time_tag: 592 | magicst = self.ts(time_tag.get('title') or time_tag.text, timezone) - 86400 593 | self.magic_info[_id] = {'ts': magicst} 594 | else: 595 | self.magic_info[_id] = {'ts': int(time()) + 86400 * 30} 596 | return _id, tid, True 597 | 598 | # 判断种子时间是否小于最小天数 599 | delta = time() - self.ts(cont[3].time.get('title') or cont[3].time.get_text(' '), timezone) 600 | if delta < CONFIG['magic_for_self']['min_d'] * 86400: 601 | self.magic_info[_id] = {'ts': int(time())} 602 | return _id, tid, False 603 | 604 | else: 605 | logger.error(f'Torrent {_id} , name: {name} was not found in u2...') 606 | self.magic_info[_id] = {'ts': int(time()) + 86400 * 3} 607 | return _id, tid, False 608 | 609 | async def info_from_api(self, _id, name): 610 | _param = {'uid': CONFIG['uid'], 'token': CONFIG['api_token']} 611 | 612 | history_data = await self.request('https://u2.kysdm.com/api/v1/history', 613 | params={**_param, 'hash': _id}) 614 | tid = None 615 | if history_data['data']['history']: 616 | tid = history_data['data']['history'][0]['torrent_id'] 617 | 618 | res = await self.request('https://u2.kysdm.com/api/v1/promotion_super', 619 | params={**_param, 'torrent_id': tid}) 620 | if float(res['data']['promotion_super'][0]['private_ratio'].split(' / ')[0]) >= 2.33: 621 | logger.info(f'Torrent {_id}, id: {tid}: 2.33x upload magic existed!') 622 | 623 | res = await self.request('https://u2.kysdm.com/api/v1/promotion_specific', 624 | params={**_param, 'torrent_id': tid}) 625 | pro_list = res['data']['promotion'] 626 | 627 | pro_end_time = time() 628 | for pro_data in pro_list: 629 | if float(pro_data['ratio'].split(' / ')[0]) >= 2.33: 630 | if not pro_data['for_user_id'] or pro_data['for_user_id'] == CONFIG['uid']: 631 | if not pro_data['expiration_time']: 632 | self.magic_info[_id] = {'ts': int(time()) + 86400 * 30} 633 | break 634 | else: 635 | end_time = self.ts(pro_data['expiration_time'].replace('T', ' ')) 636 | if self.ts(pro_data['from_time'].replace('T', ' ')) < time() < end_time: 637 | if end_time > pro_end_time: 638 | pro_end_time = end_time 639 | self.magic_info[_id] = {'ts': int(pro_end_time) - 86400} 640 | return _id, tid, True 641 | 642 | upload_date = history_data['data']['history'][0]['uploaded_at'] 643 | if time() - self.ts(upload_date.replace('T', ' ')) < CONFIG['magic_for_self']['min_d'] * 86400: 644 | self.magic_info[_id] = {'ts': int(time())} 645 | return _id, tid, False 646 | 647 | else: 648 | logger.error(f'Torrent {_id} , name: {name} was not found...') 649 | self.magic_info[_id] = {'ts': int(time()) + 86400 * 3} 650 | return _id, tid, False 651 | 652 | @staticmethod 653 | def ts(date, tz=pytz.timezone('Asia/Shanghai')): 654 | dt = datetime.strptime(date, '%Y-%m-%d %H:%M:%S') 655 | return tz.localize(dt).timestamp() 656 | 657 | async def send_magic(self, _id, tid, _data): 658 | """ 659 | Args: 660 | _id (str): 种子 hash 661 | tid (str | int): 种子 id 662 | _data (Dict[str, None | str | int | float]): 包含 ur, dr, hours, user 魔法信息的字典 663 | """ 664 | 
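        # The first POST below goes to promotion.php?test=1, which only prices the
        # magic: the JSON reply carries 'status' and the UC cost in 'price'.  That
        # cost is checked against CONFIG['uc_max'] and the rolling 24h total against
        # CONFIG['total_uc_max'] before the real POST to promotion.php casts the
        # magic and the timestamp is recorded in magic_info.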
data = {'action': 'magic', 'divergence': '', 'base_everyone': '', 'base_self': '', 'base_other': '', 665 | 'torrent': tid, 'tsize': '', 'ttl': '', 'user_other': '', 'start': 0, 'promotion': 8, 'comment': ''} 666 | data.update(_data) 667 | 668 | try: 669 | p1 = await self.request('https://u2.dmhy.org/promotion.php?test=1', method='post', data=data) 670 | res_json = json.loads(p1) 671 | if res_json['status'] == 'operational': 672 | uc = int(float(BeautifulSoup(res_json['price'], 'lxml').span['title'].replace(',', ''))) 673 | 674 | if uc > CONFIG['uc_max']: 675 | logger.warning(f'Torrent id: {tid} cost {uc}uc, too expensive | data: {data}') 676 | self.magic_info[_id] = {'ts': int(time())} 677 | return 678 | 679 | cost = self.magic_info.cost() 680 | if cost > CONFIG['total_uc_max']: 681 | secs = min(self.magic_info.min_secs(), 1800) 682 | logger.warning(f'24h ucoin usage exceeded, Waiting for {secs}s ------ | data: {data}') 683 | await asyncio.sleep(secs) 684 | return 685 | self.magic_info[_id] = {'uc': uc} 686 | 687 | p2 = await self.request('https://u2.dmhy.org/promotion.php', method='post', retries=0, data=data) 688 | if re.match(r'^$', p2): 689 | logger.info(f"Sent a {data['ur']}x upload and {data['dr']}x download " 690 | f"magic to torrent {_id}, tid: {tid}, user {data['user'].lower()}, " 691 | f"duration {data['hours']}h, uc usage {uc}, 24h total {cost + uc}") 692 | self.magic_info[_id] = {'ts': int(time())} 693 | else: 694 | logger.error(f'Failed to send magic to torrent {_id}, id: {tid} | data: {data}') 695 | self.magic_info[_id] = {'uc': -uc} 696 | if self.magic_info.cost() > CONFIG['total_uc_max']: 697 | self.magic_info.save() 698 | 699 | except Exception as e: 700 | logger.exception(e) 701 | 702 | def magic_for_self(self): 703 | loop = asyncio.new_event_loop() 704 | asyncio.set_event_loop(loop) 705 | while True: 706 | try: 707 | loop.run_until_complete(self.main()) 708 | except Exception as e: 709 | logger.exception(e) 710 | finally: 711 | gc.collect() 712 | sleep(CONFIG['magic_for_self']['interval']) 713 | 714 | 715 | class Run(MagicSeed): 716 | def __init__(self): 717 | super(Run, self).__init__(None) 718 | with open(CONFIG['data_path'], 'a', encoding='utf-8'): 719 | pass 720 | with open(CONFIG['data_path'], 'r', encoding='utf-8') as fp: 721 | try: 722 | self.magic_info = json.load(fp) 723 | except json.JSONDecodeError: 724 | pass 725 | self.clients = [] # 多线程调用同一个 deluge 就 segfault,没找到好的解决办法 726 | 727 | async def main(self): 728 | info = {} 729 | for client in self.clients: 730 | info.update(client.seeding_torrents_info(['name', 'total_seeds', 'tracker'])) 731 | 732 | _id_list = [] 733 | for _id, data in info.items(): 734 | if data['tracker'] and 'daydream.dmhy.best' in data['tracker'] or 'tracker.dmhy.org' in data['tracker']: 735 | if data['total_seeds'] <= CONFIG['magic_for_all']['max_seeder_num']: 736 | _id_list.append(_id) 737 | 738 | num = CONFIG['magic_for_all']['torrent_num'] 739 | if len(_id_list) >= num: 740 | _id_list = random.sample(_id_list, num) 741 | logger.info(f'Found {num} torrent which num of seeders < {num} --> {_id_list}') 742 | else: 743 | logger.info(f'There are only {len(_id_list)} torrents which num of seeders < {num} --> {_id_list}') 744 | 745 | tasks = [self.check_torrent(_id, info[_id]['name']) for _id in _id_list] 746 | async with aiohttp.ClientSession() as self.session: 747 | res = await asyncio.gather(*tasks) 748 | self.magic_info.del_unused() 749 | 750 | magic_tasks = [] 751 | hr = CONFIG['magic_for_all']['hours'] 752 | for __id, _tid, ur_233 in 
res: 753 | if _tid: 754 | if ur_233 and not ( 755 | 'ts' in self.magic_info[__id] 756 | and self.magic_info[__id]['ts'] + 86400 - time() < CONFIG['magic_for_all']['min_rm_hr'] * 3600 757 | ): 758 | magic_tasks.append(self.send_magic(__id, _tid, {'user': 'ALL', 'hours': hr, 'ur': 1, 'dr': 0})) 759 | elif CONFIG['magic_for_all']['233_all']: 760 | magic_tasks.append(self.send_magic(__id, _tid, {'user': 'ALL', 'hours': hr, 'ur': 2.33, 'dr': 0})) 761 | else: 762 | magic_tasks.append(self.send_magic(__id, _tid, {'user': 'ALL', 'hours': hr, 'ur': 1, 'dr': 0})) 763 | magic_tasks.append(self.send_magic(__id, _tid, {'user': 'SELF', 'hours': hr, 'ur': 2.33, 'dr': 1})) 764 | 765 | async with aiohttp.ClientSession() as self.session: 766 | await asyncio.gather(*magic_tasks) 767 | self.magic_info.save() 768 | 769 | def magic_for_all(self): 770 | loop = asyncio.new_event_loop() 771 | asyncio.set_event_loop(loop) 772 | while True: 773 | try: 774 | loop.run_until_complete(self.main()) 775 | except Exception as e: 776 | logger.exception(e) 777 | finally: 778 | gc.collect() 779 | sleep(CONFIG['magic_for_all']['interval']) 780 | 781 | def run(self): 782 | if CONFIG['magic_for_all']['enable'] and not CONFIG['magic_for_self']['enable']: 783 | self.magic_for_all() 784 | else: 785 | with ThreadPoolExecutor(max_workers=len(self.instances) + 1) as executor: 786 | if CONFIG['magic_for_all']['enable']: 787 | executor.submit(self.magic_for_all) 788 | [executor.submit(instance.magic_for_self) for instance in self.instances] 789 | 790 | def __enter__(self): 791 | return self 792 | 793 | def __exit__(self, exc_type, exc_val, exc_tb): 794 | if not isinstance(exc_val, KeyboardInterrupt): 795 | logger.exception(exc_val) 796 | os._exit(0) 797 | 798 | 799 | logger.add(level='DEBUG', sink=CONFIG['log_path'], rotation="5 MB") 800 | 801 | if sys.platform == 'win32': 802 | asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) 803 | 804 | class_to_name = {Deluge: ['de', 'Deluge', 'deluge'], 805 | Qbittorrent: ['qb', 'QB', 'qbittorrent', 'qBittorrent'], 806 | Transmission: ['tr', 'Transmission', 'transmission'], 807 | RuTorrent: ['ru', 'RuTorrent', 'rutorrent'], 808 | UTorrent: ['ut', 'UT', 'utorrent', 'uTorrent', 'µTorrent']} 809 | name_to_class = {name: cls for cls, lst in class_to_name.items() for name in lst} 810 | 811 | with Run() as r: 812 | for client_info in CONFIG['clients_info']: 813 | c_type = client_info['type'] 814 | del client_info['type'] 815 | MagicSeed(name_to_class[c_type](**client_info)) 816 | r.clients.append(name_to_class[c_type](**client_info)) 817 | r.run() 818 | -------------------------------------------------------------------------------- /catch_magic.py: -------------------------------------------------------------------------------- 1 | """必填参数只有 cookie,之后修改 BK_DIR 和 WT_DIR,即可运行 2 | 依赖 pip3 install requests lxml bs4 loguru pytz 3 | u2_api: https://github.com/kysdm/u2_api,自动获取 token: https://greasyfork.org/zh-CN/scripts/428545 4 | """ 5 | 6 | import gc 7 | import json 8 | import os 9 | import re 10 | import shutil 11 | import pytz 12 | 13 | from collections import deque 14 | from concurrent.futures import ThreadPoolExecutor, as_completed 15 | from datetime import datetime 16 | from time import sleep, time 17 | from typing import Dict, List, Union, Any 18 | 19 | from requests import get, ReadTimeout, ConnectTimeout 20 | from bs4 import BeautifulSoup 21 | from loguru import logger 22 | 23 | COOKIES = {'nexusphp_u2': ''} # type: Dict[str, str] 24 | '网站 cookie' 25 | BK_DIR = '/root/backup' # 
type: str 26 | '备份种子文件夹路径' 27 | WT_DIR = '/de/wt' # type: str 28 | 'BT 客户端监控文件夹' 29 | INTERVAL = 120 # type: Union[int, float] 30 | '检查魔法的时间间隔' 31 | API_TOKEN = '' # type: str 32 | '填了将默认通过 api 获取最新的魔法信息,否则直接从网页获取' 33 | UID = 50096 # type: int 34 | '访问 api 需要将此改为自己的 uid,否则不用管' 35 | RUN_CRONTAB = False # type: Any 36 | '如果为真,代表脚本不会死循环,运行一次脚本退出,需要以一定间隔运行脚本,主要解决内存问题;否则一直循环运行不退出' 37 | RUN_TIMES = 1 # type: int 38 | 'RUN_CRONTAB 为真时运行脚本一次 run 函数循环的次数,默认运行一次脚本结束,但如果频繁运行影响性能的话可以改大' 39 | PROXIES = {'http': '', 'https': ''} # type: Union[Dict[str, Union[str, None]], None] 40 | '代理' 41 | MAX_SEEDER_NUM = 5 # type: int 42 | '最大的做种人数,超过不下载' 43 | LOG_PATH = f'{os.path.splitext(__file__)[0]}.log' # type: str 44 | '日志文件路径' 45 | DATA_PATH = f'{os.path.splitext(__file__)[0]}.data.txt' # type: str 46 | '数据文件路径' 47 | DOWNLOAD_NON_FREE = False # type: Any 48 | '如果为真为下载不是 free 的种子,否则的话只下载 free 的种子' 49 | MIN_DAY = 7 # type: Union[int, float] 50 | '种子发布时间超过此天数判断为旧种子,否则判断为新种子' 51 | DOWNLOAD_OLD = True # type: Any 52 | '是否下载旧种子' 53 | DOWNLOAD_NEW = False # type: Any 54 | '是否下载新种子' 55 | MAGIC_SELF = False # type: Any 56 | '如果为真,会下载给自己放魔法的种子,否则不下载' 57 | EFFECTIVE_DELAY = 60 # type: Union[int, float] 58 | '如果该魔法是 free 并且生效时间在此之内,就算种子不是 free 也直接下载' 59 | DOWNLOAD_DEAD_TO = False # type: Any 60 | '默认不下载无人做种的旧种子(新种总有人做种,所以不考虑有没有人做种一律下载),如果要下载改成 True' 61 | RE_DOWNLOAD = True # type: Any 62 | '如果为 False,检测到备份文件夹有该种子则不再次下载' 63 | CHECK_PEERLIST = False # type: Any 64 | '检查 peer 列表,如果已经在做种或者在下载则不下载种子' 65 | DA_QIAO = True # type: Any 66 | '是否搭桥,如果搭桥,即使做种人数超过最大值魔法咒语有’搭桥‘或’加速‘也会下载' 67 | MIN_RE_DL_DAYS = 0 # type: Union[int, float] 68 | '离最近一次下载该种子的最小天数,小于这个天数不下载种子' 69 | CAT_FILTER = [] # type: List[str] 70 | '''种子类型为其中之一则下载,类型见 torrents.php,多个用逗号隔开,不填就不进行类型过滤,比如 ['BDMV', 'Lossless Music']''' 71 | SIZE_FILTER = [0, -1] # type: List[Union[int, float]] 72 | '体积过滤,第一个数为体积最小值(GB),第二个为最大值(GB),-1 表示不设上限' 73 | NAME_FILTER = [] # type: List[str] 74 | '''过滤种子标题,如果标题或者文件名中包含这些字符串之一则排除不下载,多个用逗号隔开,字符串要加引号,比如 ['BDrip']''' 75 | R_ARGS = {'cookies': COOKIES, 76 | 'headers': {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' 77 | 'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.5112.81 Safari/537.36'}, 78 | 'timeout': 20, 79 | 'proxies': PROXIES 80 | } 81 | 'requests 模块参数' 82 | MIN_ADD_INTERVAL = 0 # type: Union[int, float] 83 | '重复添加同一种子的最小时间间隔(s)' 84 | 85 | 86 | class CatchMagic: 87 | pre_suf = [['时区', ',点击修改。'], ['時區', ',點擊修改。'], ['Current timezone is ', ', click to change.']] 88 | 89 | def __init__(self): 90 | self.checked, self.magic_id_0, self.tid_add_time = deque([], maxlen=200), None, {} 91 | with open(DATA_PATH, 'a', encoding='utf-8'): 92 | pass 93 | with open(DATA_PATH, 'r', encoding='utf-8') as fp: 94 | try: 95 | data = json.load(fp) 96 | self.checked = deque(data['checked'], maxlen=200) 97 | self.magic_id_0 = data['id_0'] 98 | self.tid_add_time = data['add_time'] 99 | except json.JSONDecodeError: 100 | pass 101 | self.first_time = True 102 | 103 | def info_from_u2(self): 104 | all_checked = True if self.first_time and not self.magic_id_0 else False 105 | index = 0 106 | id_0 = self.magic_id_0 107 | 108 | while True: 109 | soup = self.get_soup(f'https://u2.dmhy.org/promotion.php?action=list&page={index}') 110 | user_id = soup.find('table', {'id': 'info_block'}).a['href'][19:] 111 | 112 | for i, tr in filter(lambda tup: tup[0] > 0, enumerate(soup.find('table', {'width': '99%'}))): 113 | magic_id = int(tr.contents[0].string) 114 | if index == 0 and i == 1: 115 | self.magic_id_0 = magic_id 116 | if self.first_time and id_0 
and magic_id - id_0 > 10 * INTERVAL: 117 | all_checked = True 118 | if tr.contents[5].string in ['Expired', '已失效'] or magic_id == id_0: 119 | all_checked = True 120 | break 121 | 122 | if tr.contents[1].string in ['魔法', 'Magic', 'БР']: 123 | if not tr.contents[3].a and tr.contents[3].string in ['所有人', 'Everyone', 'Для всех'] \ 124 | or MAGIC_SELF and tr.contents[3].a and tr.contents[3].a['href'][19:] == user_id: 125 | if tr.contents[5].string not in ['Terminated', '终止', '終止', 'Прекращён']: 126 | if tr.contents[2].a: 127 | tid = int(tr.contents[2].a['href'][15:]) 128 | if magic_id not in self.checked: 129 | if self.first_time and all_checked: 130 | self.checked.append(magic_id) 131 | else: 132 | yield magic_id, tid 133 | continue 134 | 135 | if magic_id not in self.checked: 136 | self.checked.append(magic_id) 137 | 138 | if all_checked: 139 | break 140 | else: 141 | index += 1 # 新增魔法数量不小于单页魔法数量 142 | 143 | def info_from_api(self): 144 | r_args = {'timeout': R_ARGS.get('timeout'), 'proxies': R_ARGS.get('proxies')} 145 | params = {'uid': UID, 'token': API_TOKEN, 'scope': 'public', 'maximum': 30} 146 | resp = get('https://u2.kysdm.com/api/v1/promotion', **r_args, params=params).json() 147 | pro_list = resp['data']['promotion'] 148 | if MAGIC_SELF: 149 | params['scope'] = 'private' 150 | resp1 = get('https://u2.kysdm.com/api/v1/promotion', **r_args, params=params).json() 151 | pro_list.extend([pro_data for pro_data in resp1['data']['promotion'] if pro_data['for_user_id'] == UID]) 152 | 153 | for pro_data in pro_list: 154 | magic_id = pro_data['promotion_id'] 155 | tid = pro_data['torrent_id'] 156 | if magic_id == self.magic_id_0: 157 | break 158 | if magic_id not in self.checked: 159 | if self.first_time and not self.magic_id_0: 160 | self.checked.append(magic_id) 161 | else: 162 | yield magic_id, tid 163 | self.magic_id_0 = pro_list[0]['promotion_id'] 164 | 165 | def all_effective_magic(self): 166 | id_0 = self.magic_id_0 167 | 168 | if not API_TOKEN: 169 | yield from self.info_from_u2() 170 | else: 171 | try: 172 | yield from self.info_from_api() 173 | except Exception as e: 174 | logger.exception(e) 175 | yield from self.info_from_u2() 176 | 177 | if self.magic_id_0 != id_0: 178 | with open(f'{DATA_PATH}', 'w', encoding='utf-8') as fp: 179 | json.dump({'checked': list(self.checked), 'id_0': self.magic_id_0, 180 | 'add_time': self.tid_add_time}, fp) 181 | self.first_time = False 182 | 183 | def dl_to(self, to_info): 184 | tid = to_info['dl_link'].split('&passkey')[0].split('id=')[1] 185 | 186 | if tid in self.tid_add_time: 187 | if time() - self.tid_add_time[tid] < MIN_ADD_INTERVAL: 188 | logger.info(f'Torrent {tid} | You have downloaded this torrent < {MIN_ADD_INTERVAL} s') 189 | 190 | if CHECK_PEERLIST and to_info['last_dl_time']: 191 | peer_list = self.get_soup(f'https://u2.dmhy.org/viewpeerlist.php?id={tid}') 192 | tables = peer_list.find_all('table') 193 | for table in tables or []: 194 | for tr in filter(lambda _tr: 'nowrap' in str(_tr), table): 195 | if tr.get('bgcolor'): 196 | logger.info(f"Torrent {tid} | you are already " 197 | f"{'downloading' if len(tr.contents) == 12 else 'seeding'} the torrent") 198 | return 199 | 200 | if f'[U2].{tid}.torrent' in os.listdir(BK_DIR): 201 | if not RE_DOWNLOAD: 202 | logger.info(f'Torrent {tid} | you have downloaded this torrent before') 203 | return 204 | else: 205 | with open(f'{BK_DIR}/[U2].{tid}.torrent', 'wb') as f: 206 | f.write(get(to_info['dl_link'], **R_ARGS).content) 207 | 208 | shutil.copy(f'{BK_DIR}/[U2].{tid}.torrent', 
f'{WT_DIR}/[U2].{tid}.torrent') 209 | logger.info(f"Download torrent {tid}, name {to_info['to_name']}") 210 | self.tid_add_time[tid] = time() 211 | 212 | @classmethod 213 | def get_tz(cls, soup): 214 | tz_info = soup.find('a', {'href': 'usercp.php?action=tracker#timezone'})['title'] 215 | tz = [tz_info[len(pre):-len(suf)].strip() for pre, suf in cls.pre_suf if tz_info.startswith(pre)][0] 216 | return pytz.timezone(tz) 217 | 218 | @staticmethod 219 | def timedelta(date, timezone): 220 | dt = datetime.strptime(date, '%Y-%m-%d %H:%M:%S') 221 | return time() - timezone.localize(dt).timestamp() 222 | 223 | @staticmethod 224 | def get_pro(td): 225 | pro = {'ur': 1.0, 'dr': 1.0} 226 | pro_dict = {'free': {'dr': 0.0}, '2up': {'ur': 2.0}, '50pct': {'dr': 0.5}, '30pct': {'dr': 0.3}, 'custom': {}} 227 | for img in td.select('img') or []: 228 | if not [pro.update(data) for key, data in pro_dict.items() if key in img['class'][0]]: 229 | pro[{'arrowup': 'ur', 'arrowdown': 'dr'}[img['class'][0]]] = float(img.next.text[:-1].replace(',', '.')) 230 | return list(pro.values()) 231 | 232 | @staticmethod 233 | def get_soup(url): 234 | magic_page = get(url, **R_ARGS).text 235 | if url != 'https://u2.dmhy.org/promotion.php?action=list&page=0': 236 | logger.debug(f'Download page: {url}') 237 | return BeautifulSoup(magic_page.replace('\n', ''), 'lxml') 238 | 239 | def analyze_magic(self, magic_id, tid): 240 | soup = self.get_soup(f'https://u2.dmhy.org/details.php?id={tid}') 241 | aa = soup.select('a.index') 242 | if len(aa) < 2: 243 | logger.info(f'Torrent {tid} | torrent deleted, passed') 244 | return 245 | to_info = {'to_name': aa[0].text[5:-8], 'dl_link': f"https://u2.dmhy.org/{aa[1]['href']}"} 246 | 247 | if NAME_FILTER: 248 | title = soup.find('h1', {'align': 'center', 'id': 'top'}).text 249 | if any(st in title or st in to_info['to_name'] for st in NAME_FILTER): 250 | logger.debug(f'Torrent {tid} | torrent excluded by NAME_FILTER') 251 | return 252 | 253 | if CAT_FILTER: 254 | cat = soup.time.parent.contents[7].strip() 255 | if cat not in CAT_FILTER: 256 | logger.debug(f'Torrent {tid} | torrent category {cat} does not match, passed') 257 | return 258 | 259 | if SIZE_FILTER and not (SIZE_FILTER[0] <= 0 and SIZE_FILTER[1] == -1): 260 | size_str = soup.time.parent.contents[5].strip().replace(',', '.').replace('Б', 'B') 261 | [num, unit] = size_str.split(' ') 262 | _pow = ['MiB', 'GiB', 'TiB', '喵', '寄', '烫', 'egamay', 'igagay', 'eratay'].index(unit) % 3 263 | gb = float(num) * 1024 ** (_pow - 1) 264 | if gb < SIZE_FILTER[0] or SIZE_FILTER[1] != -1 and gb > SIZE_FILTER[1]: 265 | logger.debug(f'Torrent {tid} | torrent size {size_str} does not match, passed') 266 | return 267 | 268 | if CHECK_PEERLIST or MIN_RE_DL_DAYS > 0: 269 | for tr in soup.find('table', {'width': '90%'}): 270 | if tr.td.text in ['My private torrent', '私人种子文件', '私人種子文件', 'Ваш личный торрент']: 271 | time_str = tr.find_all('time') 272 | if not time_str: 273 | to_info['last_dl_time'] = None 274 | else: 275 | date = time_str[1].get('title') or time_str[1].text 276 | to_info['last_dl_time'] = time() - self.timedelta(date, self.get_tz(soup)) 277 | if MIN_RE_DL_DAYS > 0 and to_info['last_dl_time']: 278 | if time() - to_info['last_dl_time'] < 86400 * MIN_RE_DL_DAYS: 279 | logger.debug(f"Torrent {tid} | You have downloaded this torrent " 280 | f"{(time() - to_info['last_dl_time']) // 86400} days before, passed") 281 | return 282 | 283 | delta = self.timedelta(soup.time.get('title') or soup.time.text, self.get_tz(soup)) 284 | seeder_count = 
int(re.search(r'(\d+)', soup.find('div', {'id': 'peercount'}).b.text).group(1)) 285 | magic_page_soup = None 286 | 287 | if delta < MIN_DAY * 86400: 288 | if DOWNLOAD_NEW: 289 | if seeder_count > MAX_SEEDER_NUM: 290 | logger.debug(f'Torrent {tid} | seeders > {MAX_SEEDER_NUM}, passed') 291 | else: 292 | if [self.get_pro(tr.contents[1])[1] for tr in soup.find('table', {'width': '90%'}) 293 | if tr.td.text in ['流量优惠', '流量優惠', 'Promotion', 'Тип раздачи (Бонусы)']][0] > 0: 294 | logger.debug(f'torrent {tid} | is not free, passed') 295 | else: 296 | self.dl_to(to_info) 297 | else: 298 | logger.debug(f'Torrent {tid} | time < {MIN_DAY} days, passed') 299 | return 300 | elif not DOWNLOAD_OLD: 301 | logger.debug(f'Torrent {tid} | time > {MIN_DAY} days, passed') 302 | return 303 | 304 | if not DOWNLOAD_NON_FREE: 305 | if [self.get_pro(tr.contents[1])[1] for tr in soup.find('table', {'width': '90%'}) 306 | if tr.td.text in ['流量优惠', '流量優惠', 'Promotion', 'Тип раздачи (Бонусы)']][0] > 0: 307 | logger.debug(f'torrent {tid} | is not free, will pass if no free magic in delay.') 308 | magic_page_soup = self.get_soup(f'https://u2.dmhy.org/promotion.php?action=detail&id={magic_id}') 309 | tbody = magic_page_soup.find('table', {'width': '75%', 'cellpadding': 4}).tbody 310 | if self.get_pro(tbody.contents[6].contents[1])[1] == 0: 311 | time_tag = tbody.contents[4].contents[1].time 312 | delay = -self.timedelta(time_tag.get('title') or time_tag.text, self.get_tz(magic_page_soup)) 313 | if -1 < delay < EFFECTIVE_DELAY: 314 | logger.debug(f'Torrent {tid} | free magic {magic_id} will be effective in {int(delay)}s') 315 | else: 316 | return 317 | else: 318 | return 319 | 320 | if seeder_count > 0 or DOWNLOAD_DEAD_TO: 321 | if seeder_count <= MAX_SEEDER_NUM: 322 | self.dl_to(to_info) 323 | return 324 | elif DA_QIAO: 325 | if not magic_page_soup: 326 | magic_page_soup = self.get_soup(f'https://u2.dmhy.org/promotion.php?action=detail&id={magic_id}') 327 | comment = magic_page_soup.legend.parent.contents[1].text 328 | if '搭' in comment and '桥' in comment or '加' in comment and '速' in comment: 329 | user = magic_page_soup.select('table.main bdo')[0].text 330 | logger.info(f'Torrent {tid} | user {user} is looking for help, downloading...') 331 | self.dl_to(to_info) 332 | return 333 | logger.debug(f'Torrent {tid} | seeders > {MAX_SEEDER_NUM}, passed') 334 | else: 335 | logger.debug(f'Torrent {tid} | no seeders, passed') 336 | 337 | def run(self): 338 | id_0 = self.magic_id_0 339 | with ThreadPoolExecutor(max_workers=6) as executor: 340 | futures = {executor.submit(self.analyze_magic, magic_id, tid): magic_id 341 | for magic_id, tid in self.all_effective_magic()} 342 | if futures: 343 | error = False 344 | for future in as_completed(futures): 345 | try: 346 | future.result() 347 | self.checked.append(futures[future]) 348 | except Exception as er: 349 | error = True 350 | if isinstance(er, (ReadTimeout, ConnectTimeout)): 351 | logger.error(er) 352 | else: 353 | logger.exception(er) 354 | if error: 355 | self.magic_id_0 = id_0 356 | with open(f'{DATA_PATH}', 'w', encoding='utf-8') as fp: 357 | json.dump({'checked': list(self.checked), 'id_0': self.magic_id_0, 358 | 'add_time': self.tid_add_time}, fp) 359 | 360 | 361 | @logger.catch() 362 | def main(catch): 363 | for _ in range(RUN_TIMES): 364 | try: 365 | catch.run() 366 | except Exception as e: 367 | logger.error(e) 368 | finally: 369 | if _ != RUN_TIMES - 1 or not RUN_CRONTAB: 370 | gc.collect() 371 | sleep(INTERVAL) 372 | 373 | 374 | logger.add(level='DEBUG', sink=LOG_PATH, 
rotation='2 MB') 375 | 376 | c = CatchMagic() 377 | if RUN_CRONTAB: 378 | main(c) 379 | else: 380 | while True: 381 | main(c) 382 | -------------------------------------------------------------------------------- /download_new_torrents.py: -------------------------------------------------------------------------------- 1 | # python 版本 3.6 及以上,依赖: pip3 install requests bs4 lxml loguru pymongo 2 | # 自己用了一下还可行,虽然我是直接加到 deluge... 3 | 4 | import os 5 | import re 6 | from collections import deque 7 | 8 | import pytz 9 | 10 | from datetime import datetime as dt 11 | from functools import wraps 12 | from time import sleep, time 13 | from bs4 import BeautifulSoup 14 | from loguru import logger 15 | from requests import get 16 | from base64 import b64encode 17 | from deluge_client import LocalDelugeRPCClient 18 | 19 | from my_bencoder import bdecode 20 | 21 | # *************************必填配置************************ 22 | cookies = {'nexusphp_u2': ''} 23 | passkey = '' 24 | save_path = '/de/wt' # 存放种子文件的文件夹,可以用 bt 客户端监控 25 | download_location = '/de/dl' # 存放下载内容的文件夹 26 | 27 | # ************************可修改配置*********************** 28 | proxies = { # 代理 29 | # 'http': 'http://127.0.0.1:10809', 'https': 'http://127.0.0.1:10809' 30 | } 31 | headers = { 32 | 'authority': 'u2.dmhy.org', 33 | 'accept-encoding': 'gzip, deflate', 34 | 'accept-language': 'zh-CN,zh;q=0.8', 35 | 'referer': 'https://u2.dmhy.org/index.php', 36 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) ' 37 | 'Chrome/98.0.4758.102 Safari/537.36 Edg/98.0.1108.62' 38 | } 39 | interval = 300 # 爬网页的间隔 40 | mgdb = False # 将种子数据保存到 mongodb,需要安装 mongodb 数据库 41 | download_sticky = True # 是否下载顶置 42 | download_no_seeder_sticky = True # 是否下载无人做种的顶置,为 True 时还是不会下载平均进度为 0 的种子 43 | download_no_free_sticky = True # 是否下载不是 free 的顶置种子 44 | download_no_free_non_sticky = False # 是否下载不是 free 的非顶置种子 45 | eval_all_keys = False # 获取种子所有信息,不开的话用到哪个就获取哪个 46 | 47 | # *************************日志设置************************ 48 | log_path = f'{os.path.splitext(__file__)[0]}.log' 49 | logger.add(level='DEBUG', rotation='2 MB', sink=log_path) 50 | 51 | # ***********************程序保存数据********************** 52 | data_path = f'{os.path.splitext(__file__)[0]}.data.txt' # 数据保存文件 53 | checked = deque([], maxlen=300) 54 | '''如果某个种子的 id 在 checked 里,那么该种子详细页不会爬第二次, 55 | 如果去获取只有详细页才有的信息,比如 info_hash 值,会返回 None''' 56 | added = deque([], maxlen=300) 57 | '''如果某个种子的 id 在 added 里,那么筛选种子的时候会跳过''' 58 | add_client = True 59 | '''True 即直接添加到客户端,只支持 deluge, qb 懒得写了 60 | 如果默认下载目录下有同名文件,则新建一个目录下载,主要是防止 rev 的种子超速 61 | False 则将种子下载到监控文件夹''' 62 | client = LocalDelugeRPCClient( 63 | '127.0.0.1', # IP 64 | 58846, # daemon port 65 | 'localclient', # username 66 | '' # password, cat ~/.config/deluge/auth 67 | ) 68 | 'add_client 为真时需要填写' 69 | 70 | # *************************END**************************** 71 | 72 | 73 | def write_list(name): 74 | if name in globals(): 75 | with open(data_path, 'r', encoding='utf-8') as f1, open(f'{data_path}.bak', 'w', encoding='utf-8') as f2: 76 | k = 0 77 | for _line in f1: 78 | if _line.startswith(name): 79 | k = 1 80 | f2.write(f'{name} = {globals()[name]}\n') 81 | else: 82 | f2.write(_line) 83 | if k == 0: 84 | f2.write(f'{name} = {globals()[name]}\n') 85 | os.remove(data_path) 86 | os.rename(f'{data_path}.bak', data_path) 87 | 88 | 89 | def get_url(url): 90 | try: 91 | html = get(url, cookies=cookies, headers=headers, proxies=proxies, timeout=20) 92 | if html.status_code < 400: 93 | if url != 
'https://u2.dmhy.org/torrents.php': 94 | logger.info(f'download page {url}') 95 | return html.text 96 | except Exception as e: 97 | logger.error(e) 98 | 99 | 100 | def call_retry(_client, method, *args, **kwargs): 101 | if not _client.connected: 102 | for _ in range(5): 103 | _client.reconnect() 104 | logger.debug(f'Connected to deluge host ------ {_client.host}') 105 | break 106 | return _client.call(method, *args, **kwargs) 107 | 108 | 109 | detail_key_dict = { 110 | 'filename': ['下载', '下載', 'Download', 'Скачивание'], 111 | 'author': ['发布人', '發佈人', '發布人', 'Uploader', 'Загрузил'], 112 | 'hash': ['种子信息', '種子訊息', 'Torrent Info', 'Информация о торренте'], 113 | 'description': ['描述', '描述', 'Description', 'Описание'], 114 | 'progress': ['活力度', 'Health', 'Целостность'], 115 | 'geoips': ['同伴', 'Peers', 'Всего Участников'] 116 | } 117 | 118 | 119 | class U2Web: 120 | def __init__(self): 121 | self.keys = [key[1:] for key, obj in type(self).__dict__.items() 122 | if isinstance(obj, property) and key.startswith('_')] 123 | self.info = {} 124 | self.tr = None 125 | self.tr1 = None 126 | self.trs = None 127 | self.d_url = None 128 | self.t_url = None 129 | self.table1 = [] 130 | self.tz = '' 131 | self.passkey = passkey 132 | 133 | def __getattr__(self, item): 134 | if item in self.keys: 135 | return getattr(self, f'_{item}') 136 | else: 137 | raise KeyError(f'Key {item} is not supported. These are all supported keys: {self.keys}') 138 | 139 | def torrent_page(self): 140 | page = get_url('https://u2.dmhy.org/torrents.php') 141 | soup = BeautifulSoup(page.replace('\n', ''), 'lxml') 142 | tz_info = soup.find('a', {'href': 'usercp.php?action=tracker#timezone'})['title'] 143 | pre_suf = [['时区', ',点击修改。'], ['時區', ',點擊修改。'], ['Current timezone is ', ', click to change.']] 144 | self.tz = [tz_info[len(pre):][:-len(suf)].strip() for pre, suf in pre_suf if tz_info.startswith(pre)][0] 145 | 146 | table = soup.select('table.torrents')[0] 147 | for self.tr in table.contents[1:]: 148 | self.info = {} 149 | if self.tid not in added: # 过滤已经添加的种子 150 | self.trs = str(self.tr) 151 | yield self.tr 152 | 153 | def _seeding(self): # 是否正在做种 154 | return bool('seedhlc_current' in self.trs) 155 | 156 | def _leeching(self): # 是否正在下载 157 | return bool('leechhlc_current' in self.trs) 158 | 159 | def _sticky(self): # 是否顶置 160 | return bool('sticky' in self.trs) 161 | 162 | ''' 163 | def _hot(self): # 是否热门 164 | return bool(self.tr.select('span.hot')) 165 | ''' 166 | 167 | def _incomplete(self): # 是否曾经未完成 168 | return bool('incomplete' in self.trs) 169 | 170 | def _completed(self): # 是否曾经完成 171 | return bool('snatchhlc_finish' in self.trs) 172 | 173 | def _auxseed(self): # 是否曾经辅种 174 | return bool('snatchhlc_auxseed' in self.trs) 175 | 176 | def _tid(self): # 种子 id 177 | return int(self.tr.contents[1].a['href'][15:-6]) 178 | 179 | def _title(self): # 标题 180 | return self.tr.contents[1].a.text 181 | 182 | def _small_descripton(self): # 副标题 183 | tooltip = self.tr.find('span', {'class': 'tooltip'}) 184 | return tooltip.text if tooltip else None 185 | 186 | def _seeder_num(self): # 做种数 187 | return int(self.tr.contents[5].string) 188 | 189 | def _leecher_num(self): # 下载数 190 | return int(self.tr.contents[6].contents[0].string) 191 | 192 | def _completes(self): # 完成数 193 | return int(self.tr.contents[7].string) 194 | 195 | def _date(self): # 发布日期(字符串) 196 | return self.tr.contents[3].time.get('title') or self.tr.contents[3].time.get_text(' ') 197 | 198 | def _size(self): # 体积(字符串) 199 | return self.tr.contents[4].get_text(' ') 200 
| 201 | def _promotion(self): # 上传下载比率 202 | pro = {'ur': 1.0, 'dr': 1.0} 203 | pro_dic = {'free': {'dr': 0.0}, 'twoup': {'ur': 2.0}, 'halfdown': {'dr': 0.5}, 'thirtypercent': {'dr': 0.3}} 204 | if self.tr.get('class'): 205 | [pro.update(data) for key, data in pro_dic.items() if key in self.tr['class'][0]] 206 | td = self.tr.tr and self.tr.select('tr')[1].td or self.tr.select('td')[1] 207 | pro_dic_1 = {'free': {'dr': 0.0}, '2up': {'ur': 2.0}, '50pct': {'dr': 0.5}, '30pct': {'dr': 0.3}, 'custom': {}} 208 | for img in td.select('img') or []: 209 | if not [pro.update(data) for key, data in pro_dic_1.items() if key in img['class'][0]]: 210 | pro[{'arrowup': 'ur', 'arrowdown': 'dr'}[img['class'][0]]] = float(img.next.text[:-1].replace(',', '.')) 211 | for span in td.select('span') or []: 212 | [pro.update(data) for key, data in pro_dic.items() if key in (span.get('class') and span['class'][0] or '')] 213 | return list(pro.values()) 214 | 215 | def _torrentsign(self): # 种子签名 216 | if 'torrentsign' in self.trs: 217 | return self.tr.select('span.torrentsign')[0].text 218 | 219 | def _pro_end_date(self): # 优惠结束时间 220 | if self.tr.contents[1].time: 221 | return self.tr.contents[1].time.get('title') or self.tr.contents[1].time.text 222 | 223 | def _ani_link(self): # anidb 链接 224 | td = self.tr.select('tr')[1].contents[1] 225 | if td.string: 226 | return td.a['href'] 227 | 228 | def _rating(self): # anidb 评分 229 | num = self.tr.select('tr')[1].contents[1].string 230 | if num not in (None, ' - '): 231 | return float(num) 232 | 233 | def detail_page(self): # 详情页很多地方结构的不固定,可能用正则表达式可能会好点? 234 | if self.tid not in checked: 235 | self.d_url = self.t_url 236 | soup = BeautifulSoup(get_url(self.d_url).replace('\n', ''), 'lxml') 237 | self.passkey = soup.select('a.index')[1]['href'].split('&passkey=')[1][:-8] 238 | self.table1 = soup.find('table', {'width': '90%'}) 239 | checked.append(self.info['tid']) 240 | write_list('checked') 241 | for self.tr1 in self.table1: 242 | yield self.tr1 243 | 244 | def _filename(self): # 种子内容文件名 245 | return self.tr1.a.text[5:-8] 246 | 247 | def _author(self): # 发布者 uid 248 | if not any(a in str(self.tr1) for a in ['匿名', 'torrentsign', 'Anonymous', 'Анонимно']): 249 | return self.tr1.s and self.tr1.s.text or self.tr1.a['href'][19:] 250 | 251 | ''' 252 | def _descrption(self): # 详细描述 253 | return self.tr1.bdo.text 254 | ''' 255 | 256 | def _hash(self): # info_hash 257 | return self.tr1.tr.contents[-2].contents[1].strip() 258 | 259 | def _progress(self): # (包括做种者在内的) 平均进度 260 | if not any(st in str(self.tr1) for st in ['没有流量', '沒有流量', 'No Traffic', 'Не зафиксировано']): 261 | return int(self.tr1.b.previous_element.strip()[1:-2]) 262 | 263 | def _geoips(self): # 做种者的地理位置信息 264 | if int(re.search(r'(\d+)', self.tr1.b.text).group(1)) > 0: 265 | peerlist = get_url(f'https://u2.dmhy.org/viewpeerlist.php?id={self.tid}') 266 | table = BeautifulSoup(peerlist.replace('\n', ' '), 'lxml').table 267 | ips = [] 268 | for tr in filter(lambda _tr: 'nowrap' in str(_tr), table): 269 | ip = {} 270 | for i, span in enumerate(tr.contents[0]): 271 | if i == 0: 272 | ip['user'] = tr.i and str(tr.i) or tr.bdo.text # 用户名 273 | else: 274 | ip[span['class'][1]] = span['title'] 275 | ips.append(ip) 276 | return ips 277 | 278 | @property 279 | def secs(self): # 发布时间到现在的间隔(s),不是 property 280 | tm = dt.strptime(self.date, '%Y-%m-%d %H:%M:%S') 281 | return int(time() - pytz.timezone(self.tz).localize(tm).timestamp()) 282 | 283 | @property 284 | def gbs(self): # 种子体积(gb),不是 property 285 | [num, unit] = 
self.size.split(' ') 286 | _pow = ['MiB', 'GiB', 'TiB', '喵', '寄', '烫', 'MiБ', 'GiБ', 'TiБ', 'egamay', 'igagay', 'eratay'].index(unit) % 3 287 | return float(num.replace(',', '.')) * 1024 ** (_pow - 1) 288 | 289 | def select_torrent(self): 290 | """ 291 | 选择种子,符合条件返回 True。有些值可能为空 292 | 规则自己写吧,反正应该很好懂,看这个逻辑也不是很方便能用配置描述.... 293 | """ 294 | if not self.seeding and not self.leeching: # 过滤下载中和做种中的种子 295 | if self.sticky: # 顶置种子 296 | if download_sticky: 297 | if not download_no_free_sticky and self.promotion[1] > 0: 298 | return 299 | if self.seeder_num > 0: # 做种数大于 0 ,直接下载 300 | return True 301 | if download_no_seeder_sticky: 302 | return 303 | if self.leecher_num > 5: 304 | if self.progress is not None and self.progress > 0: 305 | # 做种数小于于 0 ,检查下载者进度,如果平均进度全为 0 不下载 306 | return True 307 | else: 308 | if not download_no_free_non_sticky and self.promotion[1] > 0: 309 | return 310 | if self.secs < 2 * interval: # 发布不久,直接下载 311 | return True 312 | if self.seeder_num < 10 or self.leecher_num > 20: # 根据做种数和下载数判断是否要下载 313 | if self.progress is not None and self.progress < 30: # 检查平均进度 314 | return True 315 | 316 | def rss(self): 317 | while True: 318 | try: 319 | for self.tr in self.torrent_page(): 320 | if self.select_torrent(): 321 | sv = f'{save_path}/[U2].{self.tid}.torrent' 322 | link = f'https://u2.dmhy.org/download.php?id={self.tid}&passkey={self.passkey}&https=1' 323 | content = get(link, headers=headers, proxies=proxies).content 324 | 325 | if not add_client: 326 | with open(sv, 'wb') as to: 327 | to.write(content) 328 | logger.info(f'add torrent {self.tid}') 329 | else: 330 | down_loc = download_location 331 | name = '' 332 | try: 333 | name = bdecode(content)[b'info'][b'name'].decode() 334 | except: 335 | pass 336 | if client.host in ('127.0.0.1', 'localhost'): 337 | if not name or name in os.listdir(down_loc): 338 | i = 0 339 | while True: 340 | new_loc = f'{download_location}/.{i}' 341 | if not os.path.exists(new_loc): 342 | os.mkdir(new_loc) 343 | down_loc = new_loc 344 | break 345 | if name and name not in os.listdir(new_loc): 346 | down_loc = new_loc 347 | break 348 | i += 1 349 | call_retry( 350 | client, 351 | 'core.add_torrent_file', 352 | f'[U2].{self.tid}.torrent', 353 | b64encode(content), 354 | {'add_paused': False, 'download_location': down_loc} 355 | ) 356 | logger.info(f'Add torrent {self.tid} via DelugeClient') 357 | 358 | added.append(self.tid) 359 | write_list('added') 360 | if eval_all_keys: # 获取种子所有信息 361 | for _key in self.keys: 362 | getattr(self, _key) 363 | for key in list(self.info.keys()): # 删掉空的键 364 | if not self.info[key]: 365 | del self.info[key] 366 | if mgdb: 367 | col.insert_one(self.info) 368 | else: 369 | logger.debug(f'----------- torrent info ----------\n{self.info}') 370 | except Exception as e: 371 | logger.exception(e) 372 | finally: 373 | sleep(interval) 374 | 375 | def value(func): 376 | @property 377 | @wraps(func) 378 | def wrapper(self): 379 | name = func.__name__[1:] 380 | if name not in self.info: # sel.info 中没有这个 key,说明之前没有获取 381 | if name in detail_key_dict: # key 只有详细页才有 382 | self.t_url = f'https://u2.dmhy.org/details.php?id={self.tid}&hit=1' 383 | if self.tid in checked and self.d_url != self.t_url: 384 | # 已经检查过一次,并且详情页不在内存中,返回 None 385 | self.info[name] = None 386 | else: 387 | for self.tr1 in self.detail_page(): 388 | if any(word in self.tr1.td.text for word in detail_key_dict[name]): 389 | self.info[name] = func(self) 390 | break 391 | else: 392 | self.info[name] = None 393 | else: 394 | self.info[name] = func(self) 395 | return 
self.info.get(name) 396 | return wrapper 397 | 398 | for name in list(vars()): 399 | obj = vars()[name] 400 | if hasattr(type(obj), '__get__') and not hasattr(type(obj), '__set__'): 401 | if name.startswith('_') and not (name.startswith('__') and name.endswith('__')): 402 | vars()[name] = value(obj) 403 | 404 | del value, name, obj 405 | 406 | 407 | if __name__ == '__main__': 408 | if mgdb: 409 | import pymongo 410 | dbclient = pymongo.MongoClient('mongodb://localhost:27017/') 411 | base = dbclient['U2'] 412 | col = base['torrent_info'] 413 | torrent_info = col.find().sort('_id', -1).limit(50) 414 | added.extend([info['tid'] for info in torrent_info]) 415 | write_list('added') 416 | 417 | with open(data_path, 'a', encoding='utf-8'): 418 | pass 419 | with open(data_path, 'r', encoding='utf-8') as f: 420 | for line in f: 421 | if any(line.startswith(var) for var in ['added', 'checked']): 422 | exec(line) 423 | 424 | u2 = U2Web() 425 | u2.rss() 426 | -------------------------------------------------------------------------------- /find_torrent.py: -------------------------------------------------------------------------------- 1 | """ 2 | 根据根目录名添加种子,适用于种子数据丢失或者辅种 3 | """ 4 | import asyncio 5 | import json 6 | import os 7 | from time import sleep 8 | 9 | from typing import Dict 10 | 11 | import aiohttp 12 | import qbittorrentapi 13 | from loguru import logger 14 | 15 | rename = False # 是否改名 16 | always_add = False # 检测到种子缺失文件,是否任然添加(校验后需要下载) 17 | host = 'localhost' # qb 主机 ip 18 | port = 8080 # qb 端口 19 | username = '' # qb webui 用户名 20 | password = '' # qb webui 密码 21 | token = '' # 三方 api token,获取: https://greasyfork.org/zh-CN/scripts/428545 22 | uid = 50096 # 自己的 uid 23 | passkey = '' # passkey 24 | src_path = 'G:\\BDMV' # 包含种子内容的文件 25 | proxy = '' # 'http://127.0.0.1:10809' # 代理 26 | char_map = { 27 | '?': '?', 28 | '*': '٭', 29 | '<': '《', 30 | '>': '》', 31 | ':': ':', 32 | '"': "'", 33 | '/': '/', 34 | '\\': '/', 35 | '|': '│' 36 | } # Windows 不支持字符的替换规则 37 | os_rename = True # 是否直接通过 os 重命名文件夹而不是使用 qb 重命名 38 | 39 | logger.add(sink=f'{os.getcwd()}\\logs\\find_torrent-{{time}}.log', level='DEBUG') 40 | client = qbittorrentapi.Client(host=host, port=port, username=username, password=password) 41 | 42 | 43 | def check_files(path: str, torrent_tree: Dict): 44 | it = True 45 | paths = [path] 46 | 47 | def _check_files(info: dict): 48 | nonlocal it 49 | for k, v in info.items(): 50 | paths.append(k) 51 | if v['type'] == 'directory': 52 | _check_files(v["children"]) 53 | else: 54 | path = '/'.join(paths) 55 | if not os.path.exists(path) or os.path.getsize(path) != v['length']: 56 | it = False 57 | return 58 | paths.pop(-1) 59 | 60 | _check_files(torrent_tree) 61 | return it 62 | 63 | 64 | hashes = {torrent.hash for torrent in client.torrents_info(status_filter='completed')} 65 | 66 | 67 | async def find_torrent(fn: str, session: aiohttp.ClientSession, sem: asyncio.Semaphore): 68 | data = {'uid': uid, 'token': token, 'torrent_name': fn} 69 | async with sem: 70 | async with session.post('https://u2.kysdm.com/api/v1/search_torrent_name', json=data, proxy=proxy) as response: 71 | _json = await response.json() 72 | torrents = _json['data']['torrents'] 73 | if torrents: 74 | for torrent in torrents: 75 | if not torrent['torrent_tree']: 76 | continue 77 | if always_add or check_files(src_path, json.loads(torrent['torrent_tree'])): 78 | tid = torrent['torrent_id'] 79 | _id = torrent['torrent_hash'] 80 | name = torrent['torrent_name'] 81 | logger.info(f'文件名 {fn} 搜索到对应的种子, id 为 {tid}') 82 | 83 | if _id 
in hashes: 84 | logger.info(f'文件名 {fn} 对应的种子已在客户端') 85 | else: 86 | dl_link = f'https://u2.dmhy.org/download.php?id={tid}&passkey={passkey}&https=1' 87 | try: 88 | async with sem: 89 | async with session.get(dl_link, proxy=proxy) as response: 90 | content = await response.read() 91 | client.torrents_add(torrent_files=content, save_path=src_path, is_paused=True) 92 | except Exception as e: 93 | logger.error(e) 94 | logger.info(f'已添加种子, id 为 {tid}') 95 | await asyncio.sleep(0.1) # 如果不 sleep 可能报错没有这个种子 96 | 97 | if rename: 98 | title = ''.join(char_map.get(char) or char for char in torrent['title']) 99 | try: 100 | client.torrents_rename(_id, title) 101 | logger.info(f"成功重命名种子名称 {name} -> {title}") 102 | except Exception as e: 103 | logger.error(e) 104 | if os.path.isdir(f'{src_path}/{name}'): 105 | try: 106 | if os_rename: 107 | os.renames(f'{src_path}/{name}', f'{src_path}/{title}') 108 | client.torrents_rename_folder(_id, name, title) 109 | logger.info(f"成功重命名种子文件夹 {name} -> {title}") 110 | except Exception as e: 111 | logger.error(e) 112 | else: 113 | try: 114 | os.rename(f'{src_path}/{name}', f'{src_path}/{title}') 115 | client.torrents_rename_file(_id, 0, title) 116 | logger.info(f"成功重命名种子文件 {name} -> {title}") 117 | except Exception as e: 118 | logger.error(e) 119 | break 120 | else: 121 | logger.warning( 122 | f'文件名 {fn} 缺少文件, 可能的种子 id 有 {tuple(torrent["torrent_id"] for torrent in torrents)}') 123 | else: 124 | logger.debug(f'文件名 {fn} 未搜索到对应的种子') 125 | 126 | 127 | async def main(): 128 | tasks = [] 129 | sem = asyncio.Semaphore(20) 130 | async with aiohttp.ClientSession() as session: 131 | for fn in os.listdir(src_path): 132 | tasks.append(asyncio.ensure_future(find_torrent(fn, session, sem))) 133 | await asyncio.gather(*tasks) 134 | 135 | 136 | if __name__ == '__main__': 137 | asyncio.run(main()) 138 | -------------------------------------------------------------------------------- /give_sugar.py: -------------------------------------------------------------------------------- 1 | """发糖脚本,用于在论坛或者种子评论区发糖,可以随时停止和重新运行 2 | 解析回复内容会自动去掉引用、代码、链接,但不会去掉折叠内容 3 | """ 4 | 5 | import json 6 | import os 7 | import random 8 | import re 9 | from time import sleep 10 | from typing import Union, Dict, List, Tuple, Callable, Any 11 | 12 | import bs4.element 13 | import requests 14 | 15 | from loguru import logger 16 | from bs4 import BeautifulSoup 17 | 18 | URL = '' # type: str 19 | '帖子、种子、候选的 url,直接复制即可' 20 | COOKIES = {'nexusphp_u2': ''} # type: Dict[str, str] 21 | '网站 cookie' 22 | PROXIES = {'http': '', 'https': ''} # type: Union[Dict[str, Union[str, None]], None] 23 | '代理' 24 | UC = 50000 # type: Union[int, Tuple[int, int], Tuple[int, int, Union[int, float]], Tuple[Union[int, float], Union[int, float], Callable], List[Union[int, float, Callable]]] 25 | '''设定发糖数量,有四种方法 26 | 第一种,设定为一个固定值,例 27 | UC = 50000 28 | 29 | 第二种,设定一个最小值和最大值,例 30 | UC = 50000, 150000 31 | 程序会自动从两个值之间取随机数,随机数为均匀分布,理论上平均值期望就是两者平均数 32 | 33 | 第三种,设定一个最小值、平均值期望和最大值例,例 34 | UC = 20000, 100000, 660000 35 | 脚本会使用幂函数来实现,公式是 最小值+x^((最大值-平均值期望)/(平均值期望-最小值)) 36 | x 为一个随机数,最小为 0,最大让函数值达到 UC 设定的最大值 37 | 最大值可以很大,但是越大意味着收到最小金额的概率越大 38 | 39 | 第四种,设定一个随机数区间和一个函数,例 40 | UC = 0, 2**(6/7), lambda x: int(round(10000*(2 + x ** 7))) 41 | 第一个数是区间下限,第二个是区间上限,第三个是函数, 42 | 脚本将使用随机数区间生成的随机数作为函数的参数,返回值作为发糖金额 43 | ''' 44 | NUM = -1 # type: int 45 | '发糖人数, -1 表示不限制' 46 | TEXT = True # type: Any 47 | '是否解析回帖内容,如果不解析一律发给回复者本人,否则的话如果发给回复中解析出有效的用户 id (没有还是发给本人)' 48 | RGX = r'((?= NUM: 129 | logger.info(f'转账人数已达到设定值 {NUM},退出程序') 130 | exit() 131 | 132 | 
self.parse_page() 133 | _list = list(self.info.keys()) 134 | index = (-1 if not self.id_info else _list.index(self.id_info)) + 1 135 | i = 0 136 | 137 | if len(_list) > index: 138 | for id_info in _list[index:]: 139 | info = self.info[id_info] 140 | if info['post_uid'] not in [self.uid, None]: 141 | if info['transferred'] == 0 or 'expect_uc' in info and info['transferred'] < info['expect_uc']: 142 | if i > 0 and UPDATE: 143 | self.parse_page() 144 | i += 1 145 | self.batch_transfer(id_info) 146 | if info['transferred'] >= info['expect_uc']: 147 | self.transfer_num += 1 148 | self.id_info = id_info 149 | else: 150 | sleep(300) 151 | 152 | def batch_transfer(self, id_info): 153 | info = self.info[id_info] 154 | if 'expect_uc' not in info: 155 | if isinstance(UC, (tuple, list)): 156 | if len(UC) == 2: 157 | info['expect_uc'] = random.randint(UC[0], UC[1]) 158 | else: 159 | if callable(UC[2]): 160 | info['expect_uc'] = UC[2](random.uniform(UC[0], UC[1])) 161 | else: 162 | n = (UC[2] - UC[1]) / (UC[1] - UC[0]) 163 | x = pow((UC[2] - UC[0]) / 10000, 1/n) 164 | info['expect_uc'] = UC[0] + int(round((random.uniform(0, x) ** n) * 10000)) 165 | else: 166 | info['expect_uc'] = UC 167 | self.save() 168 | 169 | uc = info['expect_uc'] - info['transferred'] 170 | uid = info['transfer_uid'] if info['transfer_uid'] > 0 else info['post_uid'] 171 | info_msg = f"{self.page_info} | {id_info} | 计划转账 {info['expect_uc']} UCoin" 172 | msg = f"{info_msg}{' | ' + MSG if MSG else ''}" if INFO else MSG 173 | logger.info(info_msg) 174 | 175 | cost = uc * 1.5 + (int(uc / 50000) + 1) * 100 176 | if self.uc_amount < cost: 177 | logger.warning(f"{id_info} | UCoin 不足 | {self.uc_amount} < {cost} | {'退出程序' if EXT else '等待'}") 178 | if EXT: 179 | exit() 180 | else: 181 | sleep(900) 182 | self.parse_page() 183 | self.batch_transfer(id_info) 184 | 185 | times = 0 186 | if RE != -1: 187 | for _id_info, _info in self.info.items(): 188 | if _id_info != id_info and 'transfer_uid' in _info: 189 | _uid = _info['transfer_uid'] if _info['transfer_uid'] > 0 else _info['post_uid'] 190 | if _uid == uid and 'expect_uc' in _info and _info['transferred'] >= _info['expect_uc']: 191 | times += 1 192 | if times >= RE: 193 | logger.info(f"{id_info} | 已经给用户 {uid} 转账 {times} 次,跳过") 194 | return 195 | 196 | while uc > 0: 197 | data = {'event': '1003', 'recv': uid, 'amount': 50000 if uc >= 50000 else uc, 'message': msg} 198 | retries = 4 199 | for _ in range(retries + 1): 200 | try: 201 | page = requests.post('https://u2.dmhy.org/mpshop.php', **R_ARGS, data=data).text 202 | soup = BeautifulSoup(page.replace('\n', ''), 'lxml') 203 | if soup.h2 and soup.h2.text in ('Error', '错误', '錯誤', 'Ошибка'): 204 | err_msg = soup.select('table td.text')[1].text 205 | logger.error(f"{id_info} | 转账发生错误: {err_msg} | data: {data}") 206 | delay = re.findall(r'(\d+)', err_msg) 207 | if delay and int(delay[0]) <= 300: 208 | logger.info(f'将在 {int(delay[0])} 秒后重试') 209 | sleep(int(delay[0])) 210 | else: 211 | uc -= data['amount'] 212 | info['transferred'] += data['amount'] 213 | self.save() 214 | logger.info(f"{id_info} | 成功给用户 {uid} 转账 {data['amount']} UCoin") 215 | sleep(300) 216 | break 217 | except Exception as er: 218 | logger.error(f"{id_info} | 转账发生错误: {er} | data: {data}") 219 | if _ == retries: 220 | return 221 | 222 | def parse_page(self): 223 | url = f'{self.url}&page={self.index}' 224 | soup = self.get_soup(url) 225 | 226 | td = soup.find('table', {'border': '1'}).td 227 | for tag in td: 228 | if tag.name == 'div' and tag['style'].startswith('margin-top'): 229 | 
id_info = tag.table['id'] 230 | user_details = tag.select("a[href^='userdetails.php?id=']") 231 | uid = int(user_details[0]['href'][19:]) if user_details else None 232 | 233 | if id_info not in self.info: 234 | self.info[id_info] = {'post_uid': uid, 'transferred': 0} 235 | if uid is None: 236 | logger.info(f'{id_info} | 用户匿名,无法发糖') 237 | elif uid == self.uid: 238 | logger.info(f'{id_info} | 用户为自己,跳过') 239 | 240 | elif tag.name == 'table': 241 | if self.info[id_info]['transferred'] > 0 or uid in (self.uid, None): 242 | continue 243 | if TEXT: 244 | self.info[id_info]['text'] = self.strip_content(tag.select('span bdo')[0]) 245 | 246 | self.validate_uid(id_info) 247 | 248 | self.save() 249 | _list = list(self.info.keys()) 250 | if _list and _list[len(_list) - 1] == self.id_info: 251 | all_p = soup.find_all('p', {'align': 'center'}) 252 | if all_p[0].contents[2].name == 'a': 253 | self.index += 1 254 | self.parse_page() 255 | elif all_p[1].next_sibling.name == 'p': 256 | logger.info('所有楼层已发完,帖子已被锁定,退出程序') 257 | exit() 258 | 259 | def validate_uid(self, id_info): 260 | if TEXT: 261 | valid = True 262 | all_id = re.findall(RGX, self.info[id_info]['text']) 263 | uid = int(all_id[0]) if all_id else self.info[id_info]['post_uid'] 264 | 265 | def find_uid(): 266 | nonlocal valid 267 | 268 | _list = [self.page_info] 269 | _list.extend([page_info for page_info in self.total_info.keys() if page_info != self.page_info]) 270 | for page_info in _list: 271 | for _id_info, _info in self.total_info[page_info].items(): 272 | transfer_uid = _info.get('transfer_uid') 273 | if transfer_uid in [uid, -uid]: 274 | valid = True if transfer_uid > 0 else False 275 | return 276 | if _info['post_uid'] == uid: 277 | valid = True 278 | return 279 | 280 | if self.get_soup(f'https://u2.dmhy.org/userdetails.php?id={uid}' 281 | ).find('td', {'id': 'outer', 'align': 'center'}).h1: 282 | valid = True 283 | else: 284 | valid = False 285 | 286 | find_uid() 287 | if self.info[id_info].get('transfer_uid') not in [uid, -uid]: 288 | if valid: 289 | self.info[id_info]['transfer_uid'] = uid 290 | if all_id: 291 | logger.info(f'{id_info} | 解析到用户 ID {uid},将会给用户 {uid} 发糖') 292 | else: 293 | logger.info(f'{id_info} | 没有解析到用户 ID,将会给层主 {uid} 发糖') 294 | else: 295 | self.info[id_info]['transfer_uid'] = -uid 296 | logger.info(f"{id_info} | {uid} 不是有效的用户 ID,将会给层主 {self.info[id_info]['post_uid']} 发糖") 297 | else: 298 | if 'transfer_uid' not in self.info[id_info]: 299 | self.info[id_info]['transfer_uid'] = self.info[id_info]['post_uid'] 300 | logger.info(f"{id_info} | 将会给用户 {self.info[id_info]['post_uid']} 发糖") 301 | 302 | @staticmethod 303 | def strip_content(element): 304 | contents = [] 305 | 306 | def _strip_content(_element): 307 | if isinstance(_element, bs4.element.Tag): 308 | if _element.name == 'fieldset' and _element.legend: # 排除引用、Media Info 309 | return 310 | if _element.name == 'div' and _element.get('class') in [['codemain'], ['codetop']]: # 排除代码 311 | return 312 | if _element.name == 'a' and _element.get('class') == ['faqlink']: # 排除链接 313 | return 314 | if _element.name in ['img', 'button']: # 排除图片、折叠按钮(没有排除折叠内容) 315 | return 316 | for child_element in _element.contents: 317 | _strip_content(child_element) 318 | else: 319 | contents.append(str(_element)) 320 | 321 | _strip_content(element) 322 | return ' '.join(contents) 323 | 324 | def print_info(self): 325 | idx = 0 326 | fin_idx = 0 327 | contents = [] 328 | for id_info, info in self.info.items(): 329 | if info['post_uid'] not in [self.uid, None]: 330 | idx += 1 331 | ts = 
info['transferred'] 332 | if ts == 0: 333 | _list = list(self.info.keys()) 334 | if _list.index(id_info) > _list.index(self.id_info): 335 | state = '未开始' if NUM == -1 or fin_idx < NUM else '已取消' 336 | else: 337 | state = '失败' 338 | else: 339 | state = '未完成' if ts < info['expect_uc'] else '已完成' 340 | if state == '已完成': 341 | fin_idx += 1 342 | contents.append(f"{idx} | {fin_idx if state == '已完成' else '无'} | {id_info} | {info['post_uid']} | " 343 | f"{info['transfer_uid'] if info['transfer_uid'] > 0 else info['post_uid']} | " 344 | f"{ts} | {info['text']} | {state}") 345 | info_str = '\n'.join(contents) 346 | 347 | logger.info(f'-------------{self.page_info} 转账信息----------------\n' 348 | f'转账序号 | 完成序号 | 楼层 ID | 回复者 UID | 转账 UID | 转账金额 | 回复内容 | 转账状态\n' 349 | f'{info_str}') 350 | 351 | 352 | logger.add(level='DEBUG', sink=LOG_PATH) 353 | t = TransferUCoin() 354 | while True: 355 | try: 356 | t.run() 357 | except BaseException as e: 358 | if isinstance(e, (KeyboardInterrupt, SystemExit)): 359 | t.print_info() 360 | break 361 | else: 362 | logger.exception(e) 363 | sleep(300) 364 | -------------------------------------------------------------------------------- /my_bencoder.py: -------------------------------------------------------------------------------- 1 | """ 2 | Bencoding implementation written in python3. See https://www.bittorrent.org/beps/bep_0003.html. 3 | For encode/decode a certain object, the decode function takes approximately the same time as the encode function. 4 | Requires sys.version_info >= (3, 6) 5 | """ 6 | 7 | from io import BufferedReader, BytesIO 8 | from typing import Union 9 | 10 | 11 | class BdecodeError(Exception): 12 | pass 13 | 14 | 15 | class BencodeError(Exception): 16 | pass 17 | 18 | 19 | def bencode(obj): 20 | fp = [] 21 | write = fp.append 22 | 23 | def _bencode(_obj): 24 | t = type(_obj) 25 | if t is int: 26 | write(b'i') 27 | write(str(_obj).encode()) 28 | write(b'e') 29 | elif t is bytes: 30 | write(str(len(_obj)).encode()) 31 | write(b':') 32 | write(_obj) 33 | elif t is str: 34 | _obj = _obj.encode() 35 | write(str(len(_obj)).encode()) 36 | write(b':') 37 | write(_obj) 38 | elif t is list or t is tuple: 39 | write(b'l') 40 | for item in _obj: 41 | _bencode(item) 42 | write(b'e') 43 | elif t is dict: 44 | write(b'd') 45 | for key, val in sorted(_obj.items()): 46 | _bencode(key) 47 | _bencode(val) 48 | write(b'e') 49 | 50 | _bencode(obj) 51 | return b''.join(fp) 52 | 53 | 54 | def bdecode(_input: Union[bytes, BufferedReader, str]): 55 | """ 56 | Args: 57 | _input: A bytes object, or IO BufferedReader, or a file path 58 | Raises: 59 | AssertionError 60 | BdecodeError 61 | """ 62 | _bytes = _input 63 | if isinstance(_input, BufferedReader): 64 | _bytes = _input.read() 65 | assert isinstance(_bytes, bytes), 'Unsupported input stream' 66 | elif isinstance(_input, str) and len(_input) < 1024: 67 | with open(_input, 'rb') as _file: 68 | _bytes = _file.read() 69 | assert isinstance(_bytes, bytes), "Unsupported input arg" 70 | fp = BytesIO(_bytes) 71 | read = fp.read 72 | 73 | def _bdecode(): 74 | c = read(1) 75 | if c == b'e': 76 | return StopIteration 77 | elif c == b'i': 78 | values = [] 79 | ch = read(1) 80 | while ch != b'e': 81 | values.append(ch) 82 | ch = read(1) 83 | return int(b''.join(values)) 84 | elif c == b'l': 85 | result = [] 86 | while True: 87 | val = _bdecode() 88 | if val is StopIteration: 89 | return result 90 | result.append(val) 91 | elif c == b'd': 92 | result = {} 93 | while True: 94 | key = _bdecode() 95 | if key is StopIteration: 96 | 
return result 97 | val = _bdecode() 98 | result[key] = val 99 | else: 100 | size = 0 101 | while b'0' <= c <= b'9': 102 | size = size * 10 + (ord(c) - ord('0')) 103 | c = read(1) 104 | return read(size) 105 | 106 | return _bdecode() 107 | -------------------------------------------------------------------------------- /qb_del.py: -------------------------------------------------------------------------------- 1 | """删除赚 UC 效率不高的种子 2 | 3 | Notes: 4 | 1. 客户端只支持 qb 5 | 2. 只删不加 6 | 3. 种子必须处于做种状态,不然统计不到做种人数 7 | 4. 需要给要删的种子加标签 8 | 5. 可以根据体积,秒收,效率来设定指标 9 | 6. 先不要删太多,分几次来 10 | 7. 不要在站免时运行, 否则误差会很大 11 | 8. 谨慎操作, 如果出错重新运行 12 | """ 13 | import json 14 | import os 15 | from concurrent.futures import ThreadPoolExecutor, as_completed 16 | from time import time 17 | from datetime import datetime 18 | from typing import Union, Dict, Tuple 19 | 20 | import pytz 21 | import requests 22 | import qbittorrentapi 23 | 24 | # **********************填写配置*********************** 25 | tag = 'tmp' # 标签,不是这个标签的种子不删 26 | host = '127.0.0.1' 27 | port = 8080 28 | username = '' 29 | password = '' 30 | uid = 50096 31 | token = '' # u2-api token --> https://greasyfork.org/zh-CN/scripts/428545 32 | proxies = {'http': '', 'https': ''} # 代理 33 | max_seeder = 3 # 做种人数小于或等于这个值不删 34 | an_hour = False # 做种是否满一小时 35 | free_days = 4.5 # 估计平均一个月站免的天数, 之前算过一年半内是 5.2, 懒得写从优惠历史计算了, 反正这个值是不稳定的 36 | 37 | # 以下参数见 /mpseed.php, s0 和 sd0 tracker 在计算时是实时统计值(网页上的数字可能一天更新一次), 38 | # 相对来说 s0 比较稳定, sd0 有一定的历史因素. 其他值基本不变,除非 sysop 手动修改 39 | b = 14.5 40 | s0 = 34.106 41 | d = 0.3 42 | e = 0.001 43 | sd0 = 35.149 44 | l0 = 1096 45 | # ************************END************************* 46 | 47 | 48 | class DeleteTorrents: 49 | def __init__(self): 50 | self.client = qbittorrentapi.Client(host, port, username, password) 51 | 52 | self.info_file = f'{os.path.splitext(__file__)[0]}.torrents_info' 53 | info_t = Dict[str, Dict[str, Union[Tuple[float, float], str, int, float]]] 54 | self.torrents_info: info_t = {} 55 | if not os.path.exists(self.info_file): 56 | with open(self.info_file, 'a'): 57 | pass 58 | self.get_info_from_client() 59 | else: 60 | with open(self.info_file, 'r') as fp: 61 | self.torrents_info: info_t = json.load(fp) 62 | 63 | self.count = 0 64 | self.unhandled_hashes = [] 65 | 66 | def get_info_from_client(self): 67 | for torrent in self.client.torrents_info(): 68 | if 'daydream.dmhy.best' in torrent.magnet_uri and tag in torrent.tags: 69 | self.torrents_info[torrent.hash] = { 70 | 'name': torrent.name, 71 | 'total_size': torrent.total_size, 72 | 'total_seeds': torrent.num_complete if an_hour else torrent.num_complete + 1, 73 | } 74 | self.save_info() 75 | 76 | def save_info(self): 77 | with open(self.info_file, 'w') as fp: 78 | json.dump(self.torrents_info, fp) 79 | 80 | def update_info(self): 81 | with ThreadPoolExecutor(max_workers=10) as executor: 82 | futures = { 83 | executor.submit(self.search_id, _id): _id 84 | for _id in self.torrents_info if 'tid' not in self.torrents_info[_id] 85 | } 86 | for future in as_completed(futures): 87 | future.result() 88 | self.save_info() 89 | if self.unhandled_hashes: 90 | hash_name = '\n'.join([f"{_id} | {self.torrents_info[_id]['name']}" for _id in self.unhandled_hashes]) 91 | print(f'以下 hash 种子未被找到\n{hash_name}') 92 | 93 | def search_id(self, _id): 94 | _params = {'uid': uid, 'token': token} 95 | history_json = requests.get( 96 | 'https://u2.kysdm.com/api/v1/history', 97 | params={**_params, 'hash': _id}, proxies=proxies 98 | ).json() 99 | if history_json['data']['history']: 100 | data = 
history_json['data']['history'][0] 101 | tid = data['torrent_id'] 102 | 103 | pro_json = requests.get( 104 | 'https://u2.kysdm.com/api/v1/promotion_super', 105 | params={**_params, 'torrent_id': tid}, proxies=proxies 106 | ).json() 107 | pro = list(map(float, pro_json['data']['promotion_super'][0]['public_ratio'].split(' / '))) 108 | 109 | info = { 110 | 'tid': tid, 'name': data['torrent_name'], 'cat': data['category'], 111 | 'date': data['uploaded_at'].replace('T', ' '), 'pro': pro 112 | } 113 | print(f'Hash 值 {_id} 已找到相关信息: {info}') 114 | self.torrents_info[_id].update(info) 115 | 116 | self.count += 1 117 | if self.count % 100 == 0: 118 | self.save_info() 119 | else: 120 | self.unhandled_hashes.append(_id) 121 | print(f'Hash 值 {_id} 未找到相关信息') 122 | 123 | def sort_torrents(self): 124 | for _id, data in self.torrents_info.items(): 125 | if 'pro' in data: 126 | s = data['total_size'] / 1024 ** 3 127 | sd = data['total_seeds'] 128 | 129 | if data['cat'] in ['BDMV', 'DVDISO', 'Lossless Music']: 130 | p = 1 131 | elif data['pro'][0] >= 2 or data['pro'][1] <= 0.5: 132 | p = 0.5 133 | else: 134 | r = free_days / 30 135 | p = r * 0.5 + (1 - r) * max(0.5, max(2 - data['pro'][0], 0) * min(data['pro'][1], 1)) 136 | 137 | dt = datetime.strptime(data['date'], '%Y-%m-%d %H:%M:%S') 138 | ttl = int(time()) - pytz.timezone('Asia/Shanghai').localize(dt).timestamp() 139 | if ttl < 86400 * 60: 140 | l = 0 141 | else: 142 | l = 1096 if ttl >= 86400 * l0 else ttl / 86400 143 | 144 | data['x'] = d * s0 / b / s + p * (1 + e * sd0 * l * s0 / b / s / sd) 145 | '''考虑体积、数量和保种的种子单位体积一小时内获得的 uc, 相对于体积和做种人数无穷大的原盘的倍数''' 146 | else: 147 | data['x'] = 99999 148 | self.torrents_info = dict(sorted(self.torrents_info.items(), key=lambda tup: tup[1]['x'])) 149 | self.save_info() 150 | 151 | def main(self, test=False, target_size=None, target_speed=None, min_x=None): 152 | size = 0 153 | num = 0 154 | ms = 0 155 | 156 | for _id, data in self.torrents_info.items(): 157 | if data['x'] != 99999: 158 | size += data['total_size'] 159 | num += 1 160 | ms += data['x'] * data['total_size'] / 1024 ** 3 * b / s0 / 3600 161 | 162 | if target_size or target_speed or min_x: 163 | size1 = size 164 | num1 = num 165 | ms1 = ms 166 | delete_hashes = [] 167 | for _id, data in self.torrents_info.items(): 168 | if data['total_seeds'] > max_seeder and data['x'] != 99999: 169 | if ( 170 | target_size is not None and size1 > target_size 171 | or target_speed is not None and ms1 > target_speed 172 | or min_x is not None and data['x'] < min_x 173 | ): 174 | delete_hashes.append(_id) 175 | size1 -= data['total_size'] 176 | num1 -= 1 177 | ms1 -= data['x'] * data['total_size'] / 1024 ** 3 * b / s0 / 3600 178 | else: 179 | break 180 | avg = ms1 * 3600 * s0 / b * 1024 ** 3 / size1 181 | if test: 182 | print( 183 | f'预计删除 {len(delete_hashes)} 个种子, 删除后剩余 {num1} 个种子, ' 184 | f'总计大小 {size1}({self.show_size(size1)}), ' 185 | f'预计秒收 {ms1:.3f} UCoin, 平均效率 {avg:.3f}' 186 | ) 187 | else: 188 | self.client.torrents_delete(delete_files=True, torrent_hashes=delete_hashes) 189 | fn = f"{os.path.splitext(__file__)[0]}.delete_hashes.{datetime.now().__str__().replace(':', '-')}.txt" 190 | with open(fn, 'a') as fp: 191 | json.dump(delete_hashes, fp) 192 | for _id in delete_hashes: 193 | del self.torrents_info[_id] 194 | self.save_info() 195 | print(f'成功删除 {len(delete_hashes)} 个种子, 删除的种子 hash 保存在 {fn}') 196 | elif test: 197 | avg = ms * 3600 * s0 / b * 1024 ** 3 / size 198 | print( 199 | f'总共 {num} 个种子, 总计大小 {size}({self.show_size(size)}), ' 200 | f'估计秒收 {ms:.3f} UCoin, 
平均效率 {avg:.3f}' 201 | ) 202 | 203 | @staticmethod 204 | def show_size(byte): 205 | units = {'B': 0, 'KiB': 1, 'MiB': 2, 'GiB': 3, 'TiB': 6, 'PiB': 9} 206 | for unit, digits in units.items(): 207 | if byte >= 1024: 208 | byte /= 1024 209 | else: 210 | return f'{round(byte, digits)} {unit}' 211 | 212 | @staticmethod 213 | def str_to_byte(st): 214 | try: 215 | return int(float(st)) 216 | except: 217 | try: 218 | num, unit = st.split(' ') 219 | units = ['b', 'kb', 'mb', 'gb', 'tb', 'pb', 'b', 'kib', 'mib', 'gib', 'tib', 'pib'] 220 | return int(float(num) * 1024 ** (units.index(unit.lower()) % 6)) 221 | except: 222 | pass 223 | 224 | def run(self): 225 | input(f'{__doc__}\n输入任意键继续:\n') 226 | self.update_info() 227 | self.sort_torrents() 228 | 229 | if not os.path.exists(f'{self.info_file}.bak'): 230 | with open(f'{self.info_file}.bak', 'w') as fp: 231 | json.dump(self.torrents_info, fp) 232 | 233 | self.main(test=True) 234 | while True: 235 | _ = input('输入操作: 0.退出 1.根据指定体积删种 2.根据指定秒收删种 3.根据效率删种\n') 236 | if _ == '0': 237 | exit() 238 | if _ == '1': 239 | while True: 240 | size = self.str_to_byte( 241 | input( 242 | '输入删种后目标体积, 字节数或者数字和单位用空格分开e.g. ' 243 | '1278399 | 1.3 tb | 578 GiB\n' 244 | ).strip() 245 | ) 246 | if size: 247 | self.main(test=True, target_size=size) 248 | if input('按 y 继续\n').lower() == 'y': 249 | self.main(target_size=size) 250 | break 251 | if _ == '2': 252 | speed = float(input('输入秒收\n').strip()) 253 | self.main(test=True, target_speed=speed) 254 | if input('按 y 继续\n').lower() == 'y': 255 | self.main(target_speed=speed) 256 | if _ == '3': 257 | x = float(input('输入最低效率\n').strip()) 258 | self.main(test=True, min_x=x) 259 | if input('按 y 继续\n').lower() == 'y': 260 | self.main(min_x=x) 261 | 262 | 263 | if __name__ == '__main__': 264 | DeleteTorrents().run() 265 | -------------------------------------------------------------------------------- /rename_torrents.py: -------------------------------------------------------------------------------- 1 | """ 2 | 种子文件按照网站标题重命名 3 | """ 4 | import asyncio 5 | import os 6 | import sys 7 | 8 | import aiohttp 9 | import qbittorrentapi 10 | from loguru import logger 11 | 12 | from my_bencoder import bdecode 13 | 14 | 15 | mode = 1 16 | # mode = 1: 既重命名 qb 的名称,也重命名文件 17 | # mode = 2: 只重命名 qb 的名称,不重命名文件 18 | # mode = 3: 还原 qb 的名称和实际文件名 19 | # mode = 4: 只还原实际文件名 20 | # mode = 5: 只还原 qb 的名称 21 | host = 'localhost' # ip 22 | port = 8080 # webui 端口 23 | username = '' # 用户名 24 | password = '' # 密码 25 | src_path = r'E:\Lossless Music' 26 | # 包含种子的文件夹,注意 Windows 下格式是反斜杠,linux 是正斜杠,否则不能匹配 27 | # 如果为空则匹配所有种子,另外如果种子在子文件夹则不改名,因为可能影响做种 28 | bt_backup = r'C:\Users\XXX\AppData\Local\qBittorrent\BT_backup' 29 | # qb 的备份文件夹(存放种子),将 xxx 改为 Windows 用户名,如果不填则每次都需要从 api 获取种子信息 30 | token = '' 31 | # u2 api: https://github.com/kysdm/u2_api 的 token, 自动获取 token: https://greasyfork.org/zh-CN/scripts/428545 32 | uid = 50096 # 自己的 uid 33 | proxy = '' # 'http://127.0.0.1:10809' # 代理,可不填 34 | char_map = { 35 | '?': '?', 36 | '*': '★', 37 | '<': '《', 38 | '>': '》', 39 | ':': ':', 40 | '"': "'", 41 | '/': '/', 42 | '\\': '/', 43 | '|': '│' 44 | } # Windows 文件名不支持的字符,冒号后为需要替换成的字符 45 | os_rename = False 46 | # 是否直接通过 os 重命名文件夹,如果使用qb重命名文件夹则不属于种子的部分不会移动到新文件夹 47 | # 如果是 BDrip 里面包含有外挂字幕(不属于种子内容),建议改为 True 48 | # 如果是无损音乐或者外挂结构,建议 False 49 | 50 | 51 | logger.add(level='DEBUG', sink=f'{os.getcwd()}\\logs\\rename_torrents-{{time}}.log') 52 | client = qbittorrentapi.Client(host=host, port=port, username=username, password=password) 53 | if mode not in range(6): 54 | 
logger.error('未知 mode') 55 | exit() 56 | 57 | 58 | async def rename_torrent(torrent: qbittorrentapi.TorrentDictionary, 59 | session: aiohttp.ClientSession, sem: asyncio.Semaphore): 60 | if 'daydream.dmhy.best' in torrent.magnet_uri and ( 61 | not src_path or torrent.save_path == src_path 62 | or src_path + '/' == torrent.save_path 63 | or src_path + '\\' == torrent.save_path 64 | ): 65 | params = {'uid': uid, 'token': token, 'hash': torrent.hash} 66 | history = [] 67 | try: 68 | old_path = torrent.content_path[len(torrent.save_path):].split('\\' if sys.platform == 'win32' else '/')[1] 69 | except: 70 | old_path = torrent.name 71 | old_name = torrent.name 72 | info_dict = {} 73 | try: 74 | info_dict = bdecode(bt_backup + '\\' + torrent.hash + '.torrent')[b'info'] 75 | origin_name = info_dict[b'name'].decode() 76 | origin_name = ''.join(char_map.get(char) or char for char in origin_name) 77 | except Exception as e: 78 | logger.exception(e) 79 | async with sem: 80 | async with session.get('https://u2.kysdm.com/api/v1/history', params=params, proxy=proxy) as resp: 81 | _json = await resp.json() 82 | history = _json['data']['history'] 83 | if not history: 84 | logger.warning(f'未找到种子 {torrent.hash} {old_name},api 未返回种子信息') 85 | return 86 | origin_name = history[0]['torrent_name'] 87 | origin_name = ''.join(char_map.get(char) or char for char in origin_name) 88 | new_name = origin_name 89 | 90 | if mode in (1, 3, 4) and not os.path.exists(torrent.content_path): 91 | logger.warning(f'意外的错误,种子 {torrent.hash} {old_name} 文件不存在') 92 | return 93 | if mode in (1, 2) and old_name != origin_name: 94 | logger.debug(f'种子 {torrent.hash} {old_name} 已经改名') 95 | return 96 | if mode == 1 and old_path != origin_name: 97 | logger.debug(f'种子 {torrent.hash} {old_name} 文件名已更改') 98 | return 99 | if mode in (3, 4) and old_path == origin_name: 100 | logger.debug(f'种子 {torrent.hash} {old_name} 不需要还原文件名') 101 | return 102 | if mode in (3, 5) and old_name == origin_name: 103 | logger.debug(f'种子 {torrent.hash} {old_name} 不需要还原 qb 名称') 104 | return 105 | 106 | if mode in (1, 2): 107 | if not history: 108 | async with sem: 109 | async with session.get('https://u2.kysdm.com/api/v1/history', params=params, proxy=proxy 110 | ) as resp: 111 | _json = await resp.json() 112 | history = _json['data']['history'] 113 | if not history: 114 | logger.warning(f'未找到种子 {torrent.hash} {old_name},api 未返回种子信息') 115 | return 116 | if not history: 117 | logger.warning(f'未找到种子 {torrent.hash} {torrent.name},api 未返回种子信息') 118 | return 119 | title = ''.join(char_map.get(char) or char for char in history[0]['title']) 120 | if os.path.isdir(torrent.content_path): 121 | new_name = title 122 | else: 123 | new_name = title + os.path.splitext(origin_name)[1] 124 | if info_dict and info_dict.get(b'files') and len(info_dict[b'files']) == 1: 125 | new_name = title + os.path.splitext(info_dict[b'files'][0][b'path'][0].decode())[1] 126 | 127 | if mode == 1 and origin_name == new_name: 128 | logger.debug(f'种子 {torrent.hash} {old_name} 不需要改名') 129 | return 130 | 131 | if mode in (1, 2, 3, 5): 132 | try: 133 | torrent.rename(new_name) 134 | except Exception as e: 135 | logger.error(f'重命名种子名称 {old_path} -> {new_name} 失败,原因 {e}') 136 | else: 137 | logger.info(f'成功重命名种子名称 {old_path} -> {new_name}') 138 | 139 | if mode in (1, 3, 4): 140 | if os.path.isdir(torrent.content_path): 141 | try: 142 | if os_rename: 143 | os.renames(src_path + '/' + old_path, src_path + '/' + new_name) 144 | torrent.rename_folder(old_path, new_name) 145 | except Exception as e: 146 | 
logger.error(f'重命名种子文件夹 {old_path} -> {new_name} 失败,原因 {e}') 147 | else: 148 | logger.info(f'成功重命名种子文件夹 {old_path} -> {new_name}') 149 | else: 150 | try: 151 | torrent.rename_file(0, new_name) 152 | except Exception as e: 153 | logger.trace(f'重命名种子文件 {old_path} -> {new_name} 失败,原因 {e}') 154 | else: 155 | logger.info(f'成功重命名种子文件 {old_path} -> {new_name}') 156 | 157 | 158 | async def main(): 159 | sem = asyncio.Semaphore(20) 160 | async with aiohttp.ClientSession() as session: 161 | tasks = (rename_torrent(torrent, session, sem) for torrent in client.torrents_info(status_filter='completed')) 162 | await asyncio.gather(*tasks) 163 | 164 | 165 | if __name__ == '__main__': 166 | asyncio.run(main()) 167 | -------------------------------------------------------------------------------- /u2_auxseed.py: -------------------------------------------------------------------------------- 1 | """ 2 | 将其他站的种子或者公网种子的source字段改为u2专用的 3 | 并且重新计算info_hash,如果匹配就下载对应种子辅种 4 | 适用于没用重新制种用转钟脚本转载的种子 5 | """ 6 | import asyncio 7 | import os 8 | from hashlib import sha1 9 | 10 | import aiohttp 11 | import qbittorrentapi 12 | from loguru import logger 13 | 14 | from my_bencoder import bencode, bdecode 15 | 16 | host = 'localhost' # ip 17 | port = 8080 # webui 端口 18 | username = '' # webui 用户名 19 | password = '' # webui 密码 20 | bt_backup = 'C:\\Users\\XXX\\AppData\\Local\\qBittorrent\\BT_backup' 21 | # qb 的备份文件夹(存放种子),此项必填,linux 下默认路径是 用户目录/.local/share/qBittorrent/BT_backup 22 | passkey = '' # 下载种子用 23 | token = '' 24 | # u2 api(第三方): https://github.com/kysdm/u2_api 的 token, 自动获取 token: https://greasyfork.org/zh-CN/scripts/428545 25 | uid = 50096 26 | proxy = '' # ‘http://127.0.0.1:10809’ # 可不填 27 | 28 | logger.add(level='DEBUG', sink=f'{os.getcwd()}\\logs\\auto_seed-{{time}}.log') 29 | client = qbittorrentapi.Client(host=host, port=port, username=username, password=password) 30 | 31 | 32 | async def aux_seed(torrent: qbittorrentapi.TorrentDictionary, session: aiohttp.ClientSession, sem: asyncio.Semaphore): 33 | if 'daydream.dmhy.best' not in torrent.magnet_uri: 34 | info_dict = bdecode(bt_backup + '/' + torrent.hash + '.torrent')[b'info'] 35 | info_dict[b'source'] = '[u2.dmhy.org] U2分享園@動漫花園' 36 | info_dict[b'private'] = 1 37 | torrent_hash = sha1(bencode(info_dict)).hexdigest() 38 | params = {'uid': uid, 'token': token, 'hash': torrent_hash} 39 | 40 | async with sem: 41 | async with session.get('https://u2.kysdm.com/api/v1/history', params=params, proxy=proxy) as resp: 42 | _json = await resp.json() 43 | 44 | history = _json['data']['history'] 45 | if history: 46 | torrent_id = history[0]['torrent_id'] 47 | logger.info(f'找到种子 {torrent_hash} {torrent.name},U2 种子 id 为 {torrent_id}, 将尝试辅种') 48 | async with sem: 49 | async with session.get( 50 | f'https://u2.dmhy.org/download.php?id={torrent_id}&passkey={passkey}&https=1', 51 | params=params, proxy=proxy 52 | ) as resp: 53 | content = await resp.read() 54 | client.torrents_add( 55 | torrent_files=content, save_path=os.path.split(torrent.content_path)[0], is_paused=True 56 | ) 57 | logger.info(f'已添加种子 {torrent_hash} {torrent.name}') 58 | else: 59 | logger.debug(f'未找到种子 {torrent_hash} {torrent.name},api 未返回种子信息') 60 | 61 | 62 | async def main(): 63 | sem = asyncio.Semaphore(20) 64 | async with aiohttp.ClientSession() as session: 65 | tasks = (aux_seed(torrent, session, sem) for torrent in client.torrents_info(status_filter='completed')) 66 | await asyncio.gather(*tasks) 67 | 68 | 69 | if __name__ == '__main__': 70 | asyncio.run(main()) 71 | 
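# --- Illustrative sketch (editor's addition, not part of the original script) ---
# Why the rewrite above produces a "new" torrent: the tracker identifies a torrent
# by sha1(bencode(info)), so adding or changing any key inside the ``info`` dict
# (here ``source`` and ``private``) yields a different info hash, which aux_seed()
# then looks up through the third-party API. The dict below uses made-up
# placeholder values purely for demonstration.
def _info_hash_demo():  # hypothetical helper, never called by the script
    from hashlib import sha1
    from my_bencoder import bencode
    info = {b'name': b'example', b'piece length': 262144,
            b'pieces': b'\x00' * 20, b'length': 262144}
    before = sha1(bencode(info)).hexdigest()
    info[b'source'] = '[u2.dmhy.org] U2分享園@動漫花園'  # same value aux_seed() writes
    info[b'private'] = 1
    after = sha1(bencode(info)).hexdigest()
    assert before != after  # a different swarm as far as the tracker is concerned
    return before, after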
--------------------------------------------------------------------------------
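Appendix (editor's addition, not part of the repository): a minimal, self-contained sketch of the three-value `UC` mode described in the `give_sugar.py` docstring, where the transfer amount is drawn as minimum + x^((maximum - mean)/(mean - minimum)) in units of 10000 UCoin, mirroring the `batch_transfer` implementation. The numbers are the docstring's own example values; the helper name `draw_amount` is illustrative only.
```python
# Sketch of give_sugar.py's three-value UC mode: UC = (minimum, expected mean, maximum).
# A uniform draw u in [0, x] is raised to the power n and scaled by 10000, with n and x
# chosen so that no amount exceeds the maximum and the mean lands on UC[1].
import random

UC = (20000, 100000, 660000)  # the example values from the give_sugar.py docstring


def draw_amount() -> int:  # illustrative helper, not a function from the repository
    n = (UC[2] - UC[1]) / (UC[1] - UC[0])      # exponent: (max - mean) / (mean - min)
    x = ((UC[2] - UC[0]) / 10000) ** (1 / n)   # upper bound of the uniform draw
    return UC[0] + int(round(random.uniform(0, x) ** n * 10000))


# The empirical mean should sit near UC[1] = 100000, and no draw exceeds UC[2] = 660000.
samples = [draw_amount() for _ in range(100_000)]
print(min(samples), sum(samples) // len(samples), max(samples))
```
The mean works out to UC[1] because E[u^n] over a uniform draw on [0, x] is x^n / (n + 1), and x is picked so that x^n = (UC[2] - UC[0]) / 10000; most draws therefore land near the minimum, with occasional large payouts up to the maximum.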