├── .devcontainer
│   ├── Dockerfile
│   ├── devcontainer.json
│   └── requirements.txt
├── .gitattributes
├── .github
│   └── ISSUE_TEMPLATE
│       ├── bug_report.md
│       └── feature_request.md
├── .gitignore
├── BaseLive.py
├── BiliLive.py
├── BiliLiveRecorder.py
├── BiliVideoChecker.py
├── DanmuRecorder.py
├── Dockerfile
├── LICENSE
├── MainRunner.py
├── Processor.py
├── README.md
├── Uploader.py
├── config
│   └── config.json
├── docker-compose.yml
├── icon.ico
├── main.py
├── requirements.txt
└── utils.py

--------------------------------------------------------------------------------
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.245.0/containers/python-3/.devcontainer/base.Dockerfile

# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
ARG VARIANT="3.10-bullseye"
FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}

# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
ARG NODE_VERSION="none"
RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi

# Install the pip requirements into the image so they are available as soon as the container starts.
COPY requirements.txt /tmp/pip-tmp/
RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \
   && rm -rf /tmp/pip-tmp

# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
#     && apt-get -y install --no-install-recommends

# [Optional] Uncomment this line to install global node packages.
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1

--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.245.0/containers/python-3
{
	"name": "Python 3",
	"build": {
		"dockerfile": "Dockerfile",
		"context": ".",
		"args": {
			// Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6
			// Append -bullseye or -buster to pin to an OS version.
			// Use -bullseye variants locally on arm64/Apple Silicon.
			"VARIANT": "3.10",
			// Options
			"NODE_VERSION": "none"
		}
	},

	// Configure tool-specific properties.
	"customizations": {
		// Configure properties specific to VS Code.
		"vscode": {
			// Set *default* container specific settings.json values on container create.
			"settings": {
				"python.defaultInterpreterPath": "/usr/local/bin/python",
				"python.linting.enabled": true,
				"python.linting.pylintEnabled": true,
				"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
				"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
				"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
				"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
				"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
				"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
				"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
				"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
				"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
			},

			// Add the IDs of extensions you want installed when the container is created.
			"extensions": [
				"ms-python.python",
				"ms-python.vscode-pylance"
			]
		}
	},

	// Use 'forwardPorts' to make a list of ports inside the container available locally.
	// "forwardPorts": [],

	// Use 'postCreateCommand' to run commands after the container is created.
	// "postCreateCommand": "pip3 install --user -r requirements.txt",

	// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
	"remoteUser": "vscode",
	"features": {
		"github-cli": "latest"
	}
}

--------------------------------------------------------------------------------
/.devcontainer/requirements.txt:
--------------------------------------------------------------------------------
biliup>=0.2.11
brotli>=1.0.9
brotlipy>=0.7.0
bypy>=1.7.12
fastHan>=1.7
jsonlines>=3.0.0
lastversion>=2.4.2
prettytable>=3.2.0
requests>=2.27.1
urllib3>=1.26.9
websockets>=10.2
ffmpeg-python>=0.2.0

--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
# Auto detect text files and perform LF normalization
* text=auto

--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]

**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]

**Additional context**
Add any other context about the problem here.

--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

data/
log/
config/*
!config/config.json

# vscode
.vscode/

--------------------------------------------------------------------------------
/BaseLive.py:
--------------------------------------------------------------------------------
import abc
import datetime
import logging
import traceback

import requests
import urllib3
from requests.adapters import HTTPAdapter

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class BaseLive(metaclass=abc.ABCMeta):

    def __init__(self, config: dict):
        default_headers = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.6,en;q=0.4,zh-TW;q=0.2',
            'Connection': 'keep-alive',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36'
        }
        self.headers = {**default_headers,
                        **config.get('root', {}).get('request_header', {})}
        self.session = requests.session()
        self.session.mount('https://', HTTPAdapter(max_retries=3))
        self.room_id = ''
        self.site_name = ''
        self.site_domain = ''
        self.config = config
        # Pretend the last check happened a full interval ago so that the
        # first read of live_status triggers a real check.
        self.__last_check_time = datetime.datetime.now() - datetime.timedelta(
            seconds=config.get('root', {}).get('check_interval', 60))
        self.__live_status = False
        self.__allowed_check_interval = datetime.timedelta(
            seconds=config.get('root', {}).get('check_interval', 60))

    def common_request(self, method: str, url: str, params: dict = None, data: dict = None) -> requests.Response:
        connection = None
        try:
            if method == 'GET':
                connection = self.session.get(
                    url, headers=self.headers, params=params, verify=False, timeout=5)
            if method == 'POST':
                connection = self.session.post(
                    url, headers=self.headers, params=params, data=data, verify=False, timeout=5)
            return connection
        except requests.exceptions.RequestException as e:
            logging.error(self.generate_log(
                "Request Error: " + str(e) + traceback.format_exc()))
            # Returns None on network failure (and for unsupported methods);
            # callers must be prepared for that.
            return None

    @abc.abstractmethod
    def get_room_info(self):
        pass

    @abc.abstractmethod
    def get_live_urls(self):
        pass

    def __check_live_status(self) -> bool:
        self.room_info = self.get_room_info()
        if self.room_info['status']:
            logging.info(self.generate_log(
                "直播间标题:" + self.room_info['room_name']))
            return True
        logging.info(self.generate_log("等待开播"))
        return False

    def check_live_status(self) -> bool:
        try:
            self.__live_status = self.__check_live_status()
            self.__last_check_time = datetime.datetime.now()
        except Exception as e:
            logging.error(self.generate_log(
                "Status Error: " + str(e) + traceback.format_exc()))
        return self.__live_status

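    # The live_status property below throttles polling: it performs a real
    # check via check_live_status() at most once per check_interval and
    # otherwise returns the cached result of the previous check.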
    @property
    def live_status(self) -> bool:
        if datetime.datetime.now() - self.__last_check_time >= self.__allowed_check_interval:
            logging.debug(self.generate_log("允许检查"))
            self.check_live_status()
        else:
            logging.debug(self.generate_log("间隔不足,使用过去状态"))
        return self.__live_status

    def generate_log(self, content: str = '') -> str:
        return f"[Site:{self.site_name} Room:{self.room_id}] {content}"

--------------------------------------------------------------------------------
/BiliLive.py:
--------------------------------------------------------------------------------
import logging

import urllib3

from BaseLive import BaseLive

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class BiliLive(BaseLive):
    def __init__(self, config: dict):
        super().__init__(config)
        self.room_id = config['spec']['room_id']
        self.site_name = 'BiliBili'
        self.site_domain = 'live.bilibili.com'
        self.get_room_info()
        self.check_live_status()

    def get_room_info(self) -> dict:
        data = {}
        room_info_url = 'https://api.live.bilibili.com/room/v1/Room/get_info'
        user_info_url = 'https://api.live.bilibili.com/live_user/v1/UserInfo/get_anchor_in_room'
        response = self.common_request('GET', room_info_url, {
            'room_id': self.room_id
        }).json()
        logging.debug(self.generate_log("房间API消息:" + response['msg']))
        if response['msg'] == 'ok':
            data['room_name'] = response['data']['title']
            data['site_name'] = self.site_name
            data['site_domain'] = self.site_domain
            data['status'] = response['data']['live_status'] == 1
            # Resolve the full room_id (short room ids map to a canonical one).
            self.room_id = str(response['data']['room_id'])
            response = self.common_request('GET', user_info_url, {
                'roomid': self.room_id
            }).json()
            data['hostname'] = response['data']['info']['uname']
        return data

    def get_live_urls(self) -> list:
        live_urls = []
        url = 'https://api.live.bilibili.com/room/v1/Room/playUrl'
        stream_info = self.common_request('GET', url, {
            'cid': self.room_id,
            'otype': 'json',
            'quality': 0,
            'platform': 'web'
        }).json()
        best_quality = stream_info['data']['accept_quality'][0][0]
        stream_info = self.common_request('GET', url, {
            'cid': self.room_id,
            'otype': 'json',
            'quality': best_quality,
            'platform': 'web'
        }).json()
        for durl in stream_info['data']['durl']:
            logging.debug(self.generate_log("获取到以下地址:" + durl['url']))
            live_urls.append(durl['url'])
        return live_urls

    def get_room_conf(self):
        data = {}
        url = 'https://api.live.bilibili.com/room/v1/Danmu/getConf'
        response = self.common_request('GET', url, {
            'room_id': self.room_id
        }).json()
        logging.debug(self.generate_log("房间配置消息:" + response['msg']))
        if response['msg'] == 'ok':
            data['available_hosts'] = response['data']['host_server_list']
            data['token'] = response['data']['token']
        return data

--------------------------------------------------------------------------------
/BiliLiveRecorder.py:
--------------------------------------------------------------------------------
import datetime
import logging
import os
import traceback

import requests
import urllib3

import utils
from BiliLive import BiliLive

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class BiliLiveRecorder(BiliLive):
    def __init__(self, config: dict, global_start: datetime.datetime):
        BiliLive.__init__(self, config)
        self.record_dir = utils.init_record_dir(
            self.room_id, global_start, config.get('root', {}).get('data_path', "./"))

    def record(self, record_url: str, output_filename: str) -> None:
        try:
            logging.info(self.generate_log('√ 正在录制...' + self.room_id))
            default_headers = {
                'Accept-Encoding': 'identity',
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36',
                'Referer': 'https://live.bilibili.com/'
            }
            headers = {**default_headers,
                       **self.config.get('root', {}).get('request_header', {})}
            resp = requests.get(record_url, stream=True,
                                headers=headers,
                                timeout=self.config.get(
                                    'root', {}).get('check_interval', 60))
            with open(output_filename, "wb") as f:
                for chunk in resp.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
        except Exception as e:
            logging.error(self.generate_log(
                'Error while recording: ' + str(e)))

    def run(self) -> None:
        logging.basicConfig(level=utils.get_log_level(self.config),
                            format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                            datefmt='%a, %d %b %Y %H:%M:%S',
                            handlers=[logging.FileHandler(os.path.join(self.config.get('root', {}).get('logger', {}).get('log_path', "./log"),
                                                                       "LiveRecorder_" + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log'),
                                                          "a", encoding="utf-8")])
        while True:
            try:
                if self.live_status:
                    urls = self.get_live_urls()
                    filename = utils.generate_filename(self.room_id)
                    c_filename = os.path.join(self.record_dir, filename)
                    self.record(urls[0], c_filename)
                    logging.info(self.generate_log('录制完成' + c_filename))
                else:
                    logging.info(self.generate_log('下播了'))
                    break
            except Exception as e:
                logging.error(self.generate_log(
                    'Error while checking or recording: ' + str(e) + traceback.format_exc()))


if __name__ == "__main__":
    config = {
        "root": {},
        "spec": {
            "room_id": "22603245"
        }
    }
    global_start = datetime.datetime.now()
    BiliLiveRecorder(config, global_start).run()

--------------------------------------------------------------------------------
/BiliVideoChecker.py:
--------------------------------------------------------------------------------
import datetime
import logging
import os
import threading
import time

import requests
import urllib3

import utils

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class BiliVideoChecker(threading.Thread):
    def __init__(self, bvid: str, path: str, config: dict):
        threading.Thread.__init__(self)
        default_headers = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.6,en;q=0.4,zh-TW;q=0.2',
            'Connection': 'keep-alive',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36'
        }
        self.headers = {**default_headers,
                        **config.get('root', {}).get('request_header', {})}
        self.session = requests.session()
        self.bvid = bvid
        self.path = path
        self.config = config
        self.check_url = "https://api.bilibili.com/x/web-interface/view"
        self.check_interval = config.get('root', {}).get('check_interval', 60)

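    # Unlike BaseLive.common_request, this variant mounts no retry adapter,
    # sets no timeout and catches no exceptions; request errors simply
    # propagate out of run() and end the checker thread.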
    def common_request(self, method: str, url: str, params: dict = None, data: dict = None) -> requests.Response:
        connection = None
        if method == 'GET':
            connection = self.session.get(
                url, headers=self.headers, params=params, verify=False)
        if method == 'POST':
            connection = self.session.post(
                url, headers=self.headers, params=params, data=data, verify=False)
        return connection

    def run(self) -> None:
        logging.basicConfig(level=utils.get_log_level(self.config),
                            format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                            datefmt='%a, %d %b %Y %H:%M:%S',
                            handlers=[logging.FileHandler(os.path.join(self.config.get('root', {}).get('logger', {}).get('log_path', "./log"),
                                                                       "VideoChecker_" + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log'),
                                                          "a", encoding="utf-8")])
        while True:
            video_info = self.common_request("GET", self.check_url, {
                'bvid': self.bvid
            }).json()
            try:
                if video_info['code'] == 0 and video_info['data']['state'] == 0:
                    logging.info("稿件%s 已开放浏览,准备删除 %s", self.bvid, self.path)
                    utils.del_files_and_dir(self.path)
                    return
                logging.info("稿件%s 未开放浏览", self.bvid)
            except KeyError:
                pass
            # Sleep outside the try block so that a malformed response does
            # not cause a tight polling loop against the API.
            time.sleep(self.check_interval)

--------------------------------------------------------------------------------
/DanmuRecorder.py:
--------------------------------------------------------------------------------
import asyncio
import datetime
import json
import logging
import os
import struct
import time
import traceback

import brotli
import jsonlines
import websockets

import utils
from BiliLive import BiliLive


class BiliDanmuRecorder(BiliLive):
    def __init__(self, config: dict, global_start: datetime.datetime):
        BiliLive.__init__(self, config)
        self.conf = self.get_room_conf()
        self.host_idx = 0
        self.room_server_api = f"wss://{self.conf['available_hosts'][self.host_idx]['host']}:{self.conf['available_hosts'][self.host_idx]['wss_port']}/sub"
        self.danmu_dir = utils.init_danmu_log_dir(
            self.room_id, global_start, config.get('root', {}).get('data_path', "./"))

    def __pack(self, data: bytes, protocol_version: int, datapack_type: int):
        # Frame layout: 2-byte header length (always 16), 2-byte protocol
        # version, 4-byte operation, 4-byte sequence id, then the payload;
        # the 4-byte total length is prepended last.
        sendData = bytearray()
        sendData += struct.pack(">H", 16)
        sendData += struct.pack(">H", protocol_version)
        sendData += struct.pack(">I", datapack_type)
        sendData += struct.pack(">I", 1)
        sendData += data
        sendData = struct.pack(">I", len(sendData) + 4) + sendData
        return bytes(sendData)

    async def __send(self, data: bytes, protocol_version: int, datapack_type: int, websocket):
        data = self.__pack(data, protocol_version, datapack_type)
        logging.debug(self.generate_log(f'发送原始数据:{data}'))
        await websocket.send(data)

    async def __send_heart_beat(self, websocket):
        hb = self.__pack(b'[object Object]', 1, 2)
        while self.live_status:
            logging.debug(self.generate_log(f"弹幕接收器已发送心跳包,心跳包数据{hb}"))
            await websocket.send(hb)
            await asyncio.sleep(30)

    async def __receDM(self, websocket):
        while self.live_status:
            recv_text = await websocket.recv()
            if recv_text:
                self.__printDM(recv_text)

    async def __startup(self):
        verify_data = {"uid": 0, "roomid": int(self.room_id),
                       "protover": 3, "platform": "web", "type": 2, "key": self.conf['token']}
        data = json.dumps(verify_data).encode()

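        # Handshake sketch, based on __pack/__send above: the JSON payload is
        # framed and sent as operation 7 (room-entry verification), after
        # which the connection is kept alive with operation 2 heartbeats
        # every 30 s. If a connection attempt fails, the loop below falls
        # back to the next danmu server in available_hosts (when one
        # remains) and reconnects.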
        while self.live_status:
            logging.debug(self.generate_log(f"wss URL:{self.room_server_api}"))
            try:
                async with websockets.connect(self.room_server_api, origin="https://live.bilibili.com",
                                              extra_headers=self.headers) as aws:
                    logging.info(self.generate_log("发送验证消息包"))
                    await self.__send(data, 1, 7, aws)
                    tasks = [asyncio.create_task(self.__receDM(aws)),
                             asyncio.create_task(self.__send_heart_beat(aws))]
                    await asyncio.wait(tasks)
            except KeyboardInterrupt:
                raise
            except Exception:
                # Connection failed or dropped; fall through to host failover.
                pass
            if self.host_idx < len(self.conf['available_hosts']) - 1:
                self.host_idx += 1
                self.room_server_api = f"wss://{self.conf['available_hosts'][self.host_idx]['host']}:{self.conf['available_hosts'][self.host_idx]['wss_port']}/sub"

    def run(self):
        logging.basicConfig(level=utils.get_log_level(self.config),
                            format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                            datefmt='%a, %d %b %Y %H:%M:%S',
                            handlers=[logging.FileHandler(os.path.join(self.config.get('root', {}).get('logger', {}).get('log_path', "./log"),
                                                                       "DanmuRecorder_" + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log'),
                                                          "a", encoding="utf-8")])
        try:
            asyncio.run(self.__startup())
        except KeyboardInterrupt:
            logging.info(self.generate_log("键盘指令退出"))

    def __printDM(self, data):
        # Unpack the 16-byte header: total packet length, header length,
        # protocol version and operation type.
        header = struct.unpack(">IHHII", data[:16])
        packetLen = header[0]
        ver = header[2]
        op = header[3]
        # ver 3 bodies are brotli-compressed and themselves contain whole frames.
        if ver == 3:
            data = brotli.decompress(data[16:])
            self.__printDM(data)
            return

        # ver 1 frames are the server's replies to room entry or heartbeats;
        # op 3 carries the room popularity value.
        if ver == 1:
            if op == 3:
                logging.debug(self.generate_log(
                    '[RENQI] {}\n'.format(struct.unpack(">I", data[16:20])[0])))
                return

        # Several packets can arrive glued together, so use the declared
        # packet length to split them apart.
        if len(data) > packetLen:
            self.__printDM(data[packetLen:])
            data = data[:packetLen]

        # ver 2 bodies are zlib-compressed; decompression is currently disabled.
        # if ver == 2:
        #     data = zlib.decompress(data[16:])
        #     self.__printDM(data)
        #     return

        # op 8 is the reply to the verification packet.
        if ver == 1:
            if op == 8:
                logging.debug(self.generate_log(
                    '[VERIFY] {}\n'.format(json.loads(data[16:].decode('utf-8', errors='ignore')))))

        # Everything else is plain JSON; op 5 marks a notification message
        # whose "cmd" field identifies the event type.
        if (ver == 0 or ver == 2) and op == 5:
            try:
                jd = json.loads(data[16:].decode('utf-8', errors='ignore'))
                logging.debug(self.generate_log(jd['cmd'] + '\t' + str(jd) + '\n'))
                if jd['cmd'] == 'DANMU_MSG':
                    info = dict(enumerate(jd.get("info", [])))
                    prop = dict(enumerate(info.get(0, [])))
                    user_info = dict(enumerate(info.get(2, [])))
                    medal_info = dict(enumerate(info.get(3, [])))
                    ul_info = dict(enumerate(info.get(4, [])))
                    with jsonlines.open(os.path.join(
                            self.danmu_dir, "danmu.jsonl"), mode="a") as danmu_writer:
                        danmu_writer.write({
                            "raw": info,
                            "properties": {
                                "type": prop.get(1, 1),
                                "size": prop.get(2, 25),
                                "color": prop.get(3, 0xFFFFFF),
                                "time": prop.get(4, int(round(time.time()*1000)))
                            },
                            "text": info.get(1, ""),
                            "user_info": {
                                "user_id": user_info.get(0, 0),
                                "user_name": user_info.get(1, ""),
                                "user_isAdmin": user_info.get(2, 0) == 1,
                                "user_isVip": user_info.get(3, 0) == 1,
                            },
                            "medal_info": {
                                "medal_level": medal_info.get(0, 0),
                                "medal_name": medal_info.get(1, ""),
                                "medal_liver_name": medal_info.get(2, ""),
                                "medal_liver_roomid": medal_info.get(3, 0),
                                "medal_liver_uid": medal_info.get(12, 0),
                                "medal_is_lighted": medal_info.get(11, 0) == 1,
                                "medal_guard_level": medal_info.get(10, 0)
                            },
                            "ul_info": {
                                "ul_level": ul_info.get(0, 0),
                            },
                            "title_info": info.get(5, []),
                            "guard_level": info.get(7, 0)
                        })
                elif jd['cmd'] == 'SEND_GIFT':
                    data = jd.get("data", {})
                    medal_info = data.get("medal_info", {})
                    with jsonlines.open(os.path.join(
                            self.danmu_dir, "gift.jsonl"), mode="a") as gift_writer:
                        gift_writer.write({
                            "raw": data,
                            "user_id": data.get("uid", 0),
                            "user_name": data.get("uname", ""),
                            "time": data.get("timestamp", int(round(time.time()))),
                            "gift_name": data.get("giftName", ""),
                            "gift_id": data.get("giftId", 0),
                            "gift_type": data.get("giftType", 0),
                            "price": data.get("price", 0),
                            "num": data.get("num", 0),
                            "total_coin": data.get("total_coin", 0),
                            "coin_type": data.get("coin_type", "silver"),
                            "medal_info": {
                                "medal_level": medal_info.get("medal_level", 0),
                                "medal_name": medal_info.get("medal_name", ""),
                                "medal_liver_uid": medal_info.get("target_id", 0),
                                "medal_is_lighted": medal_info.get("is_lighted", 0) == 1,
                                "medal_guard_level": medal_info.get("guard_level", 0)
                            },
                        })
                # elif jd['cmd'] == 'GUARD_BUY':
                #     data = jd.get("data", {})
                #     guard_writer = jsonlines.open(os.path.join(self.danmu_dir, "guard.jsonl"), mode="a")
                #     guard_writer.write({
                #         "raw": data,
                #         "user_id": data.get("uid", 0),
                #         "user_name": data.get("username", ""),
                #         "time": data.get("start_time", int(round(time.time()))),
                #         "guard_level": data.get("guard_level", 0),
                #         "gift_id": data.get("gift_id", 0),
                #         "gift_name": data.get("gift_name", 0),
                #         "price": data.get("price", 0),
                #         "num": data.get("num", 0)
                #     })
                elif jd['cmd'] == 'USER_TOAST_MSG':
                    data = jd.get("data", {})
                    with jsonlines.open(os.path.join(
                            self.danmu_dir, "guard.jsonl"), mode="a") as guard_writer:
                        guard_writer.write({
                            "raw": data,
                            "user_id": data.get("uid", 0),
                            "user_name": data.get("username", ""),
                            "time": data.get("start_time", int(round(time.time()))),
                            "guard_level": data.get("guard_level", 0),
                            "role_name": data.get("role_name", 0),
                            "price": data.get("price", 0),
                            "num": data.get("num", 0)
                        })
                elif jd['cmd'] == 'LIVE':
                    logging.info(self.generate_log(
                        '[Notice] LIVE Start!\n'))
                elif jd['cmd'] == 'PREPARING':
                    logging.info(self.generate_log(
                        '[Notice] LIVE Ended!\n'))
                    with open(os.path.join(self.danmu_dir, "live_end_time"), "w", encoding="utf-8") as f:
                        f.write(str(int(round(time.time()))))
                elif jd['cmd'] == 'INTERACT_WORD':
                    data = jd.get("data", {})
                    medal_info = data.get("fans_medal", {})
                    with jsonlines.open(os.path.join(
                            self.danmu_dir, "interaction.jsonl"), mode="a") as interact_writer:
                        interact_writer.write({
                            "raw": data,
                            "user_id": data.get("uid", 0),
                            "user_name": data.get("uname", ""),
                            "msg_type": data.get("msg_type", 1),
                            "room_id": data.get("room_id", 0),
                            "time": data.get("timestamp", int(round(time.time()))),
                            "medal_info": {
                                "medal_level": medal_info.get("medal_level", 0),
                                "medal_name": medal_info.get("medal_name", ""),
"medal_liver_uid": medal_info.get("target_id", 0), 242 | "medal_is_lighted": medal_info.get("is_lighted", 0) == 1, 243 | "medal_guard_level": medal_info.get("guard_level", 0) 244 | }, 245 | }) 246 | elif jd['cmd'] == 'SUPER_CHAT_MESSAGE': 247 | data = jd.get("data", {}) 248 | medal_info = data.get("medal_info", {}) 249 | superchat_writer = jsonlines.open(os.path.join( 250 | self.danmu_dir, "superchat.jsonl"), mode="a") 251 | superchat_writer.write({ 252 | "raw": data, 253 | "text": data.get("message", ""), 254 | "user_id": data.get("uid", 0), 255 | "user_name": data.get("user_info", {}).get("uname", ""), 256 | "time": data.get("timestamp", int(round(time.time()))), 257 | "price": data.get("price", 0), 258 | "SCkeep_time": data.get("time", 0), 259 | "medal_info": { 260 | "medal_level": medal_info.get("medal_level", 0), 261 | "medal_name": medal_info.get("medal_name", ""), 262 | "medal_liver_name": medal_info.get("anchor_uname", ""), 263 | "medal_liver_uid": medal_info.get("target_id", 0), 264 | "medal_is_lighted": medal_info.get("is_lighted", 0) == 1, 265 | "medal_guard_level": medal_info.get("guard_level", 0) 266 | }, 267 | }) 268 | except Exception as e: 269 | logging.error(self.generate_log( 270 | 'Error while parsing danmu data:'+str(e)+traceback.format_exc())) 271 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10 2 | RUN apt-get update && apt-get install -y ffmpeg 3 | WORKDIR /usr/src/app 4 | COPY requirements.txt ./ 5 | RUN pip install --no-cache-dir -r requirements.txt 6 | COPY . . 7 | CMD ["python", "-u", "main.py", "config/config.json"] 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2020-2023 AsaChiri

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

--------------------------------------------------------------------------------
/MainRunner.py:
--------------------------------------------------------------------------------
import datetime
import logging
import threading
import time
import traceback
from multiprocessing import Process, Value

import utils
from BiliLive import BiliLive
from BiliLiveRecorder import BiliLiveRecorder
from BiliVideoChecker import BiliVideoChecker
from DanmuRecorder import BiliDanmuRecorder
from Processor import Processor
from Uploader import Uploader


class MainRunner():
    def __init__(self, config):
        self.config = config
        self.prev_live_status = False
        self.current_state = Value(
            'i', int(utils.state.WAITING_FOR_LIVE_START))
        self.state_change_time = Value('f', time.time())
        if self.config.get('root', {}).get('enable_baiduyun', False):
            from bypy import ByPy
            _ = ByPy()
        self.bl = BiliLive(self.config)
        self.blr = None
        self.bdr = None

    def proc(self, config: dict, record_dir: str, danmu_path: str, current_state, state_change_time) -> None:
        p = Processor(config, record_dir, danmu_path)
        p.run()

        if config.get('spec', {}).get('uploader', {}).get('record', {}).get('upload_record', False) or config.get('spec', {}).get('uploader', {}).get('clips', {}).get('upload_clips', False):
            current_state.value = int(utils.state.UPLOADING_TO_BILIBILI)
            state_change_time.value = time.time()
            d = None  # stays None if the upload raises, signalling failure below
            try:
                u = Uploader(p.outputs_dir, p.splits_dir, config)
                d = u.upload(p.global_start)
            except Exception as e:
                logging.error('Error while uploading: ' +
                              str(e) + traceback.format_exc())
                current_state.value = int(utils.state.ERROR)
                state_change_time.value = time.time()

            if d is None:
                current_state.value = int(utils.state.ERROR)
                state_change_time.value = time.time()
            else:
                if not config.get('spec', {}).get('uploader', {}).get('record', {}).get('keep_record_after_upload', True) and d.get("record", None) is not None and not config.get('root', {}).get('uploader', {}).get('upload_by_edit', False):
                    rc = BiliVideoChecker(d['record']['bvid'],
                                          p.splits_dir, config)
                    rc.start()
                if not config.get('spec', {}).get('uploader', {}).get('clips', {}).get('keep_clips_after_upload', True) and d.get("clips", None) is not None and not config.get('root', {}).get('uploader', {}).get('upload_by_edit', False):
                    cc = BiliVideoChecker(d['clips']['bvid'],
                                          p.outputs_dir, config)
                    cc.start()

        if config.get('root', {}).get('enable_baiduyun', False) and config.get('spec', {}).get('backup', False):
            current_state.value = int(utils.state.UPLOADING_TO_BAIDUYUN)
            state_change_time.value = time.time()
            try:
                from bypy import ByPy
                bp = ByPy()
                bp.upload(p.merged_file_path)
            except Exception as e:
                logging.error('Error when uploading to Baiduyun: ' +
                              str(e) + traceback.format_exc())
                current_state.value = int(utils.state.ERROR)
                state_change_time.value = time.time()
                return

        if current_state.value != int(utils.state.LIVE_STARTED):
            current_state.value = int(utils.state.WAITING_FOR_LIVE_START)
            state_change_time.value = time.time()

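    # run() drives the pipeline as a simple state machine: it idles in
    # WAITING_FOR_LIVE_START, switches to LIVE_STARTED while the recorder
    # and danmu processes are alive, hands the finished session to proc()
    # in a separate process (PROCESSING_RECORDS and the UPLOADING_* states),
    # and then goes back to waiting for the next stream.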
    def run(self):
        try:
            while True:
                if not self.prev_live_status and self.bl.live_status:
                    start = datetime.datetime.now()
                    self.blr = BiliLiveRecorder(self.config, start)
                    self.bdr = BiliDanmuRecorder(self.config, start)
                    record_process = Process(target=self.blr.run)
                    danmu_process = Process(target=self.bdr.run)
                    danmu_process.start()
                    record_process.start()

                    self.current_state.value = int(utils.state.LIVE_STARTED)
                    self.state_change_time.value = time.time()
                    self.prev_live_status = True

                    record_process.join()
                    danmu_process.join()

                    self.current_state.value = int(
                        utils.state.PROCESSING_RECORDS)
                    self.state_change_time.value = time.time()

                    self.prev_live_status = False
                    proc_process = Process(target=self.proc, args=(
                        self.config, self.blr.record_dir, self.bdr.danmu_dir, self.current_state, self.state_change_time))
                    proc_process.start()
                    try:
                        self.bl.check_live_status()
                    except Exception as e:
                        logging.error(
                            "Status Error: " + str(e) + traceback.format_exc())
                else:
                    time.sleep(self.config.get(
                        'root', {}).get('check_interval', 60))
        except KeyboardInterrupt:
            return
        except Exception as e:
            logging.error('Error in MainRunner: ' +
                          str(e) + traceback.format_exc())
            self.current_state.value = int(utils.state.ERROR)
            self.state_change_time.value = time.time()
            return


class MainThreadRunner(threading.Thread):
    def __init__(self, config):
        threading.Thread.__init__(self)
        self.mr = MainRunner(config)

    def run(self):
        self.mr.run()

--------------------------------------------------------------------------------
/Processor.py:
--------------------------------------------------------------------------------
import copy
import datetime
import logging
import os
import shutil
import subprocess
import traceback
from itertools import groupby
from typing import Dict, List, Tuple, Union

import ffmpeg
import jsonlines

import utils
from BiliLive import BiliLive


def parse_danmu(dir_name):
    danmu_list = []
    if os.path.exists(os.path.join(dir_name, 'danmu.jsonl')):
        with jsonlines.open(os.path.join(dir_name, 'danmu.jsonl')) as reader:
            for obj in reader:
                danmu_list.append({
                    "text": obj['text'],
                    "time": obj['properties']['time']//1000,
                    "uid": str(obj['user_info']['user_id'])
                })
    if os.path.exists(os.path.join(dir_name, 'superchat.jsonl')):
        with jsonlines.open(os.path.join(dir_name, 'superchat.jsonl')) as reader:
            for obj in reader:
                danmu_list.append({
                    "text": obj['text'],
                    "time": obj['time'],
                    "uid": str(obj['user_id'])
                })
    danmu_list = sorted(danmu_list, key=lambda x: x['time'])
    return danmu_list


def get_cut_points(time_dict: Dict[datetime.datetime, List[str]], up_ratio: float = 2, down_ratio: float = 0.75, topK: int = 5) -> List[Tuple[datetime.datetime, datetime.datetime, List[str]]]:
    status = 0  # 0: danmu volume is flat; 1: inside a high-activity segment
    cut_points = []
    prev_num = None
    start_time = None
    temp_texts = []
    for time, texts in time_dict.items():
        if prev_num is None:
            start_time = time
            temp_texts = copy.copy(texts)
        elif status == 0 and len(texts) >= prev_num*up_ratio:
            status = 1
            temp_texts.extend(texts)
        elif status == 1 and len(texts) < prev_num*down_ratio:
            # Activity fell back below the threshold: close the segment and
            # tag it with keywords drawn from everything said inside it.
            tags = utils.get_words(temp_texts, topK=topK)
            cut_points.append((start_time, time, tags))
            status = 0
            start_time = time
            temp_texts = copy.copy(texts)
        elif status == 1:
            # Still hot: keep accumulating texts for the keyword extraction.
            temp_texts.extend(texts)
        elif status == 0:
            start_time = time
            temp_texts = copy.copy(texts)
        prev_num = len(texts)
    return cut_points

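# Example with up_ratio=2 and down_ratio=0.75: per-minute danmu counts of
# 10, 25, 30, 12 open a segment at the 10 -> 25 jump (25 >= 10*2) and close
# it at the 30 -> 12 drop (12 < 30*0.75), yielding one (start, end, tags)
# span covering the burst.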

def get_manual_cut_points(danmu_list: List[Dict], uid: str, command: str) -> List[Tuple[datetime.datetime, datetime.datetime, List[str]]]:
    cut_points = []
    count = 0
    for danmu_obj in danmu_list:
        # Expected danmu format: /COMMAND DURATION [HINT]
        if danmu_obj['uid'] == uid and danmu_obj['text'].startswith(command):
            count += 1
            args = danmu_obj['text'].removeprefix(command).strip().split()
            duration = int(args[0])
            end_time = datetime.datetime.fromtimestamp(danmu_obj['time'])
            start_time = end_time - datetime.timedelta(seconds=duration)
            hint_text = f"手动切片_{count}"
            if len(args) >= 2:
                hint_text = " ".join(args[1:])
            cut_points.append((start_time, end_time, [hint_text]))
    return cut_points


def get_true_timestamp(video_times: List[Tuple[datetime.datetime, float]], point: datetime.datetime) -> float:
    # Map a wall-clock moment onto the merged video's timeline, skipping the
    # gaps between consecutive recorded segments.
    time_passed = 0
    for t, d in video_times:
        if point < t:
            return time_passed
        elif point - t <= datetime.timedelta(seconds=d):
            return time_passed + (point - t).total_seconds()
        else:
            time_passed += d
    return time_passed


def count(danmu_list: List, live_start: datetime.datetime, live_duration: float, interval: int = 60) -> Dict[datetime.datetime, List[str]]:
    start_timestamp = int(live_start.timestamp())
    return_dict = {}
    for k, g in groupby(danmu_list, key=lambda x: (x['time']-start_timestamp)//interval):
        return_dict[datetime.datetime.fromtimestamp(
            k*interval+start_timestamp)] = []
        for o in list(g):
            return_dict[datetime.datetime.fromtimestamp(
                k*interval+start_timestamp)].append(o['text'])
    return return_dict


def flv2ts(input_file: str, output_file: str, ffmpeg_logfile_handler) -> Union[subprocess.CompletedProcess, subprocess.CalledProcessError]:
    try:
        ret = subprocess.run(
            f'ffmpeg -y -fflags +discardcorrupt -i "{input_file}" -c copy -bsf:v h264_mp4toannexb -acodec aac -f mpegts "{output_file}"',
            shell=True, check=True, stdout=ffmpeg_logfile_handler, stderr=ffmpeg_logfile_handler)
        return ret
    except subprocess.CalledProcessError as err:
        traceback.print_exc()
        return err


def concat(merge_conf_path: str, merged_file_path: str, ffmpeg_logfile_handler) -> Union[subprocess.CompletedProcess, subprocess.CalledProcessError]:
    try:
        ret = subprocess.run(
            f'ffmpeg -y -f concat -safe 0 -i "{merge_conf_path}" -c copy -fflags +igndts -avoid_negative_ts make_zero "{merged_file_path}"',
            shell=True, check=True, stdout=ffmpeg_logfile_handler, stderr=ffmpeg_logfile_handler)
        return ret
    except subprocess.CalledProcessError as err:
        traceback.print_exc()
        return err


def get_start_time(filename: str) -> datetime.datetime:
    base = os.path.splitext(filename)[0]
    return datetime.datetime.strptime(
        " ".join(base.split("_")[1:3]), '%Y-%m-%d %H-%M-%S')

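# Example: a record named "22603245_2023-01-01_12-00-00.flv" (room id, date,
# time; the pattern assumed to come from utils.generate_filename) parses to
# datetime(2023, 1, 1, 12, 0, 0).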

class Processor(BiliLive):
    def __init__(self, config: Dict, record_dir: str, danmu_path: str):
        super().__init__(config)
        self.record_dir = record_dir
        self.danmu_path = danmu_path
        self.global_start = utils.get_global_start_from_records(
            self.record_dir)
        self.merge_conf_path = utils.get_merge_conf_path(
            self.room_id, self.global_start, config.get('root', {}).get('data_path', "./"))
        self.merged_file_path = utils.get_merged_filename(
            self.room_id, self.global_start, config.get('root', {}).get('data_path', "./"))
        self.outputs_dir = utils.init_outputs_dir(
            self.room_id, self.global_start, config.get('root', {}).get('data_path', "./"))
        self.splits_dir = utils.init_splits_dir(
            self.room_id, self.global_start, self.config.get('root', {}).get('data_path', "./"))
        self.times = []
        self.live_start = self.global_start
        self.live_duration = 0
        logging.basicConfig(level=utils.get_log_level(config),
                            format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                            datefmt='%a, %d %b %Y %H:%M:%S',
                            handlers=[
                                logging.FileHandler(
                                    os.path.join(
                                        config.get('root', {}).get('logger', {}).get('log_path', "./log"),
                                        "Processor_" + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log',
                                    ),
                                    mode='a',
                                    encoding='utf-8',
                                )
                            ],
                            force=True)
        self.ffmpeg_logfile_handler = open(
            os.path.join(config.get('root', {}).get('logger', {}).get('log_path', "./log"),
                         "FFMpeg_" + datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.log'),
            mode="a", encoding="utf-8")

    def pre_concat(self) -> Union[subprocess.CompletedProcess, subprocess.CalledProcessError]:
        filelist = sorted(os.listdir(self.record_dir), key=get_start_time)
        with open(self.merge_conf_path, "w", encoding="utf-8") as f:
            for filename in filelist:
                if os.path.splitext(os.path.join(self.record_dir, filename))[1] == ".flv" and \
                        os.path.getsize(os.path.join(self.record_dir, filename)) > 1024*1024:
                    ts_path = os.path.splitext(os.path.join(
                        self.record_dir, filename))[0] + ".ts"
                    ret = flv2ts(os.path.join(
                        self.record_dir, filename), ts_path, self.ffmpeg_logfile_handler)
                    if isinstance(ret, subprocess.CompletedProcess) and not self.config.get('spec', {}).get('recorder', {}).get('keep_raw_record', False):
                        os.remove(os.path.join(self.record_dir, filename))
                    # ts_path = os.path.join(self.record_dir, filename)
                    duration = float(ffmpeg.probe(ts_path)[
                        'format']['duration'])
                    start_time = get_start_time(filename)
                    self.times.append((start_time, duration))
                    f.write(f"file '{os.path.abspath(ts_path)}'\n")
        if len(self.times) == 0:
            logging.error("No valid record found.")
            return None
        ret = concat(self.merge_conf_path, self.merged_file_path,
                     self.ffmpeg_logfile_handler)
        self.times.sort(key=lambda x: x[0])
        self.live_start = self.times[0][0]
        self.live_duration = (
            self.times[-1][0] - self.times[0][0]).total_seconds() + self.times[-1][1]
        return ret

    def __cut_video(self, outhint: str, start_time: int, delta: int) -> Union[subprocess.CompletedProcess, subprocess.CalledProcessError]:
        hours, remainder = divmod(start_time, 3600)
        minutes, seconds = divmod(remainder, 60)
        output_file = os.path.join(
            self.outputs_dir, f"{self.room_id}_{self.global_start.strftime('%Y-%m-%d_%H-%M-%S')}_{hours:02}-{minutes:02}-{seconds:02}_{outhint}.mp4")
        cmd = f'ffmpeg -y -ss {start_time} -t {delta} -accurate_seek -i "{self.merged_file_path}" -c copy -avoid_negative_ts 1 "{output_file}"'
        try:
            ret = subprocess.run(cmd, shell=True, check=True,
                                 stdout=self.ffmpeg_logfile_handler, stderr=self.ffmpeg_logfile_handler)
            return ret
        except subprocess.CalledProcessError as err:
            traceback.print_exc()
            return err

197 |     def __cut_video(self, outhint: str, start_time: int, delta: int) -> Union[subprocess.CompletedProcess, subprocess.CalledProcessError]:
198 |         hours, remainder = divmod(start_time, 3600)
199 |         minutes, seconds = divmod(remainder, 60)
200 |         output_file = os.path.join(
201 |             self.outputs_dir, f"{self.room_id}_{self.global_start.strftime('%Y-%m-%d_%H-%M-%S')}_{hours:02}-{minutes:02}-{seconds:02}_{outhint}.mp4")
202 |         cmd = f'ffmpeg -y -ss {start_time} -t {delta} -accurate_seek -i "{self.merged_file_path}" -c copy -avoid_negative_ts 1 "{output_file}"'
203 |         try:
204 |             ret = subprocess.run(cmd, shell=True, check=True,
205 |                                  stdout=self.ffmpeg_logfile_handler, stderr=self.ffmpeg_logfile_handler)
206 |             return ret
207 |         except subprocess.CalledProcessError as err:
208 |             traceback.print_exc()
209 |             return err
210 |
211 |     def cut(self, cut_points: List[Tuple[datetime.datetime, datetime.datetime, List[str]]], min_length: int = 60) -> bool:
212 |         success = True
213 |         duration = float(ffmpeg.probe(self.merged_file_path)
214 |                          ['format']['duration'])
215 |         for cut_start, cut_end, tags in cut_points:
216 |             start = get_true_timestamp(self.times,
217 |                                        cut_start) + self.config.get('spec', {}).get('clipper', {}).get('start_offset', 0)
218 |             end = min(get_true_timestamp(self.times,
219 |                                          cut_end) + self.config.get('spec', {}).get('clipper', {}).get('end_offset', 0), duration)
220 |             delta = end-start
221 |             outhint = " ".join(tags)
222 |             logging.debug("Cut point [%s, %s] hint=%s", start, end, outhint)
223 |             if delta >= min_length:
224 |                 ret = self.__cut_video(outhint, max(
225 |                     0, int(start)), int(delta))
226 |                 success = success and isinstance(
227 |                     ret, subprocess.CompletedProcess)
228 |         return success
229 |
230 |     def split(self, split_interval: int = 3600) -> bool:
231 |         success = True
232 |         if split_interval <= 0:
233 |             shutil.copy2(self.merged_file_path, os.path.join(
234 |                 self.splits_dir, f"{self.room_id}_{self.global_start.strftime('%Y-%m-%d_%H-%M-%S')}_0000.mp4"))
235 |             return success
236 |
237 |         duration = float(ffmpeg.probe(self.merged_file_path)
238 |                          ['format']['duration'])
239 |         num_splits = int(duration) // split_interval + 1
240 |         for i in range(num_splits):
241 |             output_file = os.path.join(
242 |                 self.splits_dir, f"{self.room_id}_{self.global_start.strftime('%Y-%m-%d_%H-%M-%S')}_{i:04}.mp4")
243 |             cmd = f'ffmpeg -y -ss {i*split_interval} -t {split_interval} -accurate_seek -i "{self.merged_file_path}" -c copy -avoid_negative_ts 1 "{output_file}"'
244 |             try:
245 |                 _ = subprocess.run(cmd, shell=True, check=True,
246 |                                    stdout=self.ffmpeg_logfile_handler, stderr=self.ffmpeg_logfile_handler)
247 |             except subprocess.CalledProcessError:
248 |                 traceback.print_exc()
249 |                 success = False
250 |         return success
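    # Example: with the default split_interval of 3600 s, split() above cuts a
    # 7560 s recording into three parts covering [0, 3600), [3600, 7200) and
    # [7200, 7560). Since "-c copy" can only cut on keyframes, the boundaries
    # are approximate, and a duration that is an exact multiple of
    # split_interval yields an extra, essentially empty final part, which the
    # uploader later ignores via its 1 MiB minimum size filter.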
251 |
252 |     def run(self) -> bool:
253 |         try:
254 |             ret = self.pre_concat()
255 |             success = isinstance(ret, subprocess.CompletedProcess)
256 |             if success and not self.config.get('spec', {}).get('recorder', {}).get('keep_raw_record', False):
257 |                 if os.path.exists(self.merged_file_path):
258 |                     utils.del_files_and_dir(self.record_dir)
259 |             # duration = float(ffmpeg.probe(self.merged_file_path)[
260 |             #     'format']['duration'])
261 |             # start_time = get_start_time(self.merged_file_path)
262 |             # self.times.append((start_time, duration))
263 |             # self.live_start = self.times[0][0]
264 |             # self.live_duration = (
265 |             #     self.times[-1][0]-self.times[0][0]).total_seconds()+self.times[-1][1]
266 |
267 |             if not self.config.get('spec', {}).get('clipper', {}).get('enable_clipper', False) and not self.config.get('spec', {}).get('manual_clipper', {}).get('enabled', False):
268 |                 logging.info('Clipper and manual clipper are both disabled')
269 |                 os.rmdir(self.outputs_dir)
270 |
271 |             if not self.config.get('spec', {}).get('uploader', {}).get('record', {}).get('upload_record', False):
272 |                 logging.info('Record uploader disabled')
273 |                 os.rmdir(self.splits_dir)
274 |
275 |             if self.config.get('spec', {}).get('clipper', {}).get('enable_clipper', False):
276 |                 logging.info('Processing clipper')
277 |                 danmu_list = parse_danmu(self.danmu_path)
278 |                 counted_danmu_dict = count(
279 |                     danmu_list, self.live_start, self.live_duration, self.config.get('spec', {}).get('parser', {}).get('interval', 60))
280 |                 cut_points = get_cut_points(counted_danmu_dict, self.config.get('spec', {}).get('parser', {}).get('up_ratio', 2.5),
281 |                                             self.config.get('spec', {}).get('parser', {}).get('down_ratio', 0.75), self.config.get('spec', {}).get('parser', {}).get('topK', 5))
282 |                 ret = self.cut(cut_points, self.config.get('spec', {}).get(
283 |                     'clipper', {}).get('min_length', 60))
284 |                 success = success and ret
285 |             if self.config.get('spec', {}).get('manual_clipper', {}).get('enabled', False):
286 |                 logging.info('Processing manual clipper')
287 |                 danmu_list = parse_danmu(self.danmu_path)
288 |                 cut_points = get_manual_cut_points(
289 |                     danmu_list,
290 |                     self.config.get('spec', {}).get('manual_clipper', {}).get('uid', ""),
291 |                     self.config.get('spec', {}).get('manual_clipper', {}).get('command', '/DDR clip'),
292 |                 )
293 |                 ret = self.cut(cut_points, 0)
294 |                 success = success and ret
295 |             if self.config.get('spec', {}).get('uploader', {}).get('record', {}).get('upload_record', False):
296 |                 ret = self.split(self.config.get('spec', {}).get('uploader', {})
297 |                                  .get('record', {}).get('split_interval', 3600))
298 |                 success = success and ret
299 |             return success
300 |         except Exception:
301 |             traceback.print_exc()
302 |             return False
303 |
304 |
305 | if __name__ == "__main__":
306 |     danmu_list = parse_danmu("data/data/danmu/22603245_2022-04-17_08-40-42")
307 |     counted_danmu_dict = count(
308 |         danmu_list, datetime.datetime.strptime("2022-04-17_08-40-42", "%Y-%m-%d_%H-%M-%S"), (datetime.datetime.strptime("2022-04-17_13-14-37", "%Y-%m-%d_%H-%M-%S")-datetime.datetime.strptime("2021-03-13_11-20-16", "%Y-%m-%d_%H-%M-%S")).total_seconds(), 30)
309 |     cut_points = get_cut_points(counted_danmu_dict, 2.5,
310 |                                 0.75, 5)
311 |     # cut_points = get_manual_cut_points(danmu_list, "8559982")
312 |     print(cut_points)
313 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DDRecorder
2 |
3 | A headless, fully automatic all-in-one tool for recording, clipping and uploading Bilibili live streams
4 |
5 | **Versions >= 1.1.2 check for updates automatically, which requires a connection to the GitHub servers. Please be aware of this.**
6 |
7 |
8 | ## Acknowledgements
9 | FortuneDayssss/BilibiliUploader
10 | MoyuScript/bilibili-api
11 | ForgQi/biliup-rs
12 | fastnlp/fastHan
13 |
14 | ## References
15 | ```
16 | @inproceedings{geng-etal-2021-fasthan,
17 |    author = {Geng, Zhichao and Yan, Hang and Qiu, Xipeng and Huang, Xuanjing},
18 |    title = {fastHan: A BERT-based Multi-Task Toolkit for Chinese NLP},
19 |    booktitle = {Proceedings of the 59th Annual Meeting of the Association for Computational Linguistics and the 11th International Joint Conference on Natural Language Processing: System Demonstrations},
20 |    year = {2021},
21 |    pages = {99--106},
22 |    url = {https://aclanthology.org/2021.acl-demo.12}
23 | }
24 | ```
25 |
26 | ## Installation (Windows)
27 | 1. Download the zip package from the Releases page and extract it.
28 | 2. Edit the configuration file config.json; you can also use the bundled DDRecorderConfigManager (source: AsaChiri/DDRecorderConfigManager).
29 | 3. Double-click DDRecorder.exe (this uses config.json), or run DDRecorder.exe <config file>.
30 |
31 |
32 | ## Installation (macOS/Linux)
33 | 1. Install Python >= **3.9** https://www.python.org/downloads/release/python-3104/
34 | 2. Install ffmpeg https://ffmpeg.org/download.html
35 | 3. Run pip install -r requirements.txt
36 | 4. Edit the configuration file config.json in the config folder
37 | 5. Run python main.py <config file>
38 |
39 | ## Configuration file reference
40 |
41 | ### About login
42 |
43 | **Because of Bilibili's risk control, username/password login is very likely to fail (get flagged). It is therefore recommended to fill the access_token, refresh_token and cookies entries into the configuration file; the currently recommended way to obtain them is to log in once with [biliup-rs](https://github.com/ForgQi/biliup-rs). If you use DDRecorderConfigManager, this functionality is already integrated.**
44 |
45 | ### About placeholders
46 | The following placeholders can currently be used in the configuration file (a worked example follows the list):
47 | - {date}: the date (formatted as "2022年02月02日")
48 | - {room_name}: the **most recent** live room title
49 | - {year}, {month}, {day}, {hour}, {minute}, {second}: year, month, day, hour, minute and second of the live start time
50 | - {rough_time}: a rough description of the live start time (凌晨 / 上午 / 下午 / 晚上)
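To illustrate, the snippet below mirrors the `format_dict` that `Uploader.py` builds before formatting titles and descriptions; all concrete values here are examples:

```
import datetime

global_start = datetime.datetime(2022, 2, 2, 20, 30, 0)
format_dict = {
    "date": global_start.strftime('%Y{y}%m{m}%d{d}').format(y='年', m='月', d='日'),
    "year": global_start.year,
    "month": global_start.month,
    "day": global_start.day,
    "hour": global_start.hour,
    "minute": global_start.minute,
    "second": global_start.second,
    "rough_time": "晚上",      # utils.get_rough_time(global_start.hour)
    "room_name": "示例直播间",  # the most recent live room title
}

print("【示例】{date}直播录播".format(**format_dict))
# -> 【示例】2022年02月02日直播录播
```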
51 |
52 | ### About manual clipping
53 | The manual clipping feature works much like Nvidia's ShadowPlay. Its settings live in the "manual_clipper" section of the configuration file.
54 |
55 | Once the feature is enabled and the UID of the user allowed to issue commands is configured, that user can control DDRecorder to cut clips by sending specially formatted danmu during the live stream.
56 |
57 | The command format is ```/DDR clip <lookback time (seconds)> [description]```. The description is optional; if it is omitted, a description of the form "手动切片_{n}" is generated automatically.
58 |
59 | For example, to capture the 5 minutes leading up to the current moment, simply send ```/DDR clip 300```.
60 |
61 | To set the description to "主播锐评 ylg", send ```/DDR clip 60 主播锐评ylg```.
62 |
63 | Manual clips are written to the same folder as automatic clips, so they are governed by the settings under uploader-clips: if automatic clip uploading is enabled, manual clips are uploaded too, except that the part title is not the automatically extracted danmu keywords but the description text described above.
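A rough sketch of how such a command danmu is interpreted (cf. `get_manual_cut_points` in `Processor.py`; note that `str.removeprefix` needs Python >= 3.9):

```
text = "/DDR clip 300 主播锐评ylg"
command = "/DDR clip"

args = text.removeprefix(command).strip().split()   # ['300', '主播锐评ylg']
duration = int(args[0])                             # look back 300 seconds
hint = " ".join(args[1:]) if len(args) >= 2 else "手动切片_1"
# the clip then covers [danmu time - duration, danmu time], labelled with hint
```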
64 |
65 | ### Global settings (the root section)
66 | - check_interval: interval between live status checks, in seconds. Each monitored room is polled independently, so consider increasing this if you monitor many rooms. Because of Bilibili API rate limits, values below 30 are not recommended. Default: 100
67 | - print_interval: interval between console status prints, in seconds.
68 | - data_path: data file path. Default: "./" (i.e. the program's own directory)
69 | - logger: logging settings
70 |   - log_path: log file path. Default: "./log"
71 |   - log_level: log level, one of DEBUG/INFO/WARN
72 | - request_header: headers used for requests. A default set is built into the code; anything set here overrides it, so leave it empty unless necessary.
73 | - uploader: uploader settings
77 |   - lines: upload line to use; one of bda2, kodo, ws, qn, cos, cos-internal (supports traffic-free, accelerated uploads inside the Tencent Cloud intranet)
78 | - enable_baiduyun: whether to enable the Baidu Cloud backup feature.
79 |
80 | ### Per-room settings (the spec section; it is an array, so to monitor several rooms at once just append one entry per room)
81 | - room_id: room number
82 | - recorder: recorder settings
83 |   - keep_raw_record: whether to keep the raw (flv) recording files (the recorder merges all flv files into one mp4 at the end). Default: true
84 | - parser: danmu parser settings
85 |   - interval: danmu counting interval, in seconds. Default: 30
86 |   - up_ratio: threshold on the ratio between a bucket's danmu count and the previous bucket's, above which a clip starts. Default: 2.5
87 |   - down_ratio: threshold on the ratio between a bucket's danmu count and the previous bucket's, below which a clip ends. Default: 0.75
88 |   - topK: number of danmu keywords to extract. Default: 5
89 | - clipper: clipper settings
90 |   - enable_clipper: enable automatic clipping. Default: true
91 |   - min_length: minimum clip length, in seconds. Default: 60
92 |   - start_offset: offset applied to a clip's start, in seconds; positive shifts it later, negative shifts it earlier. Default: 0. Tune this to the room's danmu latency.
93 |   - end_offset: offset applied to a clip's end, in seconds; positive shifts it later, negative shifts it earlier. Default: 0. Tune this to the room's danmu latency.
94 | - manual_clipper: manual clipper settings
95 |   - enabled: enable the manual clipper. Default: false
96 |   - uid: UID of the user whose commands the manual clipper accepts.
97 |   - command: custom trigger command for manual clipping.
98 | - uploader: uploader settings
99 |   - account: upload account information
100 |     - username: username
101 |     - password: password
102 |     - access_token: Access token
103 |     - refresh_token: Refresh token
104 |     - cookies:
105 |       - SESSDATA: your SESSDATA
106 |       - bili_jct: your bili_jct
107 |       - DedeUserID: your DedeUserID
108 |       - DedeUserID__ckMd5: your DedeUserID__ckMd5
109 |       - sid: your sid
110 |   - copyright: submission type (1: original, 2: repost). **Warning! Submitting unauthorized content as "original" may prevent it from passing review!**
111 |   - record: full-recording upload settings
112 |     - upload_record: whether to upload the recording. Default: true
113 |     - keep_record_after_upload: whether to keep the recording after it passes review. Default: true
114 |     - split_interval: recording split interval, in seconds. Bilibili cannot take very large single uploads, so long recordings must be split first. Default: 3600. **If set to 0, the recording is not split; in that case make sure the account is allowed to upload very large files.**
115 |     - title: title of the uploaded video; placeholders may be used.
116 |     - tid: category id; see https://github.com/FortuneDayssss/BilibiliUploader/wiki/Bilibili%E5%88%86%E5%8C%BA%E5%88%97%E8%A1%A8
117 |     - tags: tags of the uploaded video
118 |     - desc: description of the uploaded video; placeholders may be used.
119 |     - cover: path of the cover image for the upload.
120 |   - clips: clip upload settings
121 |     - upload_clips: whether to upload clips. Default: true
122 |     - keep_clips_after_upload: whether to keep clips after they pass review. Default: true
123 |     - title: title of the uploaded video; placeholders may be used.
124 |     - tid: category id; see https://github.com/FortuneDayssss/BilibiliUploader/wiki/Bilibili%E5%88%86%E5%8C%BA%E5%88%97%E8%A1%A8
125 |     - tags: tags of the uploaded video
126 |     - desc: description of the uploaded video; placeholders may be used.
127 |     - cover: path of the cover image for the upload.
128 | - backup: whether to back up the recording to Baidu Cloud.
129 |
130 | ## Known issues
131 | - Large numbers of empty folders appear under the record folder. (Work-around patch)
132 | - Automatic upload cannot log in; accounts with insufficient 电磁力 (creator score) cannot submit multi-part videos. (Addressed in an update)
133 | - Stops working after being hit with a 412 by Bilibili's risk control. (An improvement is expected in the next feature update.)
134 | - PK sessions with mismatched resolutions cause corrupted, glitchy frames. (Under investigation.)
135 |
136 | ## Planned updates
137 | - Convert the danmu jsonl into ass subtitles and mux them into the recording automatically. (Expected in the next feature update.)
138 | - Delete the merged file once clips and/or recordings are uploaded, even when Baidu upload is disabled. (_Expected in the next feature update. Honestly I don't really want to add this; maybe that's just the data hoarder in me..._)
139 | - Add support for Douyu, Twitch and YouTube. (Planned once the current features are stable.)
140 |
--------------------------------------------------------------------------------
/Uploader.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import logging
3 | import os
4 | import traceback
5 | import json
6 | from biliup.plugins.bili_webup import BiliBili, Data
7 |
8 | import utils
9 | from BiliLive import BiliLive
10 |
11 |
12 | class Uploader(BiliLive):
13 |     def __init__(self, output_dir: str, splits_dir: str, config: dict):
14 |         super().__init__(config)
15 |         self.output_dir = output_dir
16 |         self.splits_dir = splits_dir
17 |
18 |         self.uploader = BiliBili(Data())
19 |         self.lines = config.get("root", {}).get(
20 |             "uploader", {}).get("lines", "AUTO")
21 |
22 |         account = get_account(self.config.get('spec', {}), config.get("root", {}))
23 |
24 |         try:
25 |             self.uploader.login(utils.get_cred_filename(
26 |                 self.room_id, self.config.get('root', {}).get('data_path', "./")), account)
27 |         except Exception as e:
28 |             logging.error(self.generate_log(
29 |                 'Error while login:' + str(e)+traceback.format_exc()))
30 |             raise
31 |
32 |     def upload(self, global_start: datetime.datetime) -> dict:
33 |         logging.basicConfig(level=utils.get_log_level(self.config),
34 |                             format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
35 |                             datefmt='%a, %d %b %Y %H:%M:%S',
36 |                             filename=os.path.join(self.config.get('root', {}).get('logger', {}).get('log_path', "./log"), "Uploader_"+datetime.datetime.now(
37 |                             ).strftime('%Y-%m-%d_%H-%M-%S')+'.log'),
38 |                             filemode='a')
39 |         return_dict = {}
40 |         datestr = global_start.strftime(
41 |             '%Y{y}%m{m}%d{d}').format(y='年', m='月', d='日')
42 |         format_dict = {"date": datestr,
43 |                        "year": global_start.year,
44 |                        "month": global_start.month,
45 |                        "day": global_start.day,
46 |                        "hour": global_start.hour,
47 |                        "minute": global_start.minute,
48 |                        "second": global_start.second,
49 |                        "rough_time": utils.get_rough_time(global_start.hour),
50 |                        "room_name": self.room_info['room_name']}
51 |         try:
52 |             if self.config.get('spec', {}).get('uploader', {}).get('clips', {}).get('upload_clips', False):
53 |                 clips_video_data = Data()
54 |                 clips_video_data.copyright = self.config.get('spec', {}).get(
55 |                     'uploader', {}).get('copyright', 2)
56 |                 clips_video_data.title = self.config.get('spec', {}).get(
57 |                     'uploader', {}).get('clips', {}).get('title', "").format(**format_dict)
58 |                 clips_video_data.desc = self.config.get('spec', {}).get(
59 |                     'uploader', {}).get('clips', {}).get('desc', "").format(**format_dict)
60 |                 clips_video_data.source = "https://live.bilibili.com/"+self.room_id
61 |                 clips_video_data.tid = self.config.get('spec', {}).get(
62 |                     'uploader', {}).get('clips', {}).get('tid', 27)
63 |                 clips_video_data.set_tag(self.config.get('spec', {}).get(
64 |                     'uploader', {}).get('clips', {}).get('tags', []))
65 |
66 |                 self.uploader.video = clips_video_data
67 |                 filelists = os.listdir(self.output_dir)
68 |                 filelists.sort(key=lambda x: int(
69 |                     "".join(os.path.splitext(x)[0].split("_")[3].split("-"))))
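                # Clip filenames follow {room_id}_{YYYY-mm-dd}_{HH-MM-SS}_{HH-MM-SS}_{hint}.mp4
                # (written by Processor.__cut_video), so field 3 after splitting on "_" is
                # the clip's "HH-MM-SS" offset into the merged video; gluing those pieces
                # together gives an integer HHMMSS key that orders clips chronologically.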
70 |                 for filename in filelists:
71 |                     if os.path.getsize(os.path.join(self.output_dir, filename)) < 1024*1024:
72 |                         continue
73 |                     file_path = os.path.join(self.output_dir, filename)
74 |                     video_part = self.uploader.upload_file(
75 |                         file_path, lines=self.lines)
76 |                     video_part['title'] = os.path.splitext(filename)[
77 |                         0].split("_")[-1]
78 |                     video_part['desc'] = self.config.get('spec', {}).get('uploader', {}).get(
79 |                         'clips', {}).get('desc', "").format(**format_dict)
80 |                     clips_video_data.append(video_part)
81 |                 if len(clips_video_data.videos) == 0:
82 |                     logging.warning(self.generate_log(
83 |                         '没有可用于上传的自动切片!'))
84 |                     self.uploader.video = None
85 |                 else:
86 |                     if os.path.exists(self.config.get('spec', {}).get(
87 |                             'uploader', {}).get('clips', {}).get('cover', "")):
88 |                         clips_video_data.cover = self.uploader.cover_up(self.config.get('spec', {}).get(
89 |                             'uploader', {}).get('clips', {}).get('cover', ""))
90 |
91 |                     clips_video_ret = self.uploader.submit()
92 |                     if clips_video_ret['code'] == 0 and clips_video_ret['data'] is not None:
93 |                         return_dict["clips"] = {
94 |                             "avid": clips_video_ret['data']['aid'],
95 |                             "bvid": clips_video_ret['data']['bvid']
96 |                         }
97 |
98 |             if self.config.get('spec', {}).get('uploader', {}).get('record', {}).get('upload_record', False):
99 |                 record_video_data = Data()
100 |                 record_video_data.copyright = self.config.get('spec', {}).get(
101 |                     'uploader', {}).get('copyright', 2)
102 |                 record_video_data.title = self.config.get('spec', {}).get(
103 |                     'uploader', {}).get('record', {}).get('title', "").format(**format_dict)
104 |                 record_video_data.desc = self.config.get('spec', {}).get(
105 |                     'uploader', {}).get('record', {}).get('desc', "").format(**format_dict)
106 |                 record_video_data.source = "https://live.bilibili.com/"+self.room_id
107 |                 record_video_data.tid = self.config.get('spec', {}).get(
108 |                     'uploader', {}).get('record', {}).get('tid', 27)
109 |                 record_video_data.set_tag(self.config.get('spec', {}).get(
110 |                     'uploader', {}).get('record', {}).get('tags', []))
111 |
112 |                 self.uploader.video = record_video_data
113 |
114 |                 filelists = os.listdir(self.splits_dir)
115 |                 filelists.sort(key=lambda x: int(
116 |                     os.path.splitext(x)[0].split("_")[-1]))
117 |                 for filename in filelists:
118 |                     if os.path.getsize(os.path.join(self.splits_dir, filename)) < 1024*1024:
119 |                         continue
120 |                     file_path = os.path.join(self.splits_dir, filename)
121 |                     video_part = self.uploader.upload_file(
122 |                         file_path, lines=self.lines)
123 |                     video_part['title'] = os.path.splitext(filename)[
124 |                         0].split("_")[-1]
125 |                     video_part['desc'] = self.config.get('spec', {}).get('uploader', {}).get(
126 |                         'record', {}).get('desc', "").format(**format_dict)
127 |                     record_video_data.append(video_part)
128 |                 if len(record_video_data.videos) == 0:
129 |                     logging.warning(self.generate_log(
130 |                         '没有可用于上传的录播分段!'))
131 |                     self.uploader.video = None
132 |                 else:
133 |                     if os.path.exists(self.config.get('spec', {}).get(
134 |                             'uploader', {}).get('record', {}).get('cover', "")):
135 |                         record_video_data.cover = self.uploader.cover_up(self.config.get('spec', {}).get(
136 |                             'uploader', {}).get('record', {}).get('cover', ""))
137 |                     record_video_ret = self.uploader.submit()
138 |                     if record_video_ret['code'] == 0 and record_video_ret['data'] is not None:
139 |                         return_dict["record"] = {
140 |                             "avid": record_video_ret['data']['aid'],
141 |                             "bvid": record_video_ret['data']['bvid']
142 |                         }
143 |
144 |         except Exception as e:
145 |             logging.error(self.generate_log(
146 |                 'Error while uploading:' + str(e)+traceback.format_exc()))
147 |             return None
148 |         finally:
149 |             self.uploader.close()
150 |
151 |         return return_dict
152 |
153 |
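# Uploader.upload() hands back a dict shaped like the following on success (the
# ids here are purely illustrative):
#
#     {
#         "clips":  {"avid": 170001, "bvid": "BV1xx411c7XX"},
#         "record": {"avid": 170002, "bvid": "BV1yy411c7YY"},
#     }
#
# Either key is absent when the corresponding upload is disabled or produced no
# parts, and None is returned when an exception was caught and logged.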
154 | def get_account(spec_config: dict, root_config: dict = None) -> dict:
155 |     account_config = spec_config.get('uploader', {}).get('account', 'default')
156 |     if isinstance(account_config, str) or not account_config:
157 |         account_config = get_root_account_by_name(root_config, account_config)
158 |
159 |     return {
160 |         'account':
161 |             {
162 |                 'username': account_config.get('username', ""),
163 |                 'password': account_config.get('password', "")
164 |             },
165 |         'access_token': account_config.get('access_token', ''),
166 |         'refresh_token': account_config.get('refresh_token', ''),
167 |         'cookies': account_config.get('cookies', None)
168 |     }
169 |
170 |
171 | def get_root_account_by_name(root_config: dict, name: str = None) -> dict:
172 |     if not name:
173 |         name = 'default'
174 |
175 |     account = root_config.get('account', {}).get(name, {})
176 |     if isinstance(account, str):
177 |         with open(account, encoding='utf-8') as cookie_file:
178 |             account = {"cookies": {}}
179 |             cookie_json = json.load(cookie_file)
180 |             for i in cookie_json["cookie_info"]["cookies"]:
181 |                 name = i["name"]
182 |                 account["cookies"][name] = i["value"]
183 |             account["access_token"] = cookie_json["token_info"]["access_token"]
184 |     return account
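# The account lookup above also supports a shared, root-level account table. A
# sketch of the two forms get_root_account_by_name() understands (key names
# follow the code above; every value is a placeholder):
#
#     "root": {
#         "account": {
#             "default": {"access_token": "...", "cookies": {"SESSDATA": "..."}},
#             "alt": "data/cred/biliup_login.json"
#         }
#     }
#
# A string entry is treated as the path of a biliup-rs style login file whose
# cookie_info/token_info sections are unpacked into the same shape, and a spec
# can then select it with "uploader": {"account": "alt"}.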
185 |
186 |
187 | if __name__ == '__main__':
188 |     import argparse
189 |     parser = argparse.ArgumentParser(description='DDRecorder uploader')
190 |     parser.add_argument('-c', '--config', type=str, default=None, required=True, help='配置文件路径')
191 |     parser.add_argument('-o', '--output_dir', type=str, default='', help='切片的保存目录')
192 |     parser.add_argument('-s', '--splits_dir', type=str, default='', help='splits dir')
193 |
194 |     args = parser.parse_args()
195 |
196 |     with open(args.config, "r", encoding="UTF-8") as f:
197 |         all_config = json.load(f)
198 |
199 |     media_path = args.output_dir or args.splits_dir
200 |     media_name_split = os.path.basename(media_path).split('_')
201 |     room_id = media_name_split[0]
202 |     for temp_spec in all_config['spec']:
203 |         if str(temp_spec['room_id']) == room_id:
204 |             spec = temp_spec
205 |             break
206 |     else:
207 |         logging.error(f'找不到 room id {room_id} 对应的配置')
208 |         exit(1)
209 |
210 |     # noinspection PyUnboundLocalVariable
211 |     config = {
212 |         'root': all_config.get('root', {}),
213 |         'spec': spec
214 |     }
215 |     uploader = Uploader(
216 |         output_dir=args.output_dir, splits_dir=args.splits_dir, config=config)
217 |
218 |     time_str = '_'.join(media_name_split[1:])
219 |     start_time = datetime.datetime.strptime(time_str, '%Y-%m-%d_%H-%M-%S')
220 |     uploader.upload(global_start=start_time)
221 |
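# Example standalone invocation (the directory basename supplies the room id and
# the global start time parsed above; the path is illustrative):
#
#     python Uploader.py -c config/config.json -o ./data/outputs/566227_2022-04-17_08-40-42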
| "DedeUserID__ckMd5": "your DedeUserID__ckMd5", 108 | "sid": "your sid" 109 | } 110 | }, 111 | "copyright": 2, 112 | "record": { 113 | "upload_record": false, 114 | "keep_record_after_upload": true, 115 | "split_interval": 3600, 116 | "title": "【桃井最中 Monaka】{date}直播录播", 117 | "tid": 27, 118 | "tags": [ 119 | "直播录播", 120 | "桃井最中Monaka", 121 | "虚拟主播", 122 | "VTuber", 123 | "虚拟UP主" 124 | ], 125 | "desc": "切片自{date} 桃井最中 Monaka 直播\n---------------------------------------\n本视频由全自动切片型智能 DD 机上传~", 126 | "cover": "" 127 | }, 128 | "clips": { 129 | "upload_clips": true, 130 | "keep_clips_after_upload": true, 131 | "title": "【桃井最中 Monaka】{date}直播精彩剪辑【自动切片】", 132 | "tid": 27, 133 | "tags": [ 134 | "直播录播", 135 | "剪辑", 136 | "桃井最中Monaka", 137 | "虚拟主播", 138 | "VTuber", 139 | "虚拟UP主" 140 | ], 141 | "desc": "切片自{date} 桃井最中 Monaka 直播\n---------------------------------------\n本视频由全自动切片型智能 DD 机剪辑上传~", 142 | "cover": "" 143 | } 144 | }, 145 | "backup": false 146 | } 147 | ] 148 | } 149 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | recorder: 4 | build: . 5 | volumes: 6 | - ./config:/usr/src/app/config 7 | - ./log:/usr/src/app/log 8 | - ./data:/usr/src/app/data 9 | restart: always 10 | -------------------------------------------------------------------------------- /icon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AsaChiri/DDRecorder/f19722173dfeac975f17a10c27359f5512e9adc2/icon.ico -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import logging 4 | import os 5 | import sys 6 | import time 7 | import threading 8 | from multiprocessing import freeze_support 9 | from lastversion import lastversion 10 | 11 | import utils 12 | from MainRunner import MainThreadRunner 13 | 14 | 15 | CURRENT_VERSION = "1.3.3" 16 | 17 | 18 | 19 | class versionThread(threading.Thread): 20 | def __init__(self): 21 | threading.Thread.__init__(self) 22 | 23 | def run(self): 24 | try: 25 | latest_version = lastversion.has_update( 26 | repo="AsaChiri/DDRecorder", current_version=CURRENT_VERSION) 27 | if latest_version: 28 | print('DDRecorder 有更新,版本号:{} 请尽快到 https://github.com/AsaChiri/DDRecorder/releases 下载最新版'.format(str(latest_version))) 29 | else: 30 | print('DDRecorder 已是最新版本!') 31 | except: 32 | print('无法获取 DDRecorder 的版本信息,当前版本号:{},请到 https://github.com/AsaChiri/DDRecorder/releases 检查最新版本'.format(CURRENT_VERSION)) 33 | 34 | 35 | 36 | if __name__ == "__main__": 37 | freeze_support() 38 | vt = versionThread() 39 | vt.start() 40 | 41 | if utils.is_windows(): 42 | utils.add_path("./ffmpeg/bin") 43 | 44 | try: 45 | if len(sys.argv) > 1: 46 | all_config_filename = sys.argv[1] 47 | with open(all_config_filename, "r", encoding="UTF-8") as f: 48 | all_config = json.load(f) 49 | else: 50 | with open("config.json", "r", encoding="UTF-8") as f: 51 | all_config = json.load(f) 52 | except Exception as e: 53 | print("解析配置文件时出现错误,请检查配置文件!") 54 | print("错误详情:"+str(e)) 55 | os.system('pause') 56 | 57 | utils.check_and_create_dir(all_config.get( 58 | 'root', {}).get('data_path', "./")) 59 | utils.check_and_create_dir(all_config.get('root', {}).get( 60 | 'logger', {}).get('log_path', './log')) 61 | logfile_name = 
"Main_"+datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')+'.log' 62 | logging.basicConfig(level=utils.get_log_level(all_config), 63 | format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', 64 | datefmt='%a, %d %b %Y %H:%M:%S', 65 | handlers=[logging.FileHandler(os.path.join(all_config.get('root', {}).get('logger', {}).get('log_path', "./log"), logfile_name), "a", encoding="utf-8")]) 66 | utils.init_data_dirs(all_config.get('root', {}).get('data_path', "./")) 67 | if all_config.get('root', {}).get('enable_baiduyun', False): 68 | from bypy import ByPy 69 | bp = ByPy() 70 | 71 | runner_dict = {} 72 | for spec_config in all_config.get('spec', []): 73 | config = { 74 | 'root': all_config.get('root', {}), 75 | 'spec': spec_config 76 | } 77 | tr = MainThreadRunner(config) 78 | tr.setDaemon(True) 79 | runner_dict[spec_config['room_id']] = tr 80 | 81 | for tr in runner_dict.values(): 82 | tr.start() 83 | time.sleep(10) 84 | 85 | while True: 86 | old_config = all_config 87 | try: 88 | if len(sys.argv) > 1: 89 | all_config_filename = sys.argv[1] 90 | with open(all_config_filename, "r", encoding="UTF-8") as f: 91 | all_config = json.load(f) 92 | else: 93 | with open("config.json", "r", encoding="UTF-8") as f: 94 | all_config = json.load(f) 95 | except Exception as e: 96 | print("解析配置文件时出现错误,请检查配置文件!已使用最后一次正确的配置") 97 | print("错误详情:"+str(e)) 98 | all_config = old_config 99 | utils.check_and_create_dir(all_config.get( 100 | 'root', {}).get('data_path', "./")) 101 | utils.check_and_create_dir(all_config.get('root', {}).get( 102 | 'logger', {}).get('log_path', './log')) 103 | logging.basicConfig(level=utils.get_log_level(all_config), 104 | format='%(asctime)s %(thread)d %(threadName)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', 105 | datefmt='%a, %d %b %Y %H:%M:%S', 106 | handlers=[logging.FileHandler(os.path.join(all_config.get('root', {}).get('logger', {}).get('log_path', "./log"), logfile_name), "a", encoding="utf-8")]) 107 | utils.init_data_dirs(all_config.get('root', {}).get('data_path', "./")) 108 | if all_config.get('root', {}).get('enable_baiduyun', False): 109 | from bypy import ByPy 110 | bp = ByPy() 111 | for spec_config in all_config.get('spec', []): 112 | config = { 113 | 'root': all_config.get('root', {}), 114 | 'spec': spec_config 115 | } 116 | if spec_config['room_id'] in runner_dict: 117 | runner_dict[spec_config['room_id']].mr.config = config 118 | else: 119 | tr = MainThreadRunner(config) 120 | tr.setDaemon(True) 121 | runner_dict[spec_config['room_id']] = tr 122 | tr.start() 123 | 124 | utils.print_log(runner_dict) 125 | time.sleep(all_config.get('root', {}).get('print_interval', 60)) 126 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | biliup>=0.2.11 2 | brotli>=1.0.9 3 | brotlipy>=0.7.0 4 | bypy>=1.8.2 5 | FastNLP>=1.0.0 6 | fastHan>=2.0 7 | pandas>=1.5.1 8 | datasets>=2.7.0 9 | transformers>=4.0.0 10 | jsonlines>=3.0.0 11 | lastversion>=2.4.5 12 | prettytable>=3.2.0 13 | requests>=2.27.1 14 | urllib3>=1.26.9 15 | websockets>=10.2 16 | ffmpeg-python>=0.2.0 17 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | from collections import Counter 2 | 3 | import ctypes 4 | import datetime 5 | import logging 6 | import os 7 | import platform 8 | import threading 9 | 
9 | from enum import Enum
10 |
11 | import prettytable as pt
12 | from fastHan import FastHan
13 |
14 | model = FastHan()
15 | model.set_cws_style('wtb')
16 |
17 |
18 | def is_windows() -> bool:
19 |     plat_sys = platform.system()
20 |     return plat_sys == "Windows"
21 |
22 |
23 | if is_windows():
24 |     import winreg
25 |
26 |
27 | def get_log_level(config: dict) -> int:
28 |     if config.get('root', {}).get('logger', {}).get('log_level', 'DEBUG') == 'DEBUG':
29 |         return logging.DEBUG
30 |     if config.get('root', {}).get('logger', {}).get('log_level', 'DEBUG') == 'INFO':
31 |         return logging.INFO
32 |     if config.get('root', {}).get('logger', {}).get('log_level', 'DEBUG') == 'WARN':
33 |         return logging.WARN
34 |     if config.get('root', {}).get('logger', {}).get('log_level', 'DEBUG') == 'ERROR':
35 |         return logging.ERROR
36 |     return logging.INFO
37 |
38 |
39 | def check_and_create_dir(dirs: str) -> None:
40 |     if not os.path.exists(dirs):
41 |         os.mkdir(dirs)
42 |
43 |
44 | def init_data_dirs(root_dir: str = os.getcwd()) -> None:
45 |     check_and_create_dir(os.path.join(root_dir, 'data'))
46 |     check_and_create_dir(os.path.join(root_dir, 'data', 'records'))
47 |     check_and_create_dir(os.path.join(root_dir, 'data', 'merged'))
48 |     check_and_create_dir(os.path.join(root_dir, 'data', 'merge_confs'))
49 |     check_and_create_dir(os.path.join(root_dir, 'data', 'danmu'))
50 |     check_and_create_dir(os.path.join(root_dir, 'data', 'outputs'))
51 |     check_and_create_dir(os.path.join(root_dir, 'data', 'splits'))
52 |     check_and_create_dir(os.path.join(root_dir, 'data', 'cred'))
53 |
54 |
55 | def init_record_dir(room_id: str, global_start: datetime.datetime, root_dir: str = os.getcwd()) -> str:
56 |     dirs = os.path.join(root_dir, 'data', 'records',
57 |                         f"{room_id}_{global_start.strftime('%Y-%m-%d_%H-%M-%S')}")
58 |     check_and_create_dir(dirs)
59 |     return dirs
60 |
61 |
62 | def init_danmu_log_dir(room_id: str, global_start: datetime.datetime, root_dir: str = os.getcwd()) -> str:
63 |     log_dir = os.path.join(
64 |         root_dir, 'data', 'danmu', f"{room_id}_{global_start.strftime('%Y-%m-%d_%H-%M-%S')}")
65 |     check_and_create_dir(log_dir)
66 |     return log_dir
67 |
68 |
69 | def generate_filename(room_id: str) -> str:
70 |     return f"{room_id}_{datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.flv"
71 |
72 |
73 | def get_global_start_from_records(record_dir: str) -> datetime.datetime:
74 |     base = os.path.basename(record_dir)
75 |     return datetime.datetime.strptime(" ".join(base.split("_")[1:3]), '%Y-%m-%d %H-%M-%S')
76 |
77 |
78 | def get_merged_filename(room_id: str, global_start: datetime.datetime, root_dir: str = os.getcwd()) -> str:
79 |     filename = os.path.join(root_dir, 'data', 'merged',
80 |                             f"{room_id}_{global_start.strftime('%Y-%m-%d_%H-%M-%S')}_merged.mp4")
81 |     return filename
82 |
83 |
84 | def init_outputs_dir(room_id: str, global_start: datetime.datetime, root_dir: str = os.getcwd()) -> str:
85 |     dirs = os.path.join(root_dir, 'data', 'outputs',
86 |                         f"{room_id}_{global_start.strftime('%Y-%m-%d_%H-%M-%S')}")
87 |     check_and_create_dir(dirs)
88 |     return dirs
89 |
90 |
91 | def init_splits_dir(room_id: str, global_start: datetime.datetime, root_dir: str = os.getcwd()) -> str:
92 |     dirs = os.path.join(root_dir, 'data', 'splits',
93 |                         f"{room_id}_{global_start.strftime('%Y-%m-%d_%H-%M-%S')}")
94 |     check_and_create_dir(dirs)
95 |     return dirs
96 |
97 |
98 | def get_merge_conf_path(room_id: str, global_start: datetime.datetime, root_dir: str = os.getcwd()) -> str:
99 |     filename = os.path.join(root_dir, 'data', 'merge_confs',
f"{room_id}_{global_start.strftime('%Y-%m-%d_%H-%M-%S')}_merge_conf.txt") 101 | return filename 102 | 103 | 104 | def get_cred_filename(room_id: str, root_dir: str = os.getcwd()) -> str: 105 | filename = os.path.join(root_dir, 'data', 'cred', 106 | f"{room_id}_cred.json") 107 | return filename 108 | 109 | 110 | def del_files_and_dir(dirs: str) -> None: 111 | for filename in os.listdir(dirs): 112 | os.remove(os.path.join(dirs, filename)) 113 | os.rmdir(dirs) 114 | 115 | 116 | def get_rough_time(hour: int) -> str: 117 | if 0 <= hour < 6: 118 | return "凌晨" 119 | elif 6 <= hour < 12: 120 | return "上午" 121 | elif 12 <= hour < 18: 122 | return "下午" 123 | else: 124 | return "晚上" 125 | 126 | 127 | def refresh_reg() -> None: 128 | HWND_BROADCAST = 0xFFFF 129 | WM_SETTINGCHANGE = 0x1A 130 | 131 | SMTO_ABORTIFHUNG = 0x0002 132 | 133 | result = ctypes.c_long() 134 | SendMessageTimeoutW = ctypes.windll.user32.SendMessageTimeoutW 135 | SendMessageTimeoutW(HWND_BROADCAST, WM_SETTINGCHANGE, 0, 136 | u'Environment', SMTO_ABORTIFHUNG, 5000, ctypes.byref(result)) 137 | 138 | 139 | def add_path(path: str) -> None: 140 | abs_path = os.path.abspath(path) 141 | path_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, 142 | 'Environment', 0, winreg.KEY_ALL_ACCESS) 143 | path_value = winreg.QueryValueEx(path_key, 'Path') 144 | if path_value[0].find(abs_path) == -1: 145 | winreg.SetValueEx(path_key, "Path", 0, 146 | winreg.REG_EXPAND_SZ, path_value[0]+(";" if path_value[0][-1] != ";" else "")+abs_path+";") 147 | refresh_reg() 148 | 149 | 150 | class state(Enum): 151 | ERROR = -1 152 | WAITING_FOR_LIVE_START = 0 153 | LIVE_STARTED = 1 154 | PROCESSING_RECORDS = 2 155 | UPLOADING_TO_BILIBILI = 3 156 | UPLOADING_TO_BAIDUYUN = 4 157 | 158 | def __str__(self): 159 | if self.value == -1: 160 | return "错误!" 161 | if self.value == 0: 162 | return "摸鱼中" 163 | if self.value == 1: 164 | return "正在录制" 165 | if self.value == 2: 166 | return "正在处理视频" 167 | if self.value == 3: 168 | return "正在上传至Bilibili" 169 | if self.value == 4: 170 | return "正在上传至百度网盘" 171 | 172 | def __int__(self): 173 | return self.value 174 | 175 | 176 | def print_log(runner_list: list) -> str: 177 | tb = pt.PrettyTable() 178 | tb.field_names = ["TID", "平台", "房间号", "直播状态", "程序状态", "状态变化时间"] 179 | for runner in runner_list.values(): 180 | tb.add_row([runner.native_id, runner.mr.bl.site_name, runner.mr.bl.room_id, "是" if runner.mr.bl.live_status else "否", 181 | str(state(runner.mr.current_state.value)), datetime.datetime.fromtimestamp(runner.mr.state_change_time.value)]) 182 | print( 183 | f" DDRecorder 当前时间:{datetime.datetime.now()} 正在工作线程数:{threading.activeCount()}\n") 184 | print(tb) 185 | print("\n") 186 | 187 | 188 | def get_words(txts, topK=5): 189 | seg_list = [] 190 | for txt in txts: 191 | seg_list.extend(model(txt)[0]) 192 | c = Counter() 193 | for x in seg_list: # 进行词频统计 194 | if len(x) > 1 and x != '\r\n' and x != '\n': 195 | c[x] += 1 196 | try: 197 | return list(list(zip(*c.most_common(topK)))[0]) 198 | except IndexError: 199 | return [] 200 | --------------------------------------------------------------------------------