├── .gitattributes ├── .github └── workflows │ ├── main.yml │ └── pyinstall-win.yml ├── .gitignore ├── .vscode └── settings.json ├── LICENSE ├── README.md ├── console.py ├── console.spec ├── cron.py ├── dockerfile ├── frontend ├── assets │ ├── index-BsMP4CtQ.css │ └── index-yGtPj0fc.js └── index.html ├── job.py ├── lib.py ├── log.py ├── main.py ├── poetry.lock ├── pyproject.toml ├── requirements.txt ├── server.py └── watch.py /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: docker images cicd 2 | # 触发器设置 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | # 项目任务,任务之间可以并行调度 10 | jobs: 11 | build: 12 | # 选择云端运行的环境 13 | runs-on: ubuntu-latest 14 | steps: 15 | # uses代表使用一个模块,此处使用的是checkout模块,将github项目文件导入到当前环境中 16 | - uses: actions/checkout@v3 17 | # 使用with跟在后面来为前面的模块输入参数 18 | with: 19 | submodules: 'true' 20 | - name: Set up QEMU 21 | uses: docker/setup-qemu-action@v2 22 | - name: Set up Docker Buildx 23 | uses: docker/setup-buildx-action@v2 24 | - name: Login to DockerHub 25 | uses: docker/login-action@v2 26 | with: 27 | # 这里用到了github的secrets功能,避免账户和密码随仓库泄露 28 | username: ${{ secrets.DOCKER_USERNAME }} 29 | password: ${{ secrets.DOCKER_TOKEN }} 30 | # 开始构建镜像 31 | - name: Build and push 32 | uses: docker/build-push-action@v2 33 | with: 34 | context: . 
35 | file: dockerfile 36 | build-args: | 37 | GITHUB_TOKEN=${{ secrets.RELEASE_TOKEN }} 38 | platforms: | 39 | linux/amd64 40 | linux/arm64 41 | push: true 42 | # 指定用户/仓库名 43 | tags: | 44 | ${{ secrets.DOCKER_USERNAME }}/115strm:latest 45 | # 这里是通过md文件自动生成dockerhub描述的模块,也可以不需要 46 | - name: Docker Hub Description 47 | uses: peter-evans/dockerhub-description@v3 48 | with: 49 | username: ${{ secrets.DOCKER_USERNAME }} 50 | password: ${{ secrets.DOCKER_TOKEN }} 51 | repository: ${{ secrets.DOCKER_USERNAME }}/115strm 52 | readme-filepath: ./README.md -------------------------------------------------------------------------------- /.github/workflows/pyinstall-win.yml: -------------------------------------------------------------------------------- 1 | name: PyInstaller 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | build: 10 | runs-on: ${{ matrix.os }} 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | os: ['windows-latest', 'ubuntu-latest'] 15 | 16 | env: 17 | MAIN_PY_FILE: 'console.spec' # Define the path to your main.py file here 18 | 19 | steps: 20 | - name: Checkout code 21 | uses: actions/checkout@v3 22 | 23 | - name: Set up Python 24 | uses: actions/setup-python@v4 25 | with: 26 | python-version: 3.12 27 | 28 | - name: Install Python dependencies 29 | run: | 30 | pip install -r requirements.txt 31 | working-directory: ./ 32 | 33 | - name: Install PyInstaller 34 | run: | 35 | pip install pyinstaller 36 | working-directory: ./ 37 | 38 | - name: Build executable 39 | run: | 40 | pyinstaller ${{ env.MAIN_PY_FILE }} 41 | working-directory: ./ 42 | 43 | - name: Zip the app (Windows) 44 | if: matrix.os == 'windows-latest' 45 | uses: vimtor/action-zip@v1.2 46 | with: 47 | files: dist/q115strm.exe 48 | dest: dist/windows-x86.zip 49 | 50 | - uses: actions/upload-artifact@v4 51 | if: matrix.os == 'windows-latest' 52 | with: 53 | name: windows app 54 | path: dist/windows-x86.zip 55 | 56 | - name: Zip the app (Linux) 57 | if: matrix.os == 'ubuntu-latest' 
58 | uses: vimtor/action-zip@v1.2 59 | with: 60 | files: dist/q115strm 61 | dest: dist/linux-x86.zip 62 | 63 | - uses: actions/upload-artifact@v4 64 | if: matrix.os == 'ubuntu-latest' 65 | with: 66 | name: linux app 67 | path: dist/linux-x86.zip 68 | 69 | # - name: create release 70 | # id: create_release 71 | # uses: actions/create-release@v1 72 | # env: 73 | # GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} 74 | # with: 75 | # tag_name: ${{ github.run_number }} 76 | # release_name: Release ${{ github.run_number }} 77 | # body: | 78 | # Test Release 79 | # draft: false 80 | # prerelease: false 81 | 82 | # - name: Upload release asset 83 | # id: upload-release-asset 84 | # uses: actions/upload-release-asset@v1 85 | # env: 86 | # GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} 87 | # with: 88 | # upload_url: ${{ steps.create_release.outputs.upload_url }} 89 | # asset_path: dist/windowx-x86.zip 90 | # asset_name: windowx-x86.zip 91 | # asset_content_type: application/zip 92 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | /build 3 | /dist 4 | 115-cookies.txt 5 | *.exe 6 | *.log 7 | poetry.lock 8 | /data/config/* 9 | __pycache__ 10 | .input 11 | a.json 12 | rmstrm.py -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.REPL.enableREPLSmartSend": false 3 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 作者已经放弃网盘,此项目已归档 2 | 3 | ## 介绍 4 | ##### 基于[p115client](https://github.com/ChenyangGao/p115client)开发,通过生成115目录树来快速完成STRM文件创建,由于只有一次请求所以不会触发风控 5 | ##### 默认用户名密码都是admin 6 | 7 | ## 注意事项 8 | 1. 同一个账号同一时间只能有一个生成目录树的操作,请不要添加多个相同账号的cookie 9 | 1. 115网盘中的目录名不能包含媒体文件扩展名,否则会被识别为文件而不是目录 10 | > 比如战狼电影:Media/Movie/战狼.FLAC.MP4/战狼.FLAC.MP4,这个目录会被识别为两个MP4文件 11 | - Media/Movie/战狼.FLAC.MP4 12 | - Media/Movie/战狼.FLAC.MP4/战狼.FLAC.MP4 13 | > 这是由于115目录树不包含文件元数据,只能通过是否有媒体文件扩展名来确定到底是文件还是目录 14 | 1. 如果文件很多,建议添加多个同步目录,这样处理速度更快 15 | 1. 如果同一账号的多个目录都使用定时同步方式,那么执行时间需要错开,间隔5分钟为佳 16 | - 目录1每天0点0分执行:0 0 * * * 17 | - 目录2每天0点5分执行:5 0 * * * 18 | - 目录3每天0点10分执行:10 0 * * * 19 | 1. 监控变更依赖于CD2的会员功能,请确保使用CD2并且开通了会员 20 | 1. alist302方式要求emby/jellyfin + emby2alist配合,否则无法直接播放 21 | 1. 如果配置电报通知并且服务器在国内,需要配置代理,docke添加环境变量PROXY_HOST=http://ip:port 22 | 1. 
如果需要编程触发任务执行,请调用:http://ip:port/api/job/{path},path参数指添加同步目录时的同步路径字段的值 23 | 24 | ## TODO 25 | - [x] STRM生成 26 | - [x] 元数据复制 27 | - [x] 支持源文件不存在时删除目标文件 28 | - [x] 支持CD2本地挂载,STRM内存放媒体文件的本地路径 29 | - [x] 支持WebDAV,STRM内存放WebDAV Url,可供播放器直接播放 30 | - [x] 支持Alist 302,STRM内存放Alist链接(http://ip:port/d/115/xxxxx.mkv) ,配合emby2alist插件,客户端可播放115真实链接节省服务器流量(v0.3.2版本) 31 | - [x] 元数据增加软链接处理方式 32 | - [x] docker支持 + 简单的web ui (v0.2版本) 33 | - [x] docker版本增加监控文件变更,自动生成STRM,CD2 only (v0.2版本) 34 | - [x] docker版本定时同步 (v0.2版本) 35 | - [x] docker版本支持添加多个同步目录,每个同步目录都可以单独设置类型(local,webdav),strm_ext, meta_ext,以及使用不同的115账号(v0.2版本) 36 | - [x] docker版本监控服务使用队列来进行精细化操作,减少对115目录树的生成请求(v0.3版本) 37 | - [x] 可执行文件采用交互式命令行来创建配置文件(v0.3.1版本) 38 | - [x] 支持其他网盘的STRM生成,但是需要本地挂载软件如CD或RClone支持(v0.3.4版本) 39 | - [x] Web UI支持简易HTTP AUTH (v0.4版本) 40 | - [x] 支持发送电报通知 (v0.4版本) 41 | 42 | 43 | ## 一、可执行文件运行: 44 | 1. 下载对应平台的压缩包,并解压 45 | 2. 打开终端切换到项目目录执行命令,比如解压到了D盘q115-strm目录: 46 | ```console 47 | cd D:\q115-strm 48 | // 查看同步目录列表 49 | q115strm.exe list 50 | // 添加115账号 51 | q115strm.exe add115 52 | // 添加同步目录 53 | q115strm.exe create 54 | // 执行全部同步任务 55 | q115strm.exe run 56 | // 执行单个同步任务 57 | q115strm.exe run -k=xxx 58 | ``` 59 | 60 | ## 二、DOCKER 61 | ```bash 62 | docker run -d \ 63 | --name q115strm \ 64 | -e "TZ=Asia/Shanghai" \ 65 | -v /vol1/1000/docker/q115strm/data:/app/data \ 66 | -v /vol1/1000/docker/clouddrive2/shared/115:/vol1/1000/docker/clouddrive2/shared/115:shared \ 67 | -v /vol1/1000/视频/网盘/115:/115 \ 68 | -p 12123:12123 \ 69 | --restart unless-stopped \ 70 | qicfan/115strm:latest 71 | ``` 72 | 73 | 或者compose 74 | 75 | ``` 76 | services: 77 | 115strm: 78 | image: qicfan/115strm 79 | container_name: q115strm 80 | environment: 81 | - TZ=Asia/Shanghai 82 | ports: 83 | - target: 12123 84 | published: 12123 85 | protocol: tcp 86 | volumes: 87 | - /vol1/1000/docker/q115strm/data:/app/data # 运行日志和数据 88 | - /vol1/1000/docker/clouddrive2/shared/115:/vol1/1000/docker/clouddrive2/shared/115:shared # 
import argparse
import json
import os
import sys

# The data directories must exist before the project modules below are
# imported: they open log/config files at import time.
if not os.path.exists('./data/logs'):
    os.makedirs('./data/logs')
if not os.path.exists('./data/config'):
    os.makedirs('./data/config')

from job import StartJob
from lib import OO5, Lib, Libs, OO5List
from rich import print as rprint
from rich.prompt import Prompt, Confirm, FloatPrompt
from rich.console import Console
from rich.table import Table

LIBS = Libs()
o5List = OO5List()


def listLib():
    """Print a table of every configured sync directory."""
    libList = LIBS.list()
    if len(libList) == 0:
        rprint('[bold red]还没有添加任何同步目录[/]')
        return
    table = Table(title="同步目录列表")
    table.add_column("KEY", justify="left", style="cyan", no_wrap=True)
    table.add_column("网盘类型",)
    table.add_column("名称", style="magenta")
    table.add_column("目录树路径", justify="right", style="green")
    table.add_column("方式", justify="right", style="red")
    for lib in libList:
        table.add_row(lib.key, lib.cloud_type, lib.name, lib.path, lib.type)
    console = Console()
    console.print(table)


def run(key: str | None = None):
    """Run a single sync job (by key), or every configured job when key is None."""
    if key is not None:
        StartJob(key, logStream=True)
        return
    # No key given: iterate over all configured directories.
    for lib in LIBS.list():
        StartJob(lib.key, logStream=True)


def add115():
    """Interactively register a 115 account (cookie + a friendly name)."""
    oo5 = {}
    oo5['cookie'] = Prompt.ask("[green]cookie[/] 请输入115的cookie,您可以通过其他途径获取")
    if oo5['cookie'] == '':
        # fixed broken rich markup: closing tag was written as "/]" instead of "[/]"
        rprint("[bold red]cookie必须输入[/]")
        return
    oo5['name'] = Prompt.ask("[green]name[/] 请输入该cookie的名字,好记就行,如:账号1")
    if oo5['name'] == '':
        rprint("[bold red]名字必须输入[/]")
        return
    rs, msg = o5List.add(oo5)
    if not rs:
        rprint("添加失败: [bold red]{0}[/]".format(msg))
        return
    rprint("115账号{0}已添加".format(oo5['name']))
    rprint('如果cookie失效,您可以在[bold]./data/config/115.json[/]文件中修改对应的cookie')
    return


def _normalizeExtList(raw: str) -> list[str]:
    """Turn a ';'-separated extension string into a clean ['.ext', ...] list.

    Each entry is whitespace-stripped, empty entries are dropped (the old
    in-line loop turned "mp4;;mkv" into a bare "." entry), and a leading
    dot is added when missing.
    """
    exts: list[str] = []
    for ext in raw.split(';'):
        ext = ext.strip()
        if ext == '':
            continue
        if not ext.startswith('.'):
            ext = '.' + ext
        exts.append(ext)
    return exts


def create():
    """Interactive wizard that creates a sync-directory configuration.

    Every answered prompt is persisted to a temp file so an aborted run can
    be resumed with the previous answers pre-filled as defaults.
    """
    tmpFile = './.input'

    def saveTmp():
        # Persist the partially-filled lib so a later run can resume.
        with open(tmpFile, mode='w', encoding='utf-8') as f:
            json.dump(lib.getJson(), f)

    def readTmp():
        if not os.path.exists(tmpFile):
            return {}
        with open(tmpFile, mode='r', encoding='utf-8') as f:
            return json.load(f)

    isWin = sys.platform.startswith('win')
    tmp = readTmp()
    if tmp.get('path') is not None:
        rprint("已经将上一次输入的值设置为每一项的默认值,[bold]如果没有改动可以直接回车[/],直到未完成的输入项")
    lib = Lib(tmp)
    # A 115 account must exist before a directory can be configured.
    o5s: list[OO5] = o5List.getList()
    if len(o5s) == 0:
        rprint("[bold red]请先添加115账号,执行:q115strm.exe add115[/]")
        return
    # Build the account choice list; pre-select the previously chosen account.
    oo5Choices = []
    oo5Default = o5s[0].name
    for o in o5s:
        if lib.id_of_115 != '' and lib.id_of_115 == o.key:
            oo5Default = o.name
        oo5Choices.append(o.name)
    oo5Name = Prompt.ask("[green]id_of_115[/] 请选择要使用的115账号", choices=oo5Choices, default=oo5Default)
    for o in o5s:
        if oo5Name == o.name:
            lib.id_of_115 = o.key
    lib.path = Prompt.ask("[green]path[/] 请输入要生成目录树的115路径,如:media/movie", default=lib.path)
    if lib.path == '':
        rprint("[bold red]路径必须输入[/]")
        return
    lib.path = lib.path.strip('/')
    saveTmp()
    lib.name = Prompt.ask("[green]name[/] 请输入该路径的名称,如:电影", default=lib.name if lib.name != '' else "默认目录")
    saveTmp()
    strm_root_path_example = '/115'
    if isWin:
        strm_root_path_example = 'F:\\115'
    lib.strm_root_path = Prompt.ask("[green]strm_root_path[/] 请输入存放STRM文件的根目录,如:%s" % strm_root_path_example, default=lib.strm_root_path)
    if lib.strm_root_path == '':
        # fixed broken rich markup: closing tag was written as "/]"
        rprint("[bold red]STRM文件的根目录必须输入[/]")
        return
    lib.strm_root_path = lib.strm_root_path.rstrip(os.sep)
    if not os.path.exists(lib.strm_root_path):
        mk_strm_root_path = Confirm.ask("[bold red]{0}不存在[/],是否创建该目录?".format(lib.strm_root_path), default=True)
        if mk_strm_root_path:
            os.makedirs(lib.strm_root_path)
        else:
            rprint("[bold red]请输入正确的strm根目录[/]")
            return
    saveTmp()
    lib.type = Prompt.ask("[green]type[/] 请选择STRM类型", choices=["本地路径", "WebDAV", "alist302"], default=lib.type)
    saveTmp()
    lib.mount_path = Prompt.ask("[green]mount_path[/] 如果使用Alist请输入Alist创建存储时输入的根文件夹ID对应的路径", default=lib.mount_path)
    lib.mount_path = lib.mount_path.strip('/')
    saveTmp()
    if lib.type == '本地路径':
        lib.path_of_115 = Prompt.ask("[green]path_of_115[/] 请输入挂载115的目录,例如CD2的/CloudNAS/115", default=lib.path_of_115)
        if lib.path_of_115 == '':
            rprint("[bold red]115挂载目录必须输入[/]")
            return
        if not os.path.exists(lib.path_of_115):
            rprint("[bold red]{0}不存在,请检查CD2或其他挂载服务是否正常启动,挂载目录是否输入正确[/]".format(lib.path_of_115))
            return
        lib.path_of_115 = lib.path_of_115.rstrip(os.sep)
        saveTmp()
        # Metadata copying only applies to locally mounted sources.
        lib.copy_meta_file = Prompt.ask("[green]copy_meta_file[/] 是否复制元数据?", default=lib.copy_meta_file, choices=["关闭", "复制", "软连接"])
        if lib.copy_meta_file == '复制':
            lib.copy_delay = FloatPrompt.ask("[green]copy_delay[/] 每个元数据复制的间隔秒数,支持两位小数如:0.01, 默认1秒?", default=float(lib.copy_delay))
        saveTmp()
    if lib.type == 'WebDAV':
        lib.webdav_url = Prompt.ask("[green]webdav_url[/] 请输入webdav服务中的115挂载路径, 格式:http[s]//ip:port/[dav/115]", default=lib.webdav_url)
        if lib.webdav_url == '':
            rprint("[bold red]webdav服务的url必须输入[/]")
            return
        lib.webdav_url = lib.webdav_url.rstrip('/')
        saveTmp()
        # bugfix: defaults previously re-used lib.webdav_url (copy-paste error),
        # so re-running the wizard pre-filled username/password with the URL.
        lib.webdav_username = Prompt.ask("[green]webdav_username[/] 请输入webdav服务的登录用户名,只是用字母和数字不要包含特殊字符", default=lib.webdav_username)
        if lib.webdav_username == '':
            rprint("[bold red]webdav服务的登录用户名必须输入[/]")
            return
        saveTmp()
        lib.webdav_password = Prompt.ask("[green]webdav_password[/] 请输入webdav服务的登录密码,只是用字母和数字不要包含特殊字符", default=lib.webdav_password)
        if lib.webdav_password == '':
            rprint("[bold red]webdav服务的登录密码必须输入[/]")
            return
        saveTmp()
    if lib.type == 'alist302':
        lib.alist_server = Prompt.ask("[green]alist_server[/] 请输入alist地址, 格式:http[s]//ip:port", default=lib.alist_server)
        if lib.alist_server == '':
            # bugfix: message had a stray "l" ("alist地址l必须输入")
            rprint("[bold red]alist地址必须输入[/]")
            return
        lib.alist_server = lib.alist_server.rstrip('/')
        saveTmp()
        lib.alist_115_path = Prompt.ask("[green]alist_115_path[/] 请输入alist存储中115的挂载路径", default=lib.alist_115_path)
        if lib.alist_115_path == '':
            # bugfix: previously printed the unrelated webdav-username message
            rprint("[bold red]alist存储中115的挂载路径必须输入[/]")
            return
        lib.alist_115_path = lib.alist_115_path.strip('/')
        saveTmp()
    strmExtStr = ';'.join(lib.strm_ext)
    newStrmExtStr = Prompt.ask("[green]strm_ext[/] 请输入要生成STRM的文件扩展名,分号分隔,可以直接复制默认值来修改", default=strmExtStr)
    lib.strm_ext = _normalizeExtList(newStrmExtStr)
    saveTmp()
    if lib.copy_meta_file != '关闭':
        metaExtStr = ';'.join(lib.meta_ext)
        # bugfix: prompt label said "strm_ext" for the metadata extensions
        newMetaExtStr = Prompt.ask("[green]meta_ext[/] 请输入元数据的文件扩展名,分号分隔,可以直接复制默认值来修改", default=metaExtStr)
        lib.meta_ext = _normalizeExtList(newMetaExtStr)
        saveTmp()
    lib.makeKey()
    rs, msg = LIBS.add(lib.getJson())
    if not rs:
        rprint("添加失败:[bold red]{0}[/]".format(msg))
        return
    rprint("已添加同步目录: %s" % lib.key)
    rprint("您也可以在 [bold]data/config/libs.json[/] 中手动修改需要的参数")
    if isWin:
        rprint("稍后可执行 .\\q115strm.exe run -k={0} 执行单个同步任务 或者 .\\q115strm.exe run 执行全部同步任务".format(lib.key))
    else:
        rprint("稍后可执行 ./q115strm run -k={0} 执行单个同步任务 或者 ./q115strm run 执行全部同步任务".format(lib.key))
    # bugfix: used the hard-coded '.input' instead of tmpFile, and crashed
    # with FileNotFoundError if the file had already been removed.
    if os.path.exists(tmpFile):
        os.unlink(tmpFile)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='115-STRM', description='将挂载的115网盘目录生成STRM', formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('action', help='要执行的操作\nlist 列出所有已添加的同步目录\nadd115 添加115账号的cookie \ncreate 添加同步目录\nrun 执行同步任务')
    parser.add_argument('-k', '--key', help='要处理的同步目录')
    args, unknown = parser.parse_known_args()
    action: str | None = args.action
    key: str | None = args.key
    if action == 'list':
        listLib()
    elif action == 'create':
        create()
    elif action == 'run':
        run(key)
    elif action == 'add115':
        add115()
import hashlib
from multiprocessing import Process
import os
import time
from crontab import CronTab

from lib import TABFILE, Libs
from log import getLogger

# Handle to the scheduler child process; owned by StartCron (see `global`
# there). Previously StartCron assigned a *local* of the same name, so this
# module-level variable was never actually updated.
cronSubProc: Process | None = None
logger = getLogger(name='cron', rotating=True, stream=True)


def get_file_md5(file_path):
    """Return the uppercase hex MD5 digest of a file.

    Reads in fixed-size chunks so large files are not pulled into memory
    in one shot (the old version did ``f.read()`` of the whole file).
    :param file_path: path of the file to fingerprint
    :return: uppercase hexadecimal MD5 string
    """
    md5obj = hashlib.md5()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            md5obj.update(chunk)
    return md5obj.hexdigest().upper()


def startCronSub():
    """Child-process entry point: run the crontab scheduler loop forever."""
    logger.info('启动Crontab守护进程')
    tab = CronTab(tabfile=TABFILE)
    try:
        for result in tab.run_scheduler():
            logger.info("Return code: {0}".format(result.returncode))
            logger.info("Standard Out: {0}".format(result.stdout))
            logger.info("Standard Err: {0}".format(result.stderr))
    except Exception:
        # Scheduler aborted (e.g. the parent terminated us); exit quietly.
        # Narrowed from a bare `except:` that also swallowed SystemExit.
        pass


def StartCron():
    """Start the scheduler subprocess and hot-reload it on tab-file changes.

    Ensures the tab file exists, seeds it from the configured libraries,
    launches a child process running the scheduler, then polls the file's
    MD5 every 10 seconds and restarts the child whenever it changed.
    Blocks until interrupted (Ctrl+C).
    """
    global cronSubProc
    if not os.path.exists(TABFILE):
        # CronTab needs an existing (possibly empty) tab file to load.
        with open(TABFILE, mode='w', encoding='utf-8') as f:
            f.write('')
    LIBS = Libs()
    # Write the schedules of all configured sync directories into the tab file.
    LIBS.initCron()
    logger.info("启动定时任务监控进程")
    cronSubProc = Process(target=startCronSub)
    cronSubProc.start()
    md5 = get_file_md5(TABFILE)
    logger.info("记录cron文件的指纹:{0}".format(md5))
    while True:
        newmd5 = get_file_md5(TABFILE)
        if md5 != newmd5:
            logger.info("cron文件有变化,重新加载定时任务{0} : {1}".format(md5, newmd5))
            # File changed: restart the scheduler child so it picks it up.
            cronSubProc.terminate()
            cronSubProc = Process(target=startCronSub)
            cronSubProc.start()
            md5 = newmd5
        try:
            time.sleep(10)
        except KeyboardInterrupt:
            # Narrowed from a bare `except:` — only Ctrl+C should stop the loop.
            break


if __name__ == '__main__':
    StartCron()
17 | 18 | 19 | -------------------------------------------------------------------------------- /job.py: -------------------------------------------------------------------------------- 1 | 2 | import argparse 3 | import shutil 4 | import signal 5 | import textwrap 6 | import time 7 | import urllib 8 | import urllib.parse 9 | import psutil 10 | 11 | from p115client import P115Client, tool 12 | import telegramify_markdown 13 | from lib import OO5, GetNow, Lib, Libs, OO5List, Setting, TGBot 14 | import os, logging, sys 15 | from telegramify_markdown import customize 16 | 17 | from log import getLogger 18 | from telebot import apihelper 19 | proxyHost = os.getenv('PROXY_HOST', '') 20 | if proxyHost != '': 21 | apihelper.proxy = {'http': proxyHost, 'https': proxyHost} 22 | LIBS = Libs() 23 | o5List = OO5List() 24 | 25 | 26 | class Job: 27 | key: str 28 | lib: Lib 29 | oo5Account: OO5 30 | logger: logging 31 | 32 | copyList: list[str] 33 | 34 | def __init__(self, key: str = None, logStream: bool = False): 35 | if key is None: 36 | return 37 | self.key = key 38 | self.lib = LIBS.getLib(key) 39 | if self.lib is None: 40 | raise ValueError('要执行的同步目录不存在,请刷新同步目录列表检查是否存在') 41 | self.logger = getLogger(name = self.lib.key, clear=True, stream=logStream) 42 | if self.lib.cloud_type == '115': 43 | self.oo5Account = o5List.get(self.lib.id_of_115) 44 | if self.oo5Account is None: 45 | self.logger.error('无法找到所选的115账号,请检查115账号列表中是否存在此项: %s' % self.lib.id_of_115) 46 | raise ValueError('无法找到所选的115账号,请检查115账号列表中是否存在此项') 47 | try: 48 | if self.lib.extra.pid > 0 and psutil.pid_exists(self.lib.extra.pid): 49 | self.logger.error('正在同步中,跳过本次执行') 50 | raise ValueError('正在同步中,跳过本次执行') 51 | except: 52 | pass 53 | 54 | def notify(self, msg): 55 | settings = Setting() 56 | if settings.telegram_bot_token == '' or settings.telegram_user_id == '': 57 | return 58 | bot = TGBot() 59 | markdown_text = textwrap.dedent(msg) 60 | can_be_sent = telegramify_markdown.markdownify(markdown_text) 61 | rs, msg = 
bot.sendMsg(can_be_sent) 62 | if rs: 63 | lm = "成功发送通知" 64 | if msg != "": 65 | lm = "无法发送通知:{0}".format(msg) 66 | self.logger.info(lm) 67 | else: 68 | self.logger.warning("无法发送通知: {0}".format(msg)) 69 | 70 | def start(self): 71 | # 记录开始时间 72 | # 记录进程号 73 | self.lib.extra.last_sync_at = GetNow() 74 | self.lib.extra.pid = os.getpid() 75 | self.lib.extra.status = 2 76 | # 保存 77 | LIBS.saveExtra(self.lib) 78 | self.notify("*{0}* 开始同步".format(self.lib.name)) 79 | self.lib = LIBS.getLib(self.key) 80 | try: 81 | self.work() 82 | self.lib.extra.status = 1 83 | customize.strict_markdown = False 84 | tgmesage = """ 85 | *{0}* 已完成同步 86 | 87 | - *STRM文件*: 本次找到 {1} 个, 生成 {2} 个 88 | - *元数据*: 本次找到 {3} 个, 成功 {4} 个 89 | - *删除文件*: 本次找到 {5} 个,成功 {6} 个 90 | """ 91 | 92 | tgmessage = tgmesage.format(self.lib.name, self.lib.extra.last_sync_result['strm'][1], self.lib.extra.last_sync_result['strm'][0], self.lib.extra.last_sync_result['meta'][0], self.lib.extra.last_sync_result['meta'][0], self.lib.extra.last_sync_result['delete'][0], self.lib.extra.last_sync_result['delete'][0]) 93 | self.notify(tgmessage) 94 | except Exception as e: 95 | self.logger.error('%s' % e) 96 | self.lib.extra.status = 3 97 | self.notify("*{0}* 同步发生错误: {1}".format(self.lib.name, e)) 98 | self.lib.extra.pid = 0 99 | LIBS.saveExtra(self.lib) 100 | return True 101 | 102 | def stop(self, sig, frame): 103 | self.lib.extra.status = 3 104 | self.lib.extra.pid = 0 105 | LIBS.saveExtra(self.lib) 106 | self.logger.info("*{0}* 中断同步".format(self.lib.name)) 107 | self.notify("*{0}* 中断同步".format(self.lib.name)) 108 | sys.exit(1) 109 | 110 | def parseTree(self, src_tree_list: list, dest_tree_list: list) -> tuple[list, list, list]: 111 | copy_list = [] 112 | added = [] 113 | for src_item in src_tree_list: 114 | if src_item in dest_tree_list: 115 | # 已存在,从dest中删除 116 | dest_tree_list.remove(src_item) 117 | continue 118 | filename, ext = os.path.splitext(src_item) 119 | if ext.lower() in self.lib.strm_ext: 120 | strm_file = 
filename + '.strm' 121 | if strm_file in dest_tree_list: 122 | # 如果strm文件已存在,则从dest中删除 123 | dest_tree_list.remove(strm_file) 124 | continue 125 | else: 126 | added.append(src_item) 127 | continue 128 | if ext.lower() in self.lib.meta_ext: 129 | # 如果是元数据,则加入复制列表 130 | copy_list.append(src_item) 131 | return dest_tree_list, added, copy_list 132 | 133 | def doAdded(self, added): 134 | c = 0 135 | at = len(added) 136 | asuc = 0 137 | af = 0 138 | for item in added: 139 | c += 1 140 | rs = self.strm(item) 141 | if rs == '': 142 | # 成功 143 | asuc += 1 144 | self.logger.info('[%d / %d] STRM:%s' % (c, at, item)) 145 | else: 146 | af += 1 147 | self.logger.error('[%d / %d] 错误:%s \n %s' % (c, at, item, rs)) 148 | self.lib.extra.last_sync_result['strm'] = [asuc, at] 149 | return True 150 | 151 | def doDelete(self, dest_tree_list): 152 | c = 0 153 | dt = len(dest_tree_list) 154 | ds = 0 155 | df = 0 156 | for delete_item in dest_tree_list: 157 | c += 1 158 | delete_real_file = os.path.join(self.lib.strm_root_path, delete_item) 159 | if not os.path.exists(delete_real_file): 160 | self.logger.error('[%d / %d] %s \n %s' % (c, dt, delete_item, '文件已经删除')) 161 | ds += 1 162 | continue 163 | # 处理文件,只删除strm文件 164 | _, deleted_ext = os.path.splitext(delete_item) 165 | if deleted_ext == '.strm': 166 | try: 167 | os.unlink(delete_real_file) 168 | self.logger.info('[%d / %d] 删除网盘不存在的文件:%s' % (c, dt, delete_item)) 169 | ds += 1 170 | except OSError as e: 171 | self.logger.error('[%d / %d] 错误:%s \n %s' % (c, dt, delete_item, e)) 172 | df += 1 173 | continue 174 | if self.lib.delete == "是": 175 | if os.path.isdir(delete_real_file): 176 | # 文件夹直接删除 177 | shutil.rmtree(delete_real_file) 178 | self.logger.info('[%d / %d] 删除网盘不存在的文件夹:%s' % (c, dt, delete_real_file)) 179 | ds += 1 180 | continue 181 | else: 182 | # 删除文件 183 | os.unlink(delete_real_file) 184 | self.logger.info('[%d / %d] 删除网盘不存在的文件:%s' % (c, dt, delete_item)) 185 | ds += 1 186 | self.lib.extra.last_sync_result['delete'] = [ds, 
    def doMeta(self, copy_list: list):
        """Materialize metadata files (posters, nfo, subtitles, ...) next to the STRM tree.

        Depending on the library's ``copy_meta_file`` mode the source file is either
        copied or symlinked into ``strm_root_path``. Records ``[success, total]``
        under ``last_sync_result['meta']``.
        :param copy_list: relative paths of metadata files found on the source side
        """
        # WebDAV libraries have no local source files to copy from.
        if self.lib.type == 'WebDAV':
            self.logger.info('webdav不处理元数据')
            return
        if self.lib.copy_meta_file == '关闭':
            return
        c = 0
        ct = len(copy_list)
        cs = 0  # successes
        cf = 0  # failures
        for item in copy_list:
            c += 1
            src_file = ""
            # 115 libraries resolve against the 115 mount root, others against the plain path.
            if self.lib.cloud_type == '115':
                src_file = os.path.join(self.lib.path_of_115, item)
            else:
                src_file = os.path.join(self.lib.path, item)
            dest_file = os.path.join(self.lib.strm_root_path, item)
            dirname = os.path.dirname(dest_file)
            if not os.path.exists(dirname):
                os.makedirs(dirname)
            if not os.path.exists(src_file):
                cf += 1
                self.logger.error('[%d / %d] 元数据 - 源文件不存在:%s' % (c, ct, src_file))
                continue
            try:
                if self.lib.copy_meta_file == '复制':
                    self.logger.info('[%d / %d] 元数据 - 复制:%s => %s' % (c, ct, src_file, dest_file))
                    if not os.path.exists(dest_file):
                        shutil.copy(src_file, dest_file)
                        # Throttle copies to avoid hammering the cloud mount.
                        time.sleep(self.lib.copy_delay)
                if self.lib.copy_meta_file == '软链接':
                    self.logger.info('[%d / %d] 元数据 - 软链:%s' % (c, ct, item))
                    if not os.path.exists(dest_file):
                        os.symlink(src_file, dest_file)
                # NOTE(review): an entry whose dest_file already exists is still
                # counted as a success here — presumably intentional ("already done").
                cs += 1
            except OSError as e:
                self.logger.error('[%d / %d] 元数据 - 复制错误:%s \n %s' % (c, ct, item, e))
                cf += 1
        self.lib.extra.last_sync_result['meta'] = [cs, ct]
    def get_src_tree_list(self):
        """Export the 115 directory tree and flatten it into a list of relative paths.

        Uses p115client's export_dir tooling: each yielded item carries a ``key`` /
        ``parent_key`` pair, from which full paths are reassembled in ``path_index``.
        :return: list of paths (with ``os.sep`` separators) for every entry below the library root
        :raises Exception: re-raised after logging if the export/parse fails
        """
        try:
            client = P115Client(self.oo5Account.cookie)
            it = tool.export_dir_parse_iter(client=client, export_file_ids=self.lib.path, target_pid=self.lib.path, parse_iter=tool.parse_export_dir_as_dict_iter,
                                            delete=True, async_=False, show_clock=True)
            i = 0
            path_index = {}   # key -> item, used to resolve each child's parent path
            src_tree_list = []
            for item in it:
                i += 1
                parent = path_index.get(item['parent_key'])
                if parent is None:
                    # Root entry: give it an empty path so it is excluded below.
                    item['path'] = ''
                else:
                    # The second entry is the library root itself; pin it to lib.path
                    # so all descendants are rooted correctly.
                    if i == 2 and self.lib.path.endswith(item['name']):
                        item['path'] = self.lib.path
                    else:
                        item['path'] = "{0}/{1}".format(parent['path'], item['name'])
                path_index[item['key']] = item
                if item['path'] != '':
                    src_tree_list.append(item['path'].replace('/', os.sep))
            return src_tree_list
        except Exception as e:
            self.logger.error('生成目录树出错: %s' % e)
            raise e
dest_tree_list.append(item.lstrip(base_dir + os.sep)) 286 | if os.path.isfile(item): 287 | # 如果是文件,则不用递归 288 | continue 289 | self.get_dest_tree_list(base_dir, item, dest_tree_list) 290 | return dest_tree_list 291 | 292 | def strm(self, path: str): 293 | try: 294 | path = path.replace('/', os.sep) 295 | dirname = os.path.dirname(path) 296 | real_dirname = os.path.join(self.lib.strm_root_path, dirname) 297 | if not os.path.exists(real_dirname): 298 | os.makedirs(real_dirname) 299 | filename, ext = os.path.splitext(path) 300 | # 生成STRM文件 301 | strm_file = filename + '.strm' 302 | strm_real_file = os.path.join(self.lib.strm_root_path, strm_file) 303 | if os.path.exists(strm_real_file): 304 | # 如果已存在,则不处理 305 | return "" 306 | strm_content = '' 307 | if self.lib.type == '本地路径': 308 | if self.lib.cloud_type == '115': 309 | strm_content = os.path.join(self.lib.path_of_115, path) 310 | else: 311 | strm_content = os.path.join(self.lib.path, path) 312 | else: 313 | path = path.replace(os.sep, '/') 314 | if self.lib.mount_path != '': 315 | path = path.lstrip(self.lib.mount_path) 316 | print("path replace mount: {0}".format(path)) 317 | if path.startswith('/'): 318 | path.lstrip('/') 319 | pathList = path.split('/') 320 | newPath = [] 321 | for p in pathList: 322 | newPath.append(urllib.parse.quote(p)) 323 | if self.lib.type == 'WebDAV': 324 | url = self.lib.webdav_url 325 | if not url.startswith('http'): 326 | url = "http://{0}".format(url) 327 | url = self.lib.webdav_url.replace('//', '//{0}:{1}@'.format(self.lib.webdav_username, self.lib.webdav_password)) 328 | if url.endswith('/'): 329 | url = url.rstrip('/') 330 | strm_content = '{0}/{1}'.format(url, '/'.join(newPath)) 331 | else: 332 | url = self.lib.alist_server 333 | if url.endswith('/'): 334 | url = url.rstrip('/') 335 | alist_115_path = self.lib.alist_115_path.strip('/') 336 | strm_content = '{0}/d/{1}/{2}'.format(url, alist_115_path, '/'.join(newPath)) 337 | with open(strm_real_file, 'w', encoding='utf-8') as f: 
338 | f.write(strm_content) 339 | return "" 340 | except Exception as e: 341 | return e 342 | 343 | def StartJob(key: str, logStream: bool = False): 344 | job = Job(key, logStream) 345 | signal.signal(signal.SIGINT, job.stop) 346 | signal.signal(signal.SIGTERM, job.stop) 347 | job.start() 348 | 349 | if __name__ == '__main__': 350 | key: str = '' 351 | parser = argparse.ArgumentParser(prog='115-STRM', description='将挂载的115网盘目录生成STRM', formatter_class=argparse.RawTextHelpFormatter) 352 | parser.add_argument('-k', '--key', help='要处理的同步目录') 353 | args, unknown = parser.parse_known_args() 354 | if args.key != None: 355 | key = args.key 356 | if key == '': 357 | sys.exit(0) 358 | StartJob(key, True) 359 | -------------------------------------------------------------------------------- /lib.py: -------------------------------------------------------------------------------- 1 | import datetime, pytz 2 | import json 3 | import hashlib, os 4 | from typing import List, Mapping 5 | from crontab import CronTab 6 | import telebot 7 | from telebot import apihelper 8 | from telebot import apihelper 9 | proxyHost = os.getenv('PROXY_HOST', '') 10 | if proxyHost != '': 11 | apihelper.proxy = {'http': proxyHost, 'https': proxyHost} 12 | TABFILE = './data/config/cron.tab' 13 | 14 | 15 | 16 | def GetNow(): 17 | # 获取当前时间 18 | now = datetime.datetime.now() 19 | # 创建一个表示北京时区的对象 20 | beijing = pytz.timezone('Asia/Shanghai') 21 | # 将当前时间转换为北京时区 22 | now_beijing = now.astimezone(beijing) 23 | return now_beijing.strftime("%Y-%m-%d %H:%M:%S") 24 | 25 | 26 | class LibExtra: 27 | pid: str # 正在运行的进程ID 28 | status: int # 运行状态: 1-正常,2-运行中,3-中断 29 | last_sync_at: str # 最后运行时间 30 | last_sync_result: Mapping[str, List[int]] 31 | 32 | def __init__(self, pid: int = 0, status: int = 1, last_sync_at: str = '', last_sync_result: Mapping[str, List[int]] = {'strm': [0,0], 'meta': [0,0],'delete': [0,0]}): 33 | self.pid = pid 34 | self.status = status 35 | self.last_sync_at = last_sync_at 36 | 
self.last_sync_result = last_sync_result 37 | 38 | def getJson(self): 39 | dict = self.__dict__ 40 | return dict 41 | 42 | class LibBase: 43 | key: str # 标识 44 | cloud_type: str # 网盘类型,分为:115, other 45 | name: str # 名称 46 | path: str # 路径 47 | type: str # strm类型,'本地路径' | 'WebDAV' | 'alist302' 48 | strm_root_path: str # strm根目录 49 | mount_path: str # alist挂载根文件夹,cd2留空 50 | alist_server: str # alist服务器地址,格式:http[s]://ip:port 51 | alist_115_path: str # alist中115路径,一般都是:115 52 | path_of_115: str # 115挂载根目录 53 | copy_meta_file: int # 元数据选项:1-关闭,2-复制,3-软链接 54 | copy_delay: int | float # 元数据复制间隔 55 | webdav_url: str # webdav服务器链接 56 | webdav_username: str # webdav服务器用户名 57 | webdav_password: str # webdav服务器密码 58 | sync_type: str # 同步类型,'定时' | '监控变更' 59 | cron_str: str # 定时同步规则 60 | id_of_115: str # 115账号标识 61 | strm_ext: list[str] # strm扩展名 62 | meta_ext: list[str] # 元数据扩展名 63 | delete: str # 是否删除网盘不存在的本地文件:是 | 否 64 | 65 | def __init__(self, data: None | dict): 66 | if data is None: 67 | return 68 | self.key = data.get('key') if data.get('key') is not None else '' 69 | self.cloud_type = data.get('cloud_type') if data.get('cloud_type') is not None else '115' # 默认115 70 | self.name = data.get('name') if data.get('name') is not None else '' 71 | self.path = data.get('path') if data.get('path') is not None else '' 72 | self.type = data.get('type') if data.get('type') is not None else '本地路径' 73 | self.strm_root_path = data.get('strm_root_path') if data.get('strm_root_path') is not None else '' 74 | self.mount_path = data.get('mount_path') if data.get('mount_path') is not None else '' 75 | self.alist_server = data.get('alist_server') if data.get('alist_server') is not None else '' 76 | self.alist_115_path = data.get('alist_115_path') if data.get('alist_115_path') is not None else '' 77 | self.path_of_115 = data.get('path_of_115') if data.get('path_of_115') is not None else '' 78 | self.copy_meta_file = data.get('copy_meta_file') if data.get('copy_meta_file') is not None else 
'关闭' 79 | self.copy_delay = float(data.get('copy_delay')) if data.get('copy_delay') is not None else 1 80 | self.webdav_url = data.get('webdav_url') if data.get('webdav_url') is not None else '' 81 | self.webdav_username = data.get('webdav_username') if data.get('webdav_username') is not None else '' 82 | self.webdav_password = data.get('webdav_password') if data.get('webdav_password') is not None else '' 83 | self.sync_type = data.get('sync_type') if data.get('sync_type') is not None else '手动' 84 | self.cron_str = data.get('cron_str') if data.get('cron_str') is not None else '' 85 | self.id_of_115 = data.get('id_of_115') if data.get('id_of_115') is not None else '' 86 | self.strm_ext = data.get('strm_ext') if data.get('strm_ext') is not None else [ 87 | '.mkv', 88 | '.mp4', 89 | '.ts', 90 | '.avi', 91 | '.mov', 92 | '.mpeg', 93 | '.mpg', 94 | '.wmv', 95 | '.3gp', 96 | '.m4v', 97 | '.flv', 98 | '.m2ts', 99 | '.f4v', 100 | '.tp', 101 | ] 102 | self.meta_ext = data.get('meta_ext') if data.get('meta_ext') is not None else [ 103 | '.jpg', 104 | '.jpeg', 105 | '.png', 106 | '.webp', 107 | '.nfo', 108 | '.srt', 109 | '.ass', 110 | '.svg', 111 | '.sup', 112 | '.lrc', 113 | ] 114 | self.delete = data.get('delete') if data.get('delete') is not None else '否' 115 | newStrmExt = [] 116 | for ext in self.strm_ext: 117 | newStrmExt.append(ext.lower()) 118 | newMetaExt = [] 119 | for ext in self.meta_ext: 120 | newMetaExt.append(ext.lower()) 121 | self.strm_ext = newStrmExt 122 | self.meta_ext = newMetaExt 123 | if self.key == '': 124 | self.makeKey() 125 | 126 | def makeKey(self): 127 | m = hashlib.md5() 128 | m.update(self.path.encode(encoding='UTF-8')) 129 | self.key = m.hexdigest() 130 | 131 | 132 | class Lib(LibBase): 133 | extra: LibExtra 134 | 135 | def __init__(self, data: None | dict): 136 | super().__init__(data) 137 | hasExtra = False 138 | if data is not None: 139 | extra = data.get('extra') 140 | if extra is not None: 141 | hasExtra = True 142 | self.extra = 
    def cron(self):
        """Synchronize this library's crontab entry with its sync_type.

        '定时' libraries get (or replace) a job running ``job.py -k <key>``; any
        other sync_type removes the entry. Entries are matched by comment == key.
        :return: ``True`` always
        """
        cron = CronTab(tabfile=TABFILE)
        iter = cron.find_comment(self.key)
        existsJob = None
        for i in iter:
            existsJob = i
        if self.sync_type == '定时':
            if existsJob is not None:
                # Drop the stale entry before writing the fresh one.
                cron.remove(existsJob)
                cron.write(filename=TABFILE)
            jobFile = os.path.abspath('./job.py')
            job = cron.new(comment="%s" % self.key, command="python3 %s -k %s" % (jobFile, self.key))
            job.setall(self.cron_str)
            cron.write(filename=TABFILE)
        else:
            if existsJob is not None:
                # Not scheduled any more: remove the crontab entry.
                cron.remove(existsJob)
                cron.write(filename=TABFILE)
        return True
{} 208 | if os.path.exists(self.libs_file): 209 | with open(self.libs_file, mode='r', encoding='utf-8') as fd_libs: 210 | jsonLibs = json.load(fd_libs) 211 | for k in jsonLibs: 212 | libs[k] = Lib(jsonLibs[k]) 213 | self.libList = libs 214 | return True 215 | 216 | def list(self) -> List[Lib]: 217 | self.loadFromFile() 218 | l: list[Lib] = [] 219 | for key in self.libList: 220 | l.append(self.libList.get(key)) 221 | return l 222 | 223 | def save(self) -> bool: 224 | with open(self.libs_file, mode='w', encoding='utf-8') as fd_libs: 225 | json.dump(self.libList, fd_libs, default=jsonHook) 226 | return True 227 | 228 | def getLib(self, key: str) -> Lib | None: 229 | self.loadFromFile() 230 | return self.libList.get(key) 231 | 232 | def getByPath(self, path: str) -> Lib | None: 233 | self.loadFromFile() 234 | for key in self.libList: 235 | item = self.libList.get(key) 236 | if item.path == path: 237 | return item 238 | return None 239 | 240 | def add(self, data: dict) -> tuple[bool, str]: 241 | for k, v in self.libList.items(): 242 | if v.path == data['path']: 243 | return False, '同步目录已存在' 244 | if v.name == data['name']: 245 | return False, '同步目录名称已存在' 246 | data['extra'] = { 247 | 'pid': 0, 248 | 'status': 1, 249 | 'last_sync_at': '', 250 | 'last_sync_result': { 251 | 'strm': [0, 0], 252 | 'meta': [0, 0], 253 | 'delete': [0, 0] 254 | } 255 | } 256 | lib = Lib(data) 257 | rs, msg = lib.validate() 258 | if rs is False: 259 | return rs, msg 260 | self.libList[lib.key] = lib 261 | self.save() 262 | lib.cron() 263 | return True, '' 264 | 265 | def updateLib(self, key: str, data: dict) -> tuple[bool, str]: 266 | lib = self.getLib(key) 267 | if lib is None: 268 | return False, '同步目录不存在' 269 | del data['extra'] 270 | for k in data: 271 | lib.__setattr__(k, data[k]) 272 | self.libList[key] = lib 273 | self.save() 274 | lib.cron() 275 | return True, '' 276 | 277 | def saveExtra(self, lib: Lib): 278 | self.libList[lib.key] = lib 279 | self.save() 280 | pass 281 | 282 | def 
class OO5:
    """A single 115 account record (name, cookie and bookkeeping timestamps)."""
    key: str         # stable identifier (md5 of the name)
    name: str        # display name
    cookie: str      # 115 session cookie
    status: int      # account status flag
    created_at: str  # creation timestamp
    updated_at: str  # last-update timestamp

    def __init__(self, data: dict):
        """Populate every field from ``data``; all keys are required."""
        # Assign in the original declaration order so serialized key order is stable.
        for field in ('name', 'cookie', 'status', 'created_at', 'updated_at', 'key'):
            setattr(self, field, data[field])

    def getJson(self):
        """Return the instance attribute dict (shared reference, not a copy)."""
        return self.__dict__
self.list[key].name == data['name'] or self.list[key].cookie == data['cookie']: 360 | return False, '名称或者cookie已存在' 361 | data['created_at'] = GetNow() 362 | data['updated_at'] = '' 363 | data['status'] = 0 364 | m = hashlib.md5() 365 | m.update(data['name'].encode(encoding='UTF-8')) 366 | data['key'] = m.hexdigest() 367 | oo5 = OO5(data) 368 | self.list[oo5.key] = oo5 369 | self.save() 370 | return True, '' 371 | 372 | def updateOO5(self, key: str, data: dict): 373 | self.loadFromFile() 374 | oo5 = self.get(key) 375 | if oo5 is None: 376 | return False, '115账号不存在' 377 | oo5.name = data['name'] 378 | oo5.cookie = data['cookie'] 379 | oo5.updated_at = GetNow() 380 | self.list[key] = oo5 381 | self.save() 382 | return True, '' 383 | 384 | def delOO5(self, key: str): 385 | self.loadFromFile() 386 | oo5 = self.get(key) 387 | if oo5 is None: 388 | return True, '' 389 | # 检查是否有在使用 390 | libs = Libs() 391 | libList = libs.list() 392 | for item in libList: 393 | if item.id_of_115 == key: 394 | return False, '该账号使用中' 395 | del self.list[key] 396 | self.save() 397 | return True, '' 398 | 399 | class Setting: 400 | username: str = "admin" 401 | password: str = "admin" 402 | telegram_bot_token: str = "" 403 | telegram_user_id: str = "" 404 | 405 | def __init__(self): 406 | self.loadFromFile() 407 | 408 | def loadFromFile(self): 409 | if not os.path.exists("./data/config/setting.json"): 410 | return False 411 | try: 412 | with open("./data/config/setting.json", mode='r', encoding='utf-8') as fd: 413 | jsonSetting: dict = json.load(fd) 414 | self.username = jsonSetting.get("username") 415 | self.password = jsonSetting.get("password") 416 | self.telegram_bot_token = jsonSetting.get("telegram_bot_token") 417 | self.telegram_user_id = jsonSetting.get("telegram_user_id") 418 | except: 419 | return False 420 | return True 421 | 422 | def save(self) -> tuple[bool, str]: 423 | try: 424 | with open('./data/config/setting.json', mode='w', encoding='utf-8') as f: 425 | 
class TGBot:
    """Thin wrapper around telebot.TeleBot that reads its token/target from Setting."""
    # BUG FIX: this was `bot = None | telebot.TeleBot`, which *assigns a typing
    # union object* (truthy, not None) — so `self.bot is None` was never True
    # when no token was configured, and sendMsg crashed instead of returning
    # the "not configured" message. Declare an optional attribute instead.
    bot: telebot.TeleBot | None = None

    def __init__(self):
        setting = Setting()
        if setting.telegram_bot_token != "":
            self.bot = telebot.TeleBot(setting.telegram_bot_token)

    def sendMsg(self, msg: str, parse_mode: str = "MarkdownV2") -> tuple[bool, str]:
        """Send ``msg`` to the configured Telegram user.

        :return: (ok, info) — ok is True when sent or when sending is not configured
        """
        if self.bot is None:
            return True, "没有配置机器人"
        setting = Setting()
        if setting.telegram_user_id == "":
            return True, "没有配置用户ID"
        try:
            # Pass parse_mode by keyword: its positional slot differs across
            # pyTelegramBotAPI versions.
            self.bot.send_message(setting.telegram_user_id, msg, parse_mode=parse_mode)
            return True, ""
        except Exception as e:
            return False, e
def stop(sig, frame):
    """Signal handler: tear down the watch, cron and web child processes, then exit.

    Each child is terminated and joined independently so a failure stopping one
    service does not prevent the others from being stopped.
    :param sig: signal number received (or None when called directly)
    :param frame: current stack frame (unused)
    """
    global cronProcess
    global watchProcess
    global webProcess
    print("收到终止信号:{0}".format(sig))
    try:
        if watchProcess is not None:
            watchProcess.terminate()
            print("等待停止监控服务")
            watchProcess.join()
            print("监控服务已停止")
            watchProcess = None
    except Exception as e:
        print("监控服务停止出错:{0}".format(e))
    try:
        if cronProcess is not None:
            cronProcess.terminate()
            print("等待停止定时任务服务")
            cronProcess.join()
            print("定时任务服务已停止")
            cronProcess = None
    except Exception as e:
        print("定时任务服务停止出错:{0}".format(e))
    try:
        if webProcess is not None:
            webProcess.terminate()
            print("等待停止Web服务")
            webProcess.join()
            print("Web服务已停止")
            webProcess = None
    except Exception as e:
        print("Web服务停止出错:{0}".format(e))
    sys.exit(0)
"password": "admin", "telegram_bot_token": "", "telegram_user_id": ""} 52 | with open('./data/config/setting.json', mode='w', encoding='utf-8') as f: 53 | json.dump(setting, f) 54 | 55 | from cron import StartCron 56 | from server import StartServer 57 | from watch import StartWatch 58 | 59 | if __name__ == '__main__': 60 | # 启动监控服务 61 | watchProcess = Process(target=StartWatch) 62 | watchProcess.start() 63 | cronProcess = Process(target=StartCron) 64 | cronProcess.start() 65 | # 启动web服务 66 | webProcess = Process(target=StartServer) 67 | webProcess.start() 68 | print("所有服务启动完毕,阻塞主进程并等待其他信号") 69 | signal.signal(signal.SIGINT, stop) 70 | signal.signal(signal.SIGTERM, stop) 71 | while(True): 72 | try: 73 | time.sleep(2) 74 | except: 75 | break 76 | stop(None, None) 77 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "q115-strm" 3 | version = "0.3.1" 4 | description = "基于p115-client自动生成目录树,然后通过解析目录树生成strm文件,可以快速大批量扫盘且不会触发风控" 5 | authors = ["qicfan "] 6 | license = "MIT" 7 | readme = "README.md" 8 | 9 | [tool.poetry.dependencies] 10 | python = "^3.11" 11 | p115client = "^0.0.3.8.2" 12 | httpx = "^0.28.0" 13 | httpx_request="^0.1" 14 | watchdog = "^6.0.0" 15 | flask = "^3.1.0" 16 | python-crontab = "^3.2.0" 17 | psutil = "^6.1.1" 18 | 19 | 20 | [build-system] 21 | requires = ["poetry-core"] 22 | build-backend = "poetry.core.masonry.api" 23 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | p115client 2 | flask 3 | flask_restful 4 | flask_httpauth 5 | python-crontab 6 | Croniter 7 | watchdog 8 | pytz 9 | rich 10 | pyTelegramBotAPI 11 | telegramify-markdown 12 | psutil -------------------------------------------------------------------------------- /server.py: 
class Libs(Resource):
    # NOTE(review): this Resource class shadows the `Libs` class imported from
    # lib.py. The module-level `LIBS = Libs()` instance was created before this
    # definition so it still refers to the lib.py class, but the name collision
    # is fragile — consider renaming to LibsApi.
    def get(self):
        """List all sync directories as JSON dicts."""
        data = []
        list = LIBS.list()
        for item in list:
            data.append(item.getJson())
        return {'code': 200, 'msg': '', 'data': data}

    def post(self):
        """Create a sync directory from the JSON request body."""
        data = request.get_json()
        rs, msg = LIBS.add(data)
        if rs is False:
            return {'code': 500, 'msg': msg, 'data': {}}
        return {'code': 200, 'msg': '', 'data': {}}
lib.extra.pid > 0: 68 | return {'code': 500, 'msg': '该目录正在同步中...', 'data': {}} 69 | p1 = Process(target=StartJob, kwargs={'key': key}) 70 | p1.start() 71 | return {'code': 200, 'msg': '已启动任务', 'data': {}} 72 | 73 | class LibStop(Resource): 74 | def post(self, key: str): 75 | lib = LIBS.getLib(key) 76 | if lib is None: 77 | return {'code': 404, 'msg': '同步目录不存在', 'data': {}} 78 | if lib.extra.pid > 0: 79 | try: 80 | os.kill(lib.extra.pid, signal.SIGILL) 81 | lib.extra.status = 3 82 | except: 83 | lib.extra.status = 1 84 | lib.extra.pid = 0 85 | LIBS.saveExtra(lib) 86 | return {'code': 200, 'msg': '已停止', 'data': {}} 87 | 88 | class LibLog(Resource): 89 | def get(self, key: str): 90 | logFile = os.path.abspath("./data/logs/%s.log" % key) 91 | if not os.path.exists(logFile): 92 | return {'code': 200, 'msg': '', 'data': ""} 93 | content = "" 94 | with open(logFile, mode='r', encoding='utf-8') as logfd: 95 | content = logfd.read() 96 | content = content.replace("\n", "
") 97 | return {'code': 200, 'msg': '', 'data': content} 98 | 99 | class OO5List(Resource): 100 | def get(self): 101 | data = [] 102 | list = o5List.getList() 103 | for item in list: 104 | data.append(item.getJson()) 105 | return {'code': 200, 'msg': '', 'data': data} 106 | 107 | def post(self): 108 | data = request.get_json() 109 | rs, msg = o5List.add(data) 110 | if rs is False: 111 | return {'code': 500, 'msg': msg, 'data': {}} 112 | return {'code': 200, 'msg': '', 'data': {}} 113 | 114 | class OO5(Resource): 115 | def get(self, key: str): 116 | oo5 = o5List.getLib(key) 117 | if oo5 is None: 118 | return {'code': 404, 'msg': '115账号不存在', 'data': {}} 119 | return {'code': 200, 'msg': '', 'data': oo5} 120 | 121 | def delete(self, key: str): 122 | # 删除同步目录 123 | rs, msg = o5List.delOO5(key) 124 | if rs is False: 125 | return {'code': 500, 'msg': msg, 'data': {}} 126 | return {'code': 200, 'msg': '', 'data': {}} 127 | 128 | def put(self, key: str): 129 | # 修改同步目录 130 | data = request.get_json() 131 | rs, msg = o5List.updateOO5(key, data) 132 | if rs is False: 133 | return {'code': 500, 'msg': msg, 'data': {}} 134 | return {'code': 200, 'msg': '', 'data': {}} 135 | 136 | class SettingApi(Resource): 137 | def get(self): 138 | settings = Setting() 139 | return {'code': 200, 'msg': '', 'data': settings.__dict__} 140 | 141 | def post(self): 142 | data = request.get_json() 143 | settings = Setting() 144 | settings.username = data.get("username") 145 | settings.password = data.get("password") 146 | settings.telegram_bot_token = data.get("telegram_bot_token") 147 | settings.telegram_user_id = data.get("telegram_user_id") 148 | if settings.username == '' or settings.password == '': 149 | return {'code': 500, 'msg': "用户名密码不能为空", 'data': {}} 150 | settings.save() 151 | if settings.telegram_bot_token != "" and settings.telegram_user_id != "": 152 | # 发送测试消息 153 | bot = TGBot() 154 | rs, msg = bot.sendMsg("通知配置成功,稍后您将在此收到运行通知") 155 | if not rs: 156 | return {'code': 500, 'msg': 
'保存成功,但是Telegram通知配置出错:{0}'.format(msg), 'data': settings.__dict__} 157 | return {'code': 200, 'msg': '', 'data': settings.__dict__} 158 | 159 | class DirApi(Resource): 160 | def post(self): 161 | """ 162 | 返回目录列表 163 | """ 164 | data = request.get_json() 165 | base_dir = data.get('base_dir') 166 | if base_dir is None or base_dir == '': 167 | base_dir = '/' 168 | dirs = os.listdir(base_dir) 169 | result = [] 170 | for dir in dirs: 171 | item = os.path.join(base_dir, dir) 172 | if os.path.isfile(item): 173 | # 如果是文件,则不用递归 174 | continue 175 | result.append(dir) 176 | return {'code': 200, 'msg': '', 'data': result} 177 | 178 | 179 | api.add_resource(Libs, '/api/libs') 180 | api.add_resource(Lib, '/api/lib/') 181 | api.add_resource(LibSync, '/api/lib/sync/') 182 | api.add_resource(LibStop, '/api/lib/stop/') 183 | api.add_resource(LibLog, '/api/lib/log/') 184 | api.add_resource(OO5List, '/api/oo5list') 185 | api.add_resource(OO5, '/api/oo5/') 186 | api.add_resource(SettingApi, '/api/settings') 187 | api.add_resource(DirApi, '/api/dir') 188 | 189 | # 跨域支持 190 | def after_request(resp): 191 | resp.headers['Access-Control-Allow-Origin'] = '*' 192 | resp.headers['Access-Control-Allow-Methods'] = 'POST, GET, OPTIONS, DELETE, PUT' 193 | resp.headers['Access-Control-Allow-Headers'] = 'Content-Type,XFILENAME,XFILECATEGORY,XFILESIZE,x-requested-with,Authorization' 194 | return resp 195 | 196 | app.after_request(after_request) 197 | 198 | @auth.verify_password 199 | def verify_password(username, password): 200 | setting = Setting() 201 | # 验证用户名和密码的逻辑 202 | if username == setting.username and password == setting.password: 203 | return True 204 | return False 205 | 206 | @app.route('/') 207 | @auth.login_required 208 | def index(): 209 | return app.send_static_file('index.html') 210 | 211 | @app.route('/assets/') 212 | def assets(filename): 213 | return app.send_static_file('assets/%s' % filename) 214 | 215 | @app.route('/api/job') 216 | def jobApi(): 217 | path = 
request.args.get('path') 218 | if path is None or path == "": 219 | return returnJson({'code': 404, 'msg': '同步目录不存在', 'data': {}}) 220 | lib = LIBS.getByPath(path) 221 | if lib is None: 222 | return returnJson({'code': 404, 'msg': '同步目录不存在', 'data': {}}) 223 | if lib.extra.pid > 0: 224 | return returnJson({'code': 500, 'msg': '该目录正在同步中...', 'data': {}}) 225 | p1 = Process(target=StartJob, kwargs={'key': lib.key}) 226 | p1.start() 227 | return returnJson({'code': 200, 'msg': '已启动任务,可调用API查询状态:/api/lib/{0}'.format(lib.key), 'data': {}}) 228 | 229 | def returnJson(returnBody): 230 | returnJson = json.dumps(returnBody) 231 | return returnJson, 200, {"Content-Type":"application/json"} 232 | 233 | def StartServer(host: str = '0.0.0.0'): 234 | # 启动一个线程,处理同步任务 235 | app.run(host, port=12123) 236 | 237 | if __name__ == '__main__': 238 | StartServer(host='127.0.0.1') -------------------------------------------------------------------------------- /watch.py: -------------------------------------------------------------------------------- 1 | from queue import Queue 2 | import shutil 3 | import signal 4 | import time 5 | from typing import Mapping 6 | from watchdog.observers import Observer 7 | from watchdog.observers.api import ObservedWatch 8 | from watchdog.events import * 9 | import os, sys 10 | 11 | from lib import Lib, Libs 12 | from log import getLogger 13 | 14 | LIBS = Libs() 15 | logger = getLogger(name='watch', rotating=True, stream=True) 16 | queue = Queue() 17 | pool: Mapping[str, ObservedWatch] = {} 18 | ob = Observer() 19 | 20 | class FileEventHandler(FileSystemEventHandler): 21 | 22 | lib: Lib 23 | taskPool: list[float] 24 | 25 | def __init__(self, key): 26 | super().__init__() 27 | self.lib = LIBS.getLib(key) 28 | if self.lib is None: 29 | raise ValueError('同步目录不存在') 30 | self.taskPool = [] 31 | 32 | def getStrmPath(self, path): 33 | # 返回目标位置路径 34 | if self.lib.cloud_type == '115': 35 | newPath: str = path.replace(self.lib.path_of_115, '') 36 | else: 37 | 
newPath: str = path.replace(self.lib.path, '') 38 | newPath = newPath.lstrip(os.sep) 39 | return os.path.join(self.lib.strm_root_path, newPath) 40 | 41 | def getPrePath(self, path: str): 42 | pathList = path.split(os.sep) 43 | pathList.pop() 44 | return os.sep.join(pathList) 45 | 46 | def on_any_event(self, event): 47 | pass 48 | 49 | def on_moved(self, event): 50 | srcStrmPath = self.getStrmPath(event.src_path) 51 | destStrmPath = self.getStrmPath(event.dest_path) 52 | if event.is_directory: 53 | # preStrmPath = self.getPrePath(destStrmPath) 54 | # if not os.path.exists(srcStrmPath): 55 | # logger.error("{0}不存在,无法移动到{1}".format(srcStrmPath, destStrmPath)) 56 | # return False 57 | # if not os.path.exists(preStrmPath): 58 | # logger.info("{0}不存在,创建该目录".format(preStrmPath)) 59 | # os.makedirs(preStrmPath) 60 | # if not os.path.exists(destStrmPath): 61 | # shutil.move(srcStrmPath, destStrmPath) 62 | # logger.info("移动:{0} => {1}".format(srcStrmPath, destStrmPath)) 63 | logger.warning("不处理目录移动,因为需要修改STRM文件内的路径 {0} => {1}".format(srcStrmPath, destStrmPath)) 64 | pass 65 | else: 66 | # 检查是否STRM文件 67 | filename, ext = os.path.splitext(srcStrmPath) 68 | destFilename, ext = os.path.splitext(destStrmPath) 69 | srcStrmFile = srcStrmPath 70 | destStrmFile = destStrmPath 71 | if ext in self.lib.strm_ext: 72 | srcStrmFile = "{0}.strm".format(filename) 73 | destStrmFile = "{0}.strm".format(destFilename) 74 | if not os.path.exists(srcStrmFile): 75 | logger.error("{0}不存在,无法移动到{1}".format(srcStrmFile, destStrmFile)) 76 | return False 77 | destPath = os.path.dirname(destStrmFile) 78 | if not os.path.exists(destPath): 79 | os.makedirs(destPath) 80 | logger.info("创建目录:{0}".format(destPath)) 81 | shutil.move(srcStrmFile, destStrmFile) 82 | logger.info("移动:{0} => {1}".format(srcStrmFile, destStrmFile)) 83 | return True 84 | # self.taskPool.append(timestamp = time.time()) 85 | 86 | def on_created(self, event): 87 | srcStrmFile = self.getStrmPath(event.src_path) 88 | if 
os.path.exists(srcStrmFile): 89 | logger.info("已存在:{0}".format(srcStrmFile)) 90 | return False 91 | if event.is_directory: 92 | if not os.path.exists(srcStrmFile): 93 | os.makedirs(srcStrmFile) 94 | logger.info("创建目录:{0}".format(srcStrmFile)) 95 | else: 96 | filename, ext = os.path.splitext(srcStrmFile) 97 | if ext in self.lib.strm_ext: 98 | strmFile = "{0}.strm".format(filename) 99 | # 只处理strm文件 100 | with open(strmFile, mode='w', encoding='utf-8') as f: 101 | f.write(event.src_path) 102 | logger.info("STRM文件: {0} => {1}".format(strmFile, event.src_path)) 103 | if ext in self.lib.meta_ext: 104 | # 处理元数据 105 | try: 106 | if self.lib.copy_meta_file == '复制': 107 | shutil.copy(event.src_path, srcStrmFile) 108 | logger.info("元数据复制: {0} => {1}".format(event.src_path, srcStrmFile)) 109 | if self.lib.copy_meta_file == '软链接': 110 | os.symlink(event.src_path, srcStrmFile) 111 | logger.info("元数据软链: {0} => {1}".format(event.src_path, srcStrmFile)) 112 | except Exception as e: 113 | logger.error("元数据失败: {0} => {1} : {2}".format(event.src_path, srcStrmFile, e)) 114 | 115 | def on_deleted(self, event): 116 | srcStrmFile = self.getStrmPath(event.src_path) 117 | if event.is_directory: 118 | if not os.path.exists(srcStrmFile): 119 | logger.info("不存在,跳过删除:{0}".format(srcStrmFile)) 120 | return False 121 | shutil.rmtree(srcStrmFile) 122 | logger.info("删除目录: {0}".format(srcStrmFile)) 123 | else: 124 | filename, ext = os.path.splitext(event.src_path) 125 | if ext in self.lib.strm_ext: 126 | # 尝试删除strm文件 127 | strmFile = "{0}.strm".format(filename) 128 | if os.path.exists(strmFile): 129 | os.unlink(strmFile) 130 | logger.info("删除STRM: {0}".format(strmFile)) 131 | else: 132 | if os.path.exists(srcStrmFile): 133 | os.unlink(srcStrmFile) 134 | logger.info("删除其他文件: {0}".format(srcStrmFile)) 135 | 136 | return True 137 | # self.taskPool.append(timestamp = time.time()) 138 | 139 | def on_modified(self, event): 140 | pass 141 | 142 | def watch(key: str) -> ObservedWatch | None: 143 | try: 144 
| eventHandler = FileEventHandler(key) 145 | if eventHandler.lib.cloud_type == '115': 146 | watchObj = ob.schedule(eventHandler,os.path.join(eventHandler.lib.path_of_115, eventHandler.lib.path), recursive=True) # 指定监控路径/触发对应的监控事件类 147 | else: 148 | watchObj = ob.schedule(eventHandler,os.path.join(eventHandler.lib.path), recursive=True) # 指定监控路径/触发对应的监控事件类 149 | return watchObj 150 | except Exception as e: 151 | logger.info('同步目录[{0}]无法启动监控任务\n {1}'.format(key, e)) 152 | return None 153 | 154 | def StartWatch(): 155 | global pool 156 | global ob 157 | def stop(sig, frame): 158 | ob.unschedule_all() 159 | ob.stop() 160 | sys.exit(0) 161 | 162 | signal.signal(signal.SIGINT, stop) 163 | signal.signal(signal.SIGTERM, stop) 164 | # 启动一个队列处理线程 165 | # fst = Thread(target=doFailedQueue) 166 | # fst.start() 167 | # logger.info('失败重试服务已启动') 168 | path_of_115 = '' 169 | isStart = False 170 | while(True): 171 | libs = LIBS.list() 172 | if len(libs) == 0: 173 | logger.info('没有需要监控的目录,等待10s') 174 | time.sleep(10) 175 | continue 176 | # logger.info("开始检测115挂载是否失效") 177 | # if path_of_115 != "": 178 | # if not os.path.exists(path_of_115): 179 | # # 挂载丢失,停止全部线程,等待重试 180 | # logger.warning('115挂载丢失,将结束全部监控线程,等待30s重试') 181 | # ob.unschedule_all() 182 | # ob.stop() 183 | # isStart = False 184 | # pool = {} 185 | # time.sleep(60) 186 | # else: 187 | # logger.info('115挂载正常') 188 | 189 | # 开始处理同步目录 190 | for item in libs: 191 | # if item.cloud_type == '115' and item.type == '本地路径' and path_of_115 == '': 192 | # path_of_115 = os.path.join(item.path_of_115, item.path) 193 | # logger.info("检测挂载路径是否失效的路径:%s" % path_of_115) 194 | # 检查是否存在进程 195 | try: 196 | p = pool.get(item.key) 197 | if item.sync_type != '监控变更' and p is not None: 198 | # 结束进程 199 | logger.info('同步目录[%s]的同步方式变更为非监控,终止监控任务' % item.path) 200 | ob.unschedule(p) 201 | del pool[item.key] 202 | continue 203 | if item.sync_type == '监控变更' and p is None: 204 | # 启动新的子进程 205 | watchObj = watch(item.key) 206 | if watchObj is not None: 
207 | pool[item.key] = watchObj 208 | logger.info('新增同步目录[%s]监控任务' % item.path) 209 | continue 210 | except Exception as e: 211 | logger.error("同步目录处理失败 [{0}] : {1}".format(item.path, e)) 212 | continue 213 | # 开始查找已经删除的任务 214 | for key in pool: 215 | item = LIBS.getLib(key) 216 | if item is not None: 217 | continue 218 | # 同步目录已删除,终止任务 219 | try: 220 | watchObj = pool.get(key) 221 | ob.unschedule(watchObj) 222 | except Exception as e: 223 | logger.error("监控任务停止失败 [{0}] : {1}".format(item.path, e)) 224 | del pool[key] 225 | logger.info('同步目录[%s]已删除,终止监控任务' % item.path) 226 | try: 227 | if isStart is False: 228 | ob.start() 229 | isStart = True 230 | logger.info("已启动全部监控任务") 231 | #logger.info('已启动所有监控任务,开始10s一次检测任务执行状态') 232 | time.sleep(10) 233 | except Exception as e: 234 | logger.error("监控任务停止: {1}".format(e)) 235 | break 236 | 237 | if __name__ == '__main__': 238 | StartWatch() --------------------------------------------------------------------------------