├── .github ├── ISSUE_TEMPLATE │ └── bug-report---bug---.md └── workflows │ └── tests.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── alipcs_py ├── __init__.py ├── alipcs │ ├── __init__.py │ ├── api.py │ ├── errors.py │ ├── inner.py │ ├── pcs.py │ ├── phone.py │ └── tree.py ├── app │ ├── __init__.py │ ├── account.py │ ├── app.py │ └── config.py ├── commands │ ├── __init__.py │ ├── cat.py │ ├── crypto.py │ ├── display.py │ ├── download.py │ ├── env.py │ ├── errors.py │ ├── file_operators.py │ ├── index.html │ ├── list_files.py │ ├── log.py │ ├── login.py │ ├── play.py │ ├── search.py │ ├── server.py │ ├── share.py │ ├── sifter.py │ ├── sync.py │ ├── upload.py │ └── user.py ├── common │ ├── cache.py │ ├── concurrent.py │ ├── constant.py │ ├── crypto.py │ ├── date.py │ ├── downloader.py │ ├── event.py │ ├── file_type.py │ ├── io.py │ ├── keyboard.py │ ├── log.py │ ├── net.py │ ├── number.py │ ├── path.py │ ├── platform.py │ ├── progress_bar.py │ ├── simple_cipher.pyx │ ├── url.py │ └── util.py ├── config │ └── __init__.py ├── storage │ ├── __init__.py │ ├── store.py │ └── tables.py └── utils.py ├── build.py ├── imgs └── refresh_token.png ├── pyproject.toml ├── setup.py └── tests ├── __init__.py ├── conftest.py ├── datas.py ├── test-datas └── demo-directory.tar.gz ├── test_alipcs.py ├── test_alipcsapi.py ├── test_commands.py └── test_common.py /.github/ISSUE_TEMPLATE/bug-report---bug---.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report / Bug 报告 3 | about: Create a report to help us improve / 创建一份报告来帮助我们改进 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | > **WARNNING**: Please to search the similar bugs, before you report a bug. Don't report a similar bug which existed. 11 | > **注意**:在你提交一份报告前,请先搜索是否存在类似的报告。请勿重复提交内容相同的报告。 12 | 13 | **Prerequisites / 报告前提** 14 | Before you report a bug, please let the bug to be reproduced at the latest verion of AliPCS-Py. 15 | 在你提交报告前,请在 AliPCS-Py 的最新版本上复现问题。 16 | 17 | **Describe the bug / 描述 bug** 18 | A clear and concise description of what the bug is. 19 | 请清楚的描述你遇到的问题。 20 | 21 | **To Reproduce / 复现问题** 22 | Steps to reproduce the behavior: 23 | 1. Do '...' 24 | 2. Do '....' 25 | 3. ... 26 | 4. See error 27 | 28 | 按照下面的步骤可以复现问题: 29 | 1. 做 '...' 30 | 2. 做 '...' 31 | 3. ... 32 | 4. 问题出现 33 | 34 | **Screenshots / 问题截图** 35 | If applicable, add screenshots to help explain your problem. 36 | 37 | 如果可能,请附加问题截图。 38 | 39 | **Envrionment / 运行环境** 40 | - OS: [e.g. Windows] 41 | - Python [e.g. Python3.8] 42 | - AliPCS-Py Version [e.g. 0.1.0] 43 | 44 | **Runing log / 运行日志** 45 | Please follow steps to paste the content of file `~/.alipcs-py/running.log`. 46 | 1. Remove the file `~/.alipcs-py/running.log` if it exists. 47 | 2. Run the command where the bug occurs with envrionment variable `LOG_LEVEL=DEBUG`. 48 | e.g. `LOG_LEVEL=DEBUG AliPCS-Py upload /abc /` 49 | 3. Paste the content of file `~/.alipcs-py/running.log` after the bug occurs. 50 | 51 | 请按照下面的步骤贴出运行日志 `~/.alipcs-py/running.log` 中的内容。 52 | 1. 删除 `~/.alipcs-py/running.log`,如果存在。 53 | 2. 在问题发生的命令前加入环境变量 `LOG_LEVEL=DEBUG`。 54 | 例如:`LOG_LEVEL=DEBUG AliPCS-Py upload /abc /` 55 | 3. 在问题出现后,贴出 `~/.alipcs-py/running.log` 中的内容。 56 | 57 | **Additional context / 补充内容** 58 | Add any other context about the problem here. 
59 | 在这里增加补充内容。 60 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: AliPCS-Py Build & Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | - release/* 11 | 12 | jobs: 13 | build-test: 14 | runs-on: ${{ matrix.os }} 15 | strategy: 16 | matrix: 17 | os: [windows-latest, ubuntu-latest] 18 | python-version: ["3.8", "3.12"] 19 | defaults: 20 | run: 21 | shell: bash 22 | steps: 23 | - uses: actions/checkout@v3 24 | - name: Set up Python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v4 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | architecture: x64 29 | - name: Install and configure Poetry 30 | uses: snok/install-poetry@v1 31 | with: 32 | virtualenvs-create: true 33 | - name: Install dependencies 34 | run: | 35 | poetry run pip3 install setuptools 36 | poetry install --no-root 37 | if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' 38 | - name: Format check 39 | run: poetry run ruff format --check . 40 | - name: Typecheck 41 | run: poetry run ruff check alipcs_py 42 | - name: Test with pytest 43 | run: | 44 | poetry run python build.py build_ext --inplace 45 | poetry run pytest -v -s 46 | - name: Test package 47 | run: | 48 | poetry build -f sdist 49 | poetry run pip install dist/* 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | 45 | # Rope 46 | .ropeproject 47 | 48 | # Django stuff: 49 | *.log 50 | *.pot 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | /bin.py 56 | /alipcs_py/common/simple_cipher.c 57 | /alipcs_py/common/simple_cipher.html 58 | 59 | # Secret things 60 | /run-tests.sh 61 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## v0.8.1 - 2024-04-10 4 | 5 | ### Fixed 6 | 7 | - 修复保持分享链接根目录出错。 8 | 9 | ### Changed 10 | 11 | - `updateuser` 命令中也更新 tokens。 12 | 13 | ## v0.8.0 - 2024-04-09 14 | 15 | Add new apis and remove unneeded apis. 16 | 17 | #### Inner datas 18 | 19 | 1. **PcsFile** class 20 | 21 | - `path` 22 | 23 | Default is the name of the file. It will be different from different apis returned. See `AliPCSApi.meta`, `AliPCSApi.meta_by_path`, `AliPCSApi.get_file`, `AliPCSApi.list`, `AliPCSApi.list_iter`, `AliPCSApi.path_traceback`, `AliPCSApi.path`. 24 | 25 | - `update_download_url` 26 | 27 | The method is removed. Use `AliPCSApi.update_download_url` instead. 28 | 29 | 2. **FromTo** type 30 | 31 | The original `FromTo` is a nametuple. 
We change it to a general type `FromTo = Tuple[F, T]` 32 | 33 | 3. **PcsDownloadUrl** class 34 | 35 | - `expires` 36 | 37 | Add the method to check whether the `download_url` expires. 38 | 39 | #### Errors 40 | 41 | 1. **AliPCSBaseError** class 42 | 43 | The base Exception class used for the PCS errors. 44 | 45 | 2. **AliPCSError(AliPCSBaseError)** class 46 | 47 | The error returned from alipan server when the client’s request is incorrect or the token is expired. 48 | 49 | It throw at **AliPCS** class when an error occurs. 50 | 51 | 3. **DownloadError(AliPCSBaseError)** class 52 | 53 | An error occurs when downloading action fails. 54 | 55 | 4. **UploadError(AliPCSBaseError)** class 56 | 57 | An error occurs when uploading action fails. 58 | 59 | 5. **RapidUploadError(UploadError)** class 60 | 61 | An error occurred when rapid uploading action fails. 62 | 63 | 6. **make_alipcs_error** function 64 | 65 | Make an AliPCSError instance. 66 | 67 | 7. **handle_error** function 68 | 69 | uses the `_error_max_retries` attribute of the wrapped method’s class to retry. 70 | 71 | #### Core APIs 72 | 73 | 1. **AliPCS** class 74 | 75 | ```python 76 | class AliPCS: 77 | SHARE_AUTHS: Dict[str, SharedAuth] = {} 78 | def __init__( 79 | self, 80 | refresh_token: str, 81 | access_token: str = "", 82 | token_type: str = "Bearer", 83 | expire_time: int = 0, 84 | user_id: str = "", 85 | user_name: str = "", 86 | nick_name: str = "", 87 | device_id: str = "", 88 | default_drive_id: str = "", 89 | role: str = "", 90 | status: str = "", 91 | error_max_retries: int = 2, 92 | max_keepalive_connections: int = 50, 93 | max_connections: int = 50, 94 | keepalive_expiry: float = 10 * 60, 95 | connection_max_retries: int = 2, 96 | ): ... 97 | ``` 98 | 99 | The core alipan.com service apis. It directly handles the raw requests and responses of the service. 100 | 101 | **New/Changed APIs are following:** 102 | 103 | - `path_traceback` method (**New**) 104 | 105 | Traceback the path of the file by its file_id. Return the list of all parent directories' info from the file to the top level directory. 106 | 107 | - `meta_by_path` method (**New**) 108 | 109 | Get meta info of the file by its path. 110 | 111 | > Can not get the shared files' meta info. 112 | 113 | - `meta` method (**Changed**) 114 | 115 | Get meta info of the file by its file_id. 116 | 117 | - `exists` method (**Changed**) 118 | 119 | Check whether the file exists. Return True if the file exists and does not in the trash else False. 120 | 121 | - `exists_in_trash` method (**New**) 122 | 123 | Check whether the file exists in the trash. Return True if the file exists in the trash else False. 124 | 125 | - `walk` method (**New**) 126 | 127 | Walk through the directory tree by its file_id. 128 | 129 | - `download_link` method (**Changed**) 130 | 131 | Get download link of the file by its file_id. 132 | 133 | First try to get the download link from the meta info of the file. If the download link is not in the meta info, then request the getting download url api. 134 | 135 | 2. 
**AliPCSApi** class 136 | 137 | ```python 138 | class AliPCSApi: 139 | def __init__( 140 | self, 141 | refresh_token: str, 142 | access_token: str = "", 143 | token_type: str = "", 144 | expire_time: int = 0, 145 | user_id: str = "", 146 | user_name: str = "", 147 | nick_name: str = "", 148 | device_id: str = "", 149 | default_drive_id: str = "", 150 | role: str = "", 151 | status: str = "", 152 | error_max_retries: int = 2, 153 | max_keepalive_connections: int = 50, 154 | max_connections: int = 50, 155 | keepalive_expiry: float = 10 * 60, 156 | connection_max_retries: int = 2, 157 | ): ... 158 | ``` 159 | 160 | The core alipan.com service api with wrapped **AliPCS** class. It parses the raw content of response of AliPCS request into the inner data structions. 161 | 162 | - **New/Changed APIs are following:** 163 | 164 | - `path_traceback` method (**New**) 165 | 166 | Traceback the path of the file. Return the list of all `PcsFile`s from the file to the top level directory. 167 | 168 | > _Important_: 169 | > The `path` property of the returned `PcsFile` has absolute path. 170 | 171 | - `meta_by_path` method (**New**) 172 | 173 | Get the meta of the the path. Can not get the shared files' meta info by their paths. 174 | 175 | > _Important_: 176 | > The `path` property of the returned `PcsFile` is the argument `remotepath`. 177 | 178 | - `meta` method (**Changed**) 179 | 180 | Get meta info of the file. 181 | 182 | > _Important_: 183 | > The `path` property of the returned `PcsFile` is only the name of the file. 184 | 185 | - `get_file` method (**New**) 186 | 187 | Get the file's info by the given `remotepath` or `file_id` 188 | 189 | If the `remotepath` is given, the `file_id` will be ignored. 190 | 191 | > _Important_: 192 | > If the `remotepath` is given, the `path` property of the returned `PcsFile` is the `remotepath`. 193 | > If the `file_id` is given, the `path` property of the returned `PcsFile` is only the name of the file. 194 | 195 | - `exists` method (**Changed**) 196 | 197 | Check whether the file exists. Return True if the file exists and does not in the trash else False. 198 | 199 | - `exists_in_trash` method (**Changed**) 200 | 201 | Check whether the file exists in the trash. Return True if the file exists in the trash else False. 202 | 203 | - `list` method (**Changed**) 204 | 205 | List files and directories in the given directory (which has the `file_id`). The return items size is limited by the `limit` parameter. If you want to list more, using the returned `next_marker` parameter for next `list` call. 206 | 207 | > _Important_: 208 | > These PcsFile instances' path property is only the name of the file. 209 | 210 | - `list_iter` method (**Changed**) 211 | 212 | Iterate all files and directories at the directory (which has the `file_id`). 213 | 214 | > These returned PcsFile instances' path property is the path from the first sub-directory of the `file_id` to the file name. 215 | > e.g. 216 | > If the directory (owned `file_id`) has path `level0/`, a sub-directory which of path is 217 | > `level0/level1/level2` then its corresponding PcsFile.path is `level1/level2`. 218 | 219 | - `path` method (**Changed**) 220 | 221 | Get the pcs file's info by the given absolute `remotepath` 222 | 223 | > _Important_: 224 | > The `path` property of the returned `PcsFile` is the argument `remotepath`. 
225 | 226 | - `list_path` method (**Removed**) 227 | 228 | - `list_path_iter` method (**Removed**) 229 | 230 | - `walk` method (**New**) 231 | 232 | Recursively Walk through the directory tree which has `file_id`. 233 | 234 | > _Important_: 235 | > These PcsFile instances' path property is the path from the first sub-directory of the `file_id` to the file. 236 | > e.g. 237 | > If the directory (owned `file_id`) has path `level0/`, a sub-directory which of path is 238 | > `level0/level1/level2` then its corresponding PcsFile.path is `level1/level2`. 239 | 240 | - `makedir` method (**Changed**) 241 | 242 | Make a directory in the `dir_id` directory 243 | 244 | > _Important_: 245 | > The `path` property of the returned `PcsFile` is only the name of the directory. 246 | 247 | - **makedir_path** method (**Changed**) 248 | 249 | Make a directory by the absolute `remotedir` path 250 | 251 | Return the list of all `PcsFile`s from the directory to the top level directory. 252 | 253 | > _Important_: 254 | > The `path` property of the returned `PcsFile` has absolute path. 255 | 256 | - `rename` method (**Changed**) 257 | 258 | Rename the file with `file_id` to `name` 259 | 260 | > _Important_: 261 | > The `path` property of the returned `PcsFile` is only the name of the file. 262 | 263 | - `copy` method (**Changed**) 264 | 265 | Copy `file_ids[:-1]` to `file_ids[-1]` 266 | 267 | > _Important_: 268 | > The `path` property of the returned `PcsFile` is only the name of the file. 269 | 270 | - `update_download_url` method (**New**) 271 | 272 | Update the download url of the `pcs_file` if it is expired. 273 | 274 | Return a new `PcsFile` with the updated download url. 275 | 276 | #### Download 277 | 278 | 1. **MeDownloader** class 279 | 280 | ```python 281 | class MeDownloader: 282 | def __init__( 283 | self, 284 | range_request_io: RangeRequestIO, 285 | localpath: PathType, 286 | continue_: bool = False, 287 | max_retries: int = 2, 288 | done_callback: Optional[Callable[..., Any]] = None, 289 | except_callback: Optional[Callable[[Exception], Any]] = None, 290 | ) -> None: ... 291 | ``` 292 | 293 | 2. **download** module 294 | 295 | - `DownloadParams` class (**Removed**) 296 | 297 | We remove the `DownloadParams` instead of using arguments for function calling. 298 | 299 | - `download_file` function (**Changed**) 300 | 301 | `download_file` downloads one remote file to one local directory. Raise any error occurred. So giving the upper level caller to handle errors. 302 | 303 | - `download` function (**Changed**) 304 | 305 | `download` function downloads any number of remote files/directory to one local directory. It uses a `ThreadPoolExecutor` to download files concurrently and raise the exception if any error occurred. 306 | 307 | 3. **upload** module 308 | 309 | - `UploadType` class (**Removed**) 310 | 311 | Alipan.com only support to upload a file through uploading slice parts one by one. 312 | 313 | So, the class is not needed. 314 | 315 | - `upload_file` function (**Changed**) 316 | 317 | Upload a file from one local file ( `from_to[0]`) to remote ( `from_to[1]`). 318 | 319 | First try to rapid upload, if failed, then upload file's slices. 320 | 321 | Raise exception if any error occurs. 322 | 323 | - `upload` function (**Changed**) 324 | 325 | Upload files in `from_to_list` to Alipan Drive. 326 | 327 | Use a `ThreadPoolExecutor` to upload files concurrently. 328 | 329 | Raise exception if any error occurs. 
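Below is a minimal usage sketch of the `path` semantics described in these notes. It assumes an already-authorized `AliPCSApi` instance and an existing remote directory `/level0/level1/level2`; the token value and the concrete paths are placeholders, not part of the release.

```python
from alipcs_py.alipcs import AliPCSApi

api = AliPCSApi(refresh_token="your-refresh-token")  # placeholder token

# Looked up by absolute path: `path` is the argument itself.
dir_pf = api.get_file(remotepath="/level0")
assert dir_pf is not None and dir_pf.path == "/level0"

# Looked up by file_id: `path` is only the file's name.
same_pf = api.get_file(file_id=dir_pf.file_id)
assert same_pf is not None and same_pf.path == "level0"

# Walking a directory: each returned PcsFile.path is relative to the walked
# directory, e.g. "level1/level2" for the remote path "/level0/level1/level2".
for sub_pf in api.walk(dir_pf.file_id):
    print(sub_pf.path)
```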
330 | 331 | ## v0.7.0 - 2024-04-03 332 | 333 | ### Updated 334 | 335 | - `MeDownloader` 剥离线程池,让上层函数来控制。 336 | - 不在 `upload_file` 函数上进行重试,让上层函数来控制。 337 | - `PcsFile.update_download_url` 将在下个版本删除。请该用 `AliPCSApi.update_download_url`。 338 | - `RangeRequestIO` 增加 `read_iter` 方法。 339 | - 使用 `python-dateutil` 库来解析时间。 340 | 341 | ### Fixed 342 | 343 | - 修复播放分享文件时的暂停问题。 344 | 345 | ## v0.6.3 - 2024-02-04 346 | 347 | ### Updated 348 | 349 | - 减少上传时对文件检查的 api 请求。 350 | - 支持 Python 3.8 ~ 3.12 351 | 352 | ## v0.6.2 - 2023-12-04 353 | 354 | ### Fixed 355 | 356 | - 修复上传链接超时 357 | 358 | ### Updated 359 | 360 | - 根新依赖 361 | 362 | ## v0.6.1 - 2023-05-09 363 | 364 | ### Fixed 365 | 366 | - 修复下载进度条太长的问题。 367 | 368 | ## v0.6.0 - 2023-04-24 369 | 370 | ### New Feature 371 | 372 | - 支持 阿里云盘开放平台 api。 373 | 374 | ## v0.5.3 - 2023-02-28 375 | 376 | ### Updated 377 | 378 | - 因为 `device_id` 不再在 `AliPCS.user_info` 中返回,需要把运行环境中的 `device_id` 加入到 `user_info`。 379 | 380 | ## v0.5.2 - 2023-02-25 381 | 382 | ### Fixed 383 | 384 | - 修复 `DeviceSessionSignatureInvalid` 报错。支持自动更新 `signature`。 385 | 386 | ## v0.5.1 - 2023-02-22 387 | 388 | ### Fixed 389 | 390 | - 修复 `ls` 命令输出省略结果。 (#11) 391 | 392 | ## v0.5.0 - 2023-02-15 393 | 394 | ### Updated 395 | 396 | - 使用临时 API 接口,让下载可用。 397 | 398 | ## v0.4.1 - 2023-02-02 399 | 400 | ### Fixed 401 | 402 | - 修复安装失败。 403 | 404 | ### Updated 405 | 406 | - 更新依赖。 407 | 408 | ## v0.4.0 - 2023-01-09 409 | 410 | ### Breaking Changes 411 | 412 | - 下面几个 api 都增加了参数 `part_number`。 413 | 414 | - `AliPCS.create_file` 415 | - `AliPCS.prepare_file` 416 | - `AliPCSApi.create_file` 417 | - `AliPCSApi.prepare_file` 418 | 419 | `part_number` 指明上传的该文件需要分几次上传。 420 | 这个参数需要用户自己计算出来。一般用 `ceiling(上传文件大小 / 单个上传分片大小)`,其中一般 `单个上传分片大小` 建议为 `80MB`。 421 | 422 | ### Fixed 423 | 424 | - 修复上传大于 100G 的文件失败的问题。 (#4) 425 | - 修复播出路径出错的问题。 426 | 427 | ## v0.3.4 - 2022-12-16 428 | 429 | ### Fixed 430 | 431 | - 修复删除用户错误。 (#3) 432 | 433 | ## v0.3.3 - 2022-12-10 434 | 435 | ### Updated 436 | 437 | - 更新 `AliPCS.meta` api。 438 | 439 | ## v0.3.2 - 2022-12-04 440 | 441 | ### Added 442 | 443 | - `listsharedlinks` 命令支持分页。 444 | 445 | ### Changed 446 | 447 | - 在下载和上传时,让调用者去初始化进度条。 448 | 449 | ### Fixed 450 | 451 | - 修复不完整上传错误。 452 | - 修复上传时创建多个同名目录的问题。 453 | - 修复同步失败的问题。 454 | - 修复 `ls`, `download`, `play`, `search` 中 `--include-regex` 选项。 455 | 456 | ### Updated 457 | 458 | - 更新依赖。 459 | 460 | ## v0.3.1 - 2022-10-26 461 | 462 | ### Fixed 463 | 464 | - 修复同步命令(`sync`)错误。 465 | - 修复下载连接过期的问题。 466 | 467 | ### Added 468 | 469 | - 删除命令(`remove`)支持`--file-id`参数。 470 | 471 | ## v0.3.0 - 2022-09-24 472 | 473 | ### Added 474 | 475 | - 支持保存分享连接至本地 476 | 477 | 可以将他人分享了连接保存至本地,而不需要保存在网盘。这只作为一个记录。在需要是提供查看搜索功能。 478 | 479 | 使用这个功能,需要使用者在本地配置文件(`~/.alipcs-py/config.toml`)中配置: 480 | 481 | ```toml 482 | [share] 483 | store = true 484 | ``` 485 | 486 | 提供以下命令: 487 | 488 | | 命令 | 描述 | 489 | | ------------------ | ---------------------------- | 490 | | storesharedlinks | 保存分享连接至本地 | 491 | | listsharedlinks | 显示本地保存的分享连接 | 492 | | listsharedfiles | 显示本地保存的分享文件 | 493 | | findsharedlinks | 查找本地保存的分享连接 | 494 | | findsharedfiles | 查找本地保存的分享文件 | 495 | | findshared | 查找本地保存的分享连接和文件 | 496 | | deletestoredshared | 删除本地保存的分享连接或文件 | 497 | | cleanstore | 清理本地保存的无效分享连接 | 498 | 499 | ## v0.2.5 - 2022-01-15 500 | 501 | ### Updated 502 | 503 | - 更新依赖。 504 | 505 | ### Changed 506 | 507 | - 阿里网盘不支持单文件并发上传。`upload --upload-type One` 失效。 508 | 509 | ## v0.2.4 - 2021-10-12 510 | 511 | ### Added 512 | 513 | - 支持 alywp.net 的分享连接。 514 | 515 | ### Fixed 516 | 517 | - 修复 `upload -t 
One` 518 | - 修复 `ls`, `download`, `play` 用于分享连接。 519 | 520 | ### Changed 521 | 522 | - 改 `play` 命令选项 `--play` 缩写为 `--pl` 523 | 524 | ## v0.2.3 - 2021-09-24 525 | 526 | ### Fixed 527 | 528 | - 重复列出分享根目录。 529 | 530 | ### Changed 531 | 532 | - `ls`, `download`, `play` 命令用于分享 url 或 id 时,必须指定路径或 file id。 533 | 534 | ## v0.2.2 - 2021-09-23 535 | 536 | ### Fixed 537 | 538 | - 修复保存分享连接根目录出错。 539 | 540 | ## v0.2.1 - 2021-09-23 541 | 542 | ### Added 543 | 544 | - 支持下载、播放他人分享的文件。 545 | - `listsharedpaths` 命令合并到 `ls`。 546 | - 支持保存他人分享连接中的特定文件。 547 | 548 | ## v0.1.0 - 2021-09-19 549 | 550 | Runnable version 551 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2021 Peter Ding 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | global-exclude **/__pycache__/** 2 | global-exclude *.pyc 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | typecheck: 2 | ruff check alipcs_py 3 | 4 | 5 | format-check: 6 | ruff format --check . 7 | 8 | format: 9 | ruff format . 
10 | 11 | build-pyx: 12 | python build.py build_ext --inplace 13 | 14 | 15 | build: all 16 | rm -fr dist 17 | poetry build -f sdist 18 | 19 | publish: all 20 | poetry publish 21 | 22 | build-publish: build publish 23 | 24 | all: format-check typecheck 25 | -------------------------------------------------------------------------------- /alipcs_py/__init__.py: -------------------------------------------------------------------------------- 1 | from alipcs_py.alipcs import AliPCS, AliPCSApi 2 | 3 | 4 | __all__ = ["AliPCS", "AliPCSApi"] 5 | 6 | __version__ = "0.8.1" 7 | -------------------------------------------------------------------------------- /alipcs_py/alipcs/__init__.py: -------------------------------------------------------------------------------- 1 | from .pcs import AliPCS, AliOpenPCS, AliOpenAuth 2 | from .api import AliPCSApi, AliPCSApiMix 3 | from .errors import AliPCSError 4 | 5 | from .inner import * 6 | 7 | 8 | __all__ = ["AliPCS", "AliPCSApi", "AliOpenPCS", "AliPCSApiMix", "AliOpenAuth", "AliPCSError"] 9 | -------------------------------------------------------------------------------- /alipcs_py/alipcs/errors.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Any 2 | from functools import wraps 3 | import time 4 | import logging 5 | 6 | from alipcs_py.common.path import PathType 7 | from alipcs_py.alipcs.inner import PcsFile 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | class AliPCSBaseError(Exception): 13 | """Base exception for all errors. 14 | 15 | Args: 16 | message (Optional[object]): The message object stringified as 'message' attribute 17 | keyword error (Exception): The original exception if any 18 | """ 19 | 20 | def __init__(self, message: Optional[object], *args: Any, **kwargs: Any) -> None: 21 | self.inner_exception: Optional[BaseException] = kwargs.get("error") 22 | 23 | self.message = str(message) 24 | super().__init__(self.message, *args) 25 | 26 | 27 | class AliPCSError(AliPCSBaseError): 28 | """The error returned from alipan server when the client’s request is incorrect or the token is expired. 29 | 30 | It is throwed at `AliPCS` class when an error occurs, then transports to the upper level class. 
31 | """ 32 | 33 | def __init__(self, message: str, error_code: Optional[str] = None): 34 | self.error_code = error_code 35 | super().__init__(message) 36 | 37 | 38 | class DownloadError(AliPCSBaseError): 39 | """An error occurred while downloading a file.""" 40 | 41 | def __init__(self, message: str, remote_pcs_file: PcsFile, localdir: PathType): 42 | self.remote_pcs_file = remote_pcs_file 43 | self.localdir = localdir 44 | super().__init__(message) 45 | 46 | 47 | class UploadError(AliPCSBaseError): 48 | """An error occurred while uploading a file.""" 49 | 50 | def __init__(self, message: str, localpath: PathType, remotepath: str): 51 | self.local_file = localpath 52 | self.remote_dir = remotepath 53 | super().__init__(message) 54 | 55 | 56 | class RapidUploadError(UploadError): 57 | """An error occurred while rapid uploading a file.""" 58 | 59 | def __init__(self, message: str, localpath: PathType, remotepath: str): 60 | super().__init__(message, localpath, remotepath) 61 | 62 | 63 | def make_alipcs_error(error_code: str, info: Any = None) -> AliPCSError: 64 | msg = f"API error code: {error_code}, response: {info}" 65 | return AliPCSError(msg, error_code=error_code) 66 | 67 | 68 | def handle_error(func): 69 | """Handle error when calling AliPCS API.""" 70 | 71 | @wraps(func) 72 | def retry(*args, **kwargs): 73 | self = args[0] 74 | max_retries = getattr(self, "_error_max_retries", 2) 75 | code = "This is impossible !!!" 76 | result = None 77 | for _ in range(max_retries): 78 | result = func(*args, **kwargs) 79 | if not isinstance(result, dict): 80 | return result 81 | 82 | code = result.get("code") 83 | if not code: 84 | return result 85 | 86 | # Error code 87 | # {{{ 88 | if code == "AccessTokenInvalid": 89 | self.refresh() 90 | continue 91 | 92 | elif code == "ShareLinkTokenInvalid": 93 | varnames = func.__code__.co_varnames 94 | idx = varnames.index("share_id") 95 | if idx < len(args): 96 | share_id = args[idx] 97 | else: 98 | share_id = kwargs.get("share_id") 99 | 100 | share_auth = self.__class__.SHARE_AUTHS.get(share_id) 101 | if share_auth: 102 | share_auth.expire_time = 0 103 | continue 104 | 105 | elif code == "ParamFlowException": 106 | logger.warning("ParamFlowException occurs. sleep 10s.") 107 | time.sleep(10) 108 | continue 109 | 110 | elif code == "DeviceSessionSignatureInvalid": 111 | self._signature = "" 112 | continue 113 | 114 | elif code.startswith("NotFound."): 115 | break 116 | # }}} 117 | 118 | # Status code 119 | # {{{ 120 | elif code == "PreHashMatched": # AliPCS.create_file: Pre hash matched. 
121 | return result 122 | # }}} 123 | 124 | raise make_alipcs_error(code, info=result) 125 | 126 | return retry 127 | -------------------------------------------------------------------------------- /alipcs_py/alipcs/phone.py: -------------------------------------------------------------------------------- 1 | PHONE_MODEL_DATABASE = [ 2 | "1501_M02", # 360 F4 3 | "1503-M02", # 360 N4 4 | "1505-A01", # 360 N4S 5 | "303SH", # 夏普 Aquos Crystal Xx Mini 303SH 6 | "304SH", # 夏普 Aquos Crystal Xx SoftBank 7 | "305SH", # 夏普 Aquos Crystal Y 8 | "306SH", # 夏普 Aquos Crystal 306SH 9 | "360 Q5 Plus", # 360 Q5 Plus 10 | "360 Q5", # 360 Q5 11 | "402SH", # 夏普 Aquos Crystal X 12 | "502SH", # 夏普 Aquos Crystal Xx2 13 | "6607", # OPPO U3 14 | "A1001", # 一加手机1 15 | "ASUS_A001", # 华硕 ZenFone 3 Ultra 16 | "ASUS_A001", # 华硕 ZenFone 3 Ultra 17 | "ASUS_Z00ADB", # 华硕 ZenFone 2 18 | "ASUS_Z00UDB", # 华硕 Zenfone Selfie 19 | "ASUS_Z00XSB", # 华硕 ZenFone Zoom 20 | "ASUS_Z012DE", # 华硕 ZenFone 3 21 | "ASUS_Z012DE", # 华硕 ZenFone 3 22 | "ASUS_Z016D", # 华硕 ZenFone 3 尊爵 23 | "ATH-TL00H", # 华为 荣耀 7i 24 | "Aster T", # Vertu Aster T 25 | "BLN-AL10", # 华为 荣耀 畅玩6X 26 | "BND-AL10", # 荣耀7X 27 | "BTV-W09", # 华为 M3 28 | "CAM-UL00", # 华为 荣耀 畅玩5A 29 | "Constellation V", # Vertu Constellation V 30 | "D6683", # 索尼 Xperia Z3 Dual TD 31 | "DIG-AL00", # 华为 畅享 6S 32 | "E2312", # 索尼 Xperia M4 Aqua 33 | "E2363 ", # 索尼 Xperia M4 Aqua Dual 34 | "E5363", # 索尼 Xperia C4 35 | "E5563", # 索尼 Xperia C5 36 | "E5663", # 索尼 Xperia M5 37 | "E5823", # 索尼 Xperia Z5 Compact 38 | "E6533", # 索尼 Xperia Z3+ 39 | "E6683", # 索尼 Xperia Z5 40 | "E6883", # 索尼 Xperia Z5 Premium 41 | "EBEN M2", # 8848 M2 42 | "EDI-AL10", # 华为 荣耀 Note 8 43 | "EVA-AL00", # 华为 P9 44 | "F100A", # 金立 F100 45 | "F103B", # 金立 F103B 46 | "F3116", # 索尼 Xperia XA 47 | "F3216", # 索尼 Xperia XA Ultra 48 | "F5121 / F5122", # 索尼 Xperia X 49 | "F5321", # 索尼 Xperia X Compact 50 | "F8132", # 索尼 Xperia X Performance 51 | "F8332", # 索尼 Xperia XZ 52 | "FRD-AL00", # 华为 荣耀 8 53 | "FS8001", # 夏普 C1 54 | "FS8002", # 夏普 A1 55 | "G0111", # 格力手机 1 56 | "G0215", # 格力手机 2 57 | "G8142", # 索尼Xperia XZ Premium G8142 58 | "G8342", # 索尼Xperia XZ1 59 | "GIONEE S9", # 金立 S9 60 | "GN5001S", # 金立 金钢 61 | "GN5003", # 金立 大金钢 62 | "GN8002S", # 金立 M6 Plus 63 | "GN8003", # 金立 M6 64 | "GN9011", # 金立 S8 65 | "GN9012", # 金立 S6 Pro 66 | "GRA-A0", # Coolpad Cool Play 6C 67 | "H60-L11", # 华为 荣耀 6 68 | "HN3-U01", # 华为 荣耀 3 69 | "HTC D10w", # HTC Desire 10 Pro 70 | "HTC E9pw", # HTC One E9+ 71 | "HTC M10u", # HTC 10 72 | "HTC M8St", # HTC One M8 73 | "HTC M9PT", # HTC One M9+ 74 | "HTC M9e", # HTC One M9 75 | "HTC One A9", # HTC One A9 76 | "HTC U-1w", # HTC U Ultra 77 | "HTC X9u", # HTC One X9 78 | "HTC_M10h", # HTC 10 国际版 79 | "HUAWEI CAZ-AL00", # 华为 Nova 80 | "HUAWEI CRR-UL00", # 华为 Mate S 81 | "HUAWEI GRA-UL10", # 华为 P8 82 | "HUAWEI MLA-AL10", # 华为 麦芒 5 83 | "HUAWEI MT7-AL00", # 华为 mate 7 84 | "HUAWEI MT7-TL00", # 华为 Mate 7 85 | "HUAWEI NXT-AL10", # 华为 Mate 8 86 | "HUAWEI P7-L00", # 华为 P7 87 | "HUAWEI RIO-AL00", # 华为 麦芒 4 88 | "HUAWEI TAG-AL00", # 华为 畅享 5S 89 | "HUAWEI VNS-AL00", # 华为 G9 90 | "IUNI N1", # 艾优尼 N1 91 | "IUNI i1", # 艾优尼 i1 92 | "KFAPWI", # Amazon Kindle Fire HDX 8.9 93 | "KFSOWI", # Amazon Kindle Fire HDX 7 94 | "KFTHWI", # Amazon Kindle Fire HD 95 | "KIW-TL00H", # 华为 荣耀 畅玩5X 96 | "KNT-AL10", # 华为 荣耀 V8 97 | "L55t", # 索尼 Xperia Z3 98 | "L55u", # 索尼 Xperia Z3 99 | "LEX626", # 乐视 乐S3 100 | "LEX720", # 乐视 乐Pro3 101 | "LG-D858", # LG G3 102 | "LG-H818", # LG G4 103 | "LG-H848", # LG G5 SE 104 | "LG-H868", # LG G5 105 | 
"LG-H968", # LG V10 106 | "LON-AL00", # 华为 Mate 9 Pro 107 | "LON-AL00-PD", # 华为 Mate 9 Porsche Design 108 | "LT18i", # Sony Ericsson Xperia Arc S 109 | "LT22i", # Sony Ericsson Xperia P 110 | "LT26i", # Sony Ericsson Xperia S 111 | "LT26ii", # Sony Ericsson Xperia SL 112 | "LT26w", # Sony Ericsson Xperia Acro S 113 | "Le X520", # 乐视 乐2 114 | "Le X620", # 乐视 乐2Pro 115 | "Le X820", # 乐视 乐Max2 116 | "Lenovo A3580", # 联想 黄金斗士 A8 畅玩 117 | "Lenovo A7600-m", # 联想 黄金斗士 S8 118 | "Lenovo A938t", # 联想 黄金斗士 Note8 119 | "Lenovo K10e70", # 联想 乐檬K10 120 | "Lenovo K30-T", # 联想 乐檬 K3 121 | "Lenovo K32C36", # 联想 乐檬3 122 | "Lenovo K50-t3s", # 联想 乐檬 K3 Note 123 | "Lenovo K52-T38", # 联想 乐檬 K5 Note 124 | "Lenovo K52e78", # Lenovo K5 Note 125 | "Lenovo P2c72", # 联想 P2 126 | "Lenovo X3c50", # 联想 乐檬 X3 127 | "Lenovo Z90-3", # 联想 VIBE Shot大拍 128 | "M040", # 魅族 MX 2 129 | "M1 E", # 魅蓝 E 130 | "M2-801w", # 华为 M2 131 | "M2017", # 金立 M2017 132 | "M3", # EBEN M3 133 | "M355", # 魅族 MX 3 134 | "MHA-AL00", # 华为 Mate 9 135 | "MI 4LTE", # 小米手机4 136 | "MI 4S", # 小米手机4S 137 | "MI 5", # 小米手机5 138 | "MI 5s Plus", # 小米手机5s Plus 139 | "MI 5s", # 小米手机5s 140 | "MI MAX", # 小米Max 141 | "MI Note Pro", # 小米Note顶配版 142 | "MI PAD 2", # 小米平板 2 143 | "MIX", # 小米MIX 144 | "MLA-UL00", # 华为 G9 Plus 145 | "MP1503", # 美图 M6 146 | "MP1512", # 美图 M6s 147 | "MT27i", # Sony Ericsson Xperia Sola 148 | "MX4 Pro", # 魅族 MX 4 Pro 149 | "MX4", # 魅族 MX 4 150 | "MX5", # 魅族 MX 5 151 | "MX6", # 魅族 MX 6 152 | "Meitu V4s", # 美图 V4s 153 | "Meizu M3 Max", # 魅蓝max 154 | "Meizu U20", # 魅蓝U20 155 | "Mi 5", 156 | "Mi 6", 157 | "Mi A1", # MI androidone 158 | "Mi Note 2", # 小米Note2 159 | "MiTV2S-48", # 小米电视2s 160 | "Moto G (4)", # 摩托罗拉 G⁴ Plus 161 | "N1", # Nokia N1 162 | "NCE-AL00", # 华为 畅享 6 163 | "NTS-AL00", # 华为 荣耀 Magic 164 | "NWI-AL10", # nova2s 165 | "NX508J", # 努比亚 Z9 166 | "NX511J", # 努比亚 小牛4 Z9 Mini 167 | "NX512J", # 努比亚 大牛 Z9 Max 168 | "NX513J", # 努比亚 My 布拉格 169 | "NX513J", # 努比亚 布拉格S 170 | "NX523J", # 努比亚 Z11 Max 171 | "NX529J", # 努比亚 小牛5 Z11 Mini 172 | "NX531J", # 努比亚 Z11 173 | "NX549J", # 努比亚 小牛6 Z11 MiniS 174 | "NX563J", # 努比亚Z17 175 | "Nexus 4", 176 | "Nexus 5X", 177 | "Nexus 6", 178 | "Nexus 6P", 179 | "Nexus 7", 180 | "Nexus 9", 181 | "Nokia_X", # Nokia X 182 | "Nokia_XL_4G", # Nokia XL 183 | "ONE A2001", # 一加手机2 184 | "ONE E1001", # 一加手机X 185 | "ONEPLUS A5010", # 一加5T 186 | "OPPO A53", # OPPO A53 187 | "OPPO A59M", # OPPO A59 188 | "OPPO A59s", # OPPO A59s 189 | "OPPO R11", 190 | "OPPO R7", # OPPO R7 191 | "OPPO R7Plus", # OPPO R7Plus 192 | "OPPO R7S", # OPPO R7S 193 | "OPPO R7sPlus", # OPPO R7sPlus 194 | "OPPO R9 Plustm A", # OPPO R9Plus 195 | "OPPO R9s Plus", # OPPO R9s Plus 196 | "OPPO R9s", 197 | "OPPO R9s", # OPPO R9s 198 | "OPPO R9tm", # OPPO R9 199 | "PE-TL10", # 华为 荣耀 6 Plus 200 | "PLK-TL01H", # 华为 荣耀 7 201 | "Pro 5", # 魅族 Pro 5 202 | "Pro 6", # 魅族 Pro 6 203 | "Pro 6s", # 魅族 Pro 6s 204 | "RM-1010", # Nokia Lumia 638 205 | "RM-1018", # Nokia Lumia 530 206 | "RM-1087", # Nokia Lumia 930 207 | "RM-1090", # Nokia Lumia 535 208 | "RM-867", # Nokia Lumia 920 209 | "RM-875", # Nokia Lumia 1020 210 | "RM-887", # Nokia Lumia 720 211 | "RM-892", # Nokia Lumia 925 212 | "RM-927", # Nokia Lumia 929 213 | "RM-937", # Nokia Lumia 1520 214 | "RM-975", # Nokia Lumia 635 215 | "RM-977", # Nokia Lumia 630 216 | "RM-984", # Nokia Lumia 830 217 | "RM-996", # Nokia Lumia 1320 218 | "Redmi 3S", # 红米3s 219 | "Redmi 4", # 小米 红米4 220 | "Redmi 4A", # 小米 红米4A 221 | "Redmi Note 2", # 小米 红米Note2 222 | "Redmi Note 3", # 小米 红米Note3 223 | "Redmi Note 4", # 小米 红米Note4 224 | 
"Redmi Pro", # 小米 红米Pro 225 | "S3", # 佳域S3 226 | "SCL-TL00H", # 华为 荣耀 4A 227 | "SD4930UR", # Amazon Fire Phone 228 | "SH-03G", # 夏普 Aquos Zeta SH-03G 229 | "SH-04F", # 夏普 Aquos Zeta SH-04F 230 | "SHV31", # 夏普 Aquos Serie Mini SHV31 231 | "SM-A5100", # Samsung Galaxy A5 232 | "SM-A7100", # Samsung Galaxy A7 233 | "SM-A8000", # Samsung Galaxy A8 234 | "SM-A9000", # Samsung Galaxy A9 235 | "SM-A9100", # Samsung Galaxy A9 高配版 236 | "SM-C5000", # Samsung Galaxy C5 237 | "SM-C5010", # Samsung Galaxy C5 Pro 238 | "SM-C7000", # Samsung Galaxy C7 239 | "SM-C7010", # Samsung Galaxy C7 Pro 240 | "SM-C9000", # Samsung Galaxy C9 Pro 241 | "SM-G1600", # Samsung Galaxy Folder 242 | "SM-G5500", # Samsung Galaxy On5 243 | "SM-G6000", # Samsung Galaxy On7 244 | "SM-G7100", # Samsung Galaxy On7(2016) 245 | "SM-G7200", # Samsung Galasy Grand Max 246 | "SM-G9198", # Samsung 领世旗舰Ⅲ 247 | "SM-G9208", # Samsung Galaxy S6 248 | "SM-G9250", # Samsung Galasy S7 Edge 249 | "SM-G9280", # Samsung Galaxy S6 Edge+ 250 | "SM-G9300", # Samsung Galaxy S7 251 | "SM-G9350", # Samsung Galaxy S7 Edge 252 | "SM-G9500", # Samsung Galaxy S8 253 | "SM-G9550", # Samsung Galaxy S8+ 254 | "SM-G9600", # Samsung Galaxy S9 255 | "SM-G960F", # Galaxy S9 Dual SIM 256 | "SM-G9650", # Samsung Galaxy S9+ 257 | "SM-G965F", # Galaxy S9+ Dual SIM 258 | "SM-J3109", # Samsung Galaxy J3 259 | "SM-J3110", # Samsung Galaxy J3 Pro 260 | "SM-J327A", # Samsung Galaxy J3 Emerge 261 | "SM-J5008", # Samsung Galaxy J5 262 | "SM-J7008", # Samsung Galaxy J7 263 | "SM-N9108V", # Samsung Galasy Note4 264 | "SM-N9200", # Samsung Galaxy Note5 265 | "SM-N9300", # Samsung Galaxy Note 7 266 | "SM-N935S", # Samsung Galaxy Note Fan Edition 267 | "SM-N9500", # Samsung Galasy Note8 268 | "SM-W2015", # Samsung W2015 269 | "SM-W2016", # Samsung W2016 270 | "SM-W2017", # Samsung W2017 271 | "SM705", # 锤子 T1 272 | "SM801", # 锤子 T2 273 | "SM901", # 锤子 M1 274 | "SM919", # 锤子 M1L 275 | "ST18i", # Sony Ericsson Xperia Ray 276 | "ST25i", # Sony Ericsson Xperia U 277 | "STV100-1", # 黑莓Priv 278 | "Signature Touch", # Vertu Signature Touch 279 | "TA-1000", # Nokia 6 280 | "TA-1000", # HMD Nokia 6 281 | "TA-1041", # Nokia 7 282 | "VERTU Ti", # Vertu Ti 283 | "VIE-AL10", # 华为 P9 Plus 284 | "VIVO X20", 285 | "VIVO X20A", 286 | "W909", # 金立 天鉴 W909 287 | "X500", # 乐视 乐1S 288 | "X608", # 乐视 乐1 289 | "X800", # 乐视 乐1Pro 290 | "X900", # 乐视 乐Max 291 | "XT1085", # 摩托罗拉 X 292 | "XT1570", # 摩托罗拉 X Style 293 | "XT1581", # 摩托罗拉 X 极 294 | "XT1585", # 摩托罗拉 Droid Turbo 2 295 | "XT1635", # 摩托罗拉 Z Play 296 | "XT1635-02", # 摩托罗拉 Z Play 297 | "XT1650", # 摩托罗拉 Z 298 | "XT1650-05", # 摩托罗拉 Z 299 | "XT1706", # 摩托罗拉 E³ POWER 300 | "YD201", # YotaPhone2 301 | "YD206", # YotaPhone2 302 | "YQ60", # 锤子 坚果 303 | "ZTE A2015", # 中兴 AXON 天机 304 | "ZTE A2017", # 中兴 AXON 天机 7 305 | "ZTE B2015", # 中兴 AXON 天机 MINI 306 | "ZTE BV0720", # 中兴 Blade A2 307 | "ZTE BV0730", # 中兴 Blade A2 Plus 308 | "ZTE C2016", # 中兴 AXON 天机 MAX 309 | "ZTE C2017", # 中兴 AXON 天机 7 MAX 310 | "ZTE G720C", # 中兴 星星2号 311 | "ZUK Z2121", # ZUK Z2 Pro 312 | "ZUK Z2131", # ZUK Z2 313 | "ZUK Z2151", # ZUK Edge 314 | "ZUK Z2155", # ZUK Edge L 315 | "m030", # 魅族mx 316 | "m1 metal", # 魅蓝metal 317 | "m1 note", # 魅蓝 Note 318 | "m1", # 魅蓝 319 | "m2 note", # 魅蓝 Note 2 320 | "m2", # 魅蓝 2 321 | "m3 note", # 魅蓝 Note 3 322 | "m3", # 魅蓝 3 323 | "m3s", # 魅蓝 3S 324 | "m9", # 魅族m9 325 | "marlin", # Google Pixel XL 326 | "sailfish", # Google Pixel 327 | "vivo V3Max", # vivo V3Max 328 | "vivo X6D", # vivo X6 329 | "vivo X6PlusD", # vivo X6Plus 330 | "vivo X6S", # vivo X6S 
331 | "vivo X6SPlus", # vivo X6SPlus 332 | "vivo X7", # vivo X7 333 | "vivo X7Plus", # vivo X7Plus 334 | "vivo X9", # vivo X9 335 | "vivo X9Plus", # vivo X9Plus 336 | "vivo Xplay5A 金", # vivo Xplay5 337 | "vivo Xplay6", # vivo Xplay6 338 | "vivo Y66", # vivo Y66 339 | "vivo Y67", # vivo Y67 340 | "z1221", # ZUK Z1 341 | ] 342 | 343 | 344 | # 根据key计算出imei 345 | def sum_IMEI(key: str): 346 | hs = 53202347234687234 347 | for k in key: 348 | hs += (hs << 5) + ord(k) 349 | hs %= int(1e15) 350 | if hs < int(1e14): 351 | hs += int(1e14) 352 | return str(int(hs)) 353 | 354 | 355 | # 根据key, 从 PHONE_MODEL_DATABASE 中取出手机型号 356 | def get_phone_model(key: str): 357 | if len(PHONE_MODEL_DATABASE) <= 0: 358 | return "S3" 359 | hs = 2134 360 | for k in key: 361 | hs += (hs << 4) + ord(k) 362 | hs %= len(PHONE_MODEL_DATABASE) 363 | return PHONE_MODEL_DATABASE[hs] 364 | -------------------------------------------------------------------------------- /alipcs_py/alipcs/tree.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, List, Iterable, Optional 2 | from collections import OrderedDict 3 | 4 | from alipcs_py.alipcs.api import AliPCSApi 5 | from alipcs_py.alipcs.inner import PcsFile 6 | from alipcs_py.common.path import split_posix_path 7 | 8 | 9 | class _Node: 10 | def __init__(self, file_id: str, pcs_file: PcsFile): 11 | self.file_id = file_id 12 | self.pcs_file = pcs_file 13 | self.sub_nodes: OrderedDict[str, _Node] = OrderedDict() 14 | 15 | 16 | class PathTree: 17 | def __init__(self, api: AliPCSApi): 18 | self._api = api 19 | self.root = _Node("root", PcsFile.root()) 20 | 21 | def search(self, remotepath: str = "", topdown: Iterable[str] = [], root: _Node = None) -> Optional[PcsFile]: 22 | if not topdown: 23 | assert remotepath.startswith("/") 24 | topdown = split_posix_path(remotepath) 25 | 26 | root = root or self.root 27 | return self._dfs(list(topdown), root) 28 | 29 | def _dfs(self, topdown: List[str], root: _Node) -> Optional[PcsFile]: 30 | if not topdown: 31 | return root.pcs_file 32 | 33 | next_key = topdown[0] 34 | root_pcs_file = root.pcs_file 35 | 36 | if next_key not in root.sub_nodes: 37 | for pf in self._api.list_iter(root_pcs_file.file_id): 38 | root.sub_nodes[pf.name] = _Node(pf.file_id, pf) 39 | if pf.name == next_key: 40 | break 41 | 42 | if next_key not in root.sub_nodes: 43 | return None 44 | 45 | return self._dfs(topdown[1:], root.sub_nodes[next_key]) 46 | -------------------------------------------------------------------------------- /alipcs_py/app/__init__.py: -------------------------------------------------------------------------------- 1 | from types import SimpleNamespace 2 | from alipcs_py.app.app import app as _app 3 | 4 | 5 | def main(): 6 | _app(obj=SimpleNamespace()) 7 | -------------------------------------------------------------------------------- /alipcs_py/app/account.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, List, Dict 2 | from dataclasses import dataclass 3 | from pathlib import Path 4 | import pickle 5 | 6 | from alipcs_py.alipcs import AliPCSApi, AliPCSApiMix, PcsUser 7 | from alipcs_py.common.path import PathType, join_path 8 | 9 | 10 | @dataclass 11 | class Account: 12 | user: PcsUser 13 | 14 | # Account Name which can be set by hand 15 | account_name: str = "" 16 | 17 | # current working directory 18 | pwd: str = "/" 19 | encrypt_password: Optional[str] = None 20 | 21 | # Is the account is used now 22 | on: bool = False 
23 | 24 | def pcsapi(self) -> AliPCSApi: 25 | web_refresh_token = self.user.web_refresh_token 26 | web_access_token = self.user.web_access_token 27 | web_token_type = self.user.web_token_type or "Bearer" 28 | web_expire_time = self.user.web_expire_time or 0 29 | 30 | openapi_refresh_token = self.user.openapi_refresh_token 31 | openapi_access_token = self.user.openapi_access_token 32 | openapi_token_type = self.user.openapi_token_type or "Bearer" 33 | openapi_expire_time = self.user.openapi_expire_time or 0 34 | 35 | client_id = self.user.client_id 36 | client_secret = self.user.client_secret 37 | client_server = self.user.client_server 38 | 39 | user_id = self.user.user_id or "" 40 | user_name = self.user.user_name or "" 41 | nick_name = self.user.nick_name or "" 42 | device_id = self.user.device_id or "" 43 | default_drive_id = self.user.default_drive_id or "" 44 | role = self.user.role or "" 45 | status = self.user.status or "" 46 | 47 | assert web_refresh_token, f"{self}.user.web_refresh_token is None" 48 | return AliPCSApiMix( 49 | web_refresh_token, 50 | web_access_token=web_access_token or "", 51 | web_token_type=web_token_type, 52 | web_expire_time=web_expire_time, 53 | openapi_refresh_token=openapi_refresh_token or "", 54 | openapi_access_token=openapi_access_token or "", 55 | openapi_token_type=openapi_token_type, 56 | openapi_expire_time=openapi_expire_time, 57 | client_id=client_id or "", 58 | client_secret=client_secret or "", 59 | client_server=client_server or "", 60 | user_id=user_id, 61 | user_name=user_name, 62 | nick_name=nick_name, 63 | device_id=device_id, 64 | default_drive_id=default_drive_id, 65 | role=role, 66 | status=status, 67 | ) 68 | 69 | @staticmethod 70 | def from_refresh_token( 71 | web_refresh_token: str, 72 | web_access_token: str = "", 73 | web_token_type: str = "Bearer", 74 | web_expire_time: int = 0, 75 | openapi_refresh_token: str = "", 76 | openapi_access_token: str = "", 77 | openapi_token_type: str = "Bearer", 78 | openapi_expire_time: int = 0, 79 | client_id: str = "", 80 | client_secret: str = "", 81 | client_server: str = "", 82 | account_name: str = "", 83 | ) -> "Account": 84 | api = AliPCSApiMix( 85 | web_refresh_token, 86 | web_access_token=web_access_token or "", 87 | web_token_type=web_token_type, 88 | web_expire_time=web_expire_time, 89 | openapi_refresh_token=openapi_refresh_token or "", 90 | openapi_access_token=openapi_access_token or "", 91 | openapi_token_type=openapi_token_type, 92 | openapi_expire_time=openapi_expire_time, 93 | client_id=client_id or "", 94 | client_secret=client_secret or "", 95 | client_server=client_server or "", 96 | ) 97 | user = api.user_info() 98 | 99 | return Account( 100 | user, 101 | account_name or user.user_id, # Default use `user_id` 102 | ) 103 | 104 | 105 | class AccountManager: 106 | """Account Manager 107 | 108 | Manage all accounts 109 | """ 110 | 111 | def __init__(self, data_path: Optional[PathType] = None): 112 | self._accounts: Dict[str, Account] = {} # user_id (str) -> Account 113 | self._who: Optional[str] = None # user_id (str) 114 | self._data_path = data_path 115 | self._apis: Dict[str, AliPCSApi] = {} 116 | 117 | @staticmethod 118 | def load_data(data_path: PathType) -> "AccountManager": 119 | try: 120 | data_path = Path(data_path).expanduser() 121 | am = pickle.load(data_path.open("rb")) 122 | am._data_path = data_path 123 | return am 124 | except Exception: 125 | return AccountManager(data_path=data_path) 126 | 127 | @property 128 | def accounts(self) -> List[Account]: 129 | """All 
accounts""" 130 | 131 | return list(self._accounts.values()) 132 | 133 | def get_api(self, user_id: Optional[str] = None) -> Optional[AliPCSApi]: 134 | user_id = user_id or self._who 135 | if user_id: 136 | account = self._accounts.get(user_id) 137 | if not account: 138 | return None 139 | 140 | if user_id in self._apis: 141 | api = self._apis[user_id] 142 | else: 143 | api = account.pcsapi() 144 | self._apis[user_id] = api 145 | return api 146 | return None 147 | 148 | def set_account_name(self, account_name: str, user_id: Optional[str] = None): 149 | """Set account name""" 150 | 151 | user_id = user_id or self._who 152 | 153 | assert user_id, "No recent user" 154 | 155 | account = self._accounts.get(user_id) 156 | 157 | assert account 158 | 159 | account.account_name = account_name 160 | 161 | def set_encrypt_password(self, encrypt_password: Optional[str] = None): 162 | """Set encryption key""" 163 | 164 | assert self._who, "No recent user" 165 | 166 | account = self._accounts.get(self._who) 167 | 168 | assert account 169 | 170 | account.encrypt_password = encrypt_password 171 | 172 | def cd(self, remotedir: str = "/"): 173 | """Change current working directory""" 174 | 175 | assert self._who, "No recent user" 176 | 177 | account = self._accounts.get(self._who) 178 | 179 | assert account 180 | 181 | pwd = join_path(account.pwd, remotedir) 182 | account.pwd = pwd 183 | 184 | @property 185 | def pwd(self) -> str: 186 | """Current working directory of recent user""" 187 | 188 | assert self._who, "No recent user" 189 | 190 | account = self._accounts.get(self._who) 191 | 192 | assert account 193 | 194 | return account.pwd 195 | 196 | def who(self, user_id: Optional[str] = None) -> Optional[Account]: 197 | """Return recent `Account`""" 198 | 199 | user_id = user_id or self._who 200 | if user_id: 201 | return self._accounts.get(user_id) 202 | else: 203 | return None 204 | 205 | def refresh(self, user_id: Optional[str] = None): 206 | """Refresh the refresh_token and access_token""" 207 | 208 | user_id = user_id or self._who 209 | if user_id: 210 | api = self.get_api(user_id) 211 | if api: 212 | api.refresh() 213 | 214 | def update(self, user_id: Optional[str] = None): 215 | """Update user_info""" 216 | 217 | user_id = user_id or self._who 218 | if user_id: 219 | api = self.get_api(user_id) 220 | if api: 221 | account = self._accounts.get(user_id) 222 | assert account 223 | 224 | user = api.user_info() 225 | account.user = user 226 | 227 | def su(self, user_id: str): 228 | """Change recent user with `PcsUser.user_id` 229 | 230 | Args: 231 | user_id (str): `PcsUser.user_id` 232 | """ 233 | 234 | assert user_id in self._accounts, f"No user {user_id}" 235 | 236 | self._switch(user_id) 237 | 238 | def _switch(self, user_id: str): 239 | for uid, account in list(self._accounts.items()): 240 | if uid == user_id: 241 | account.on = True 242 | self._who = account.user.user_id 243 | else: 244 | account.on = False 245 | 246 | def add_account(self, account: Account): 247 | """Add an account to the manager""" 248 | 249 | self._accounts[account.user.user_id] = account 250 | 251 | def delete_account(self, user_id: str): 252 | """Delete an account 253 | 254 | Args: 255 | user_id (str): `PcsUser.user_id` 256 | """ 257 | 258 | if user_id in self._accounts: 259 | del self._accounts[user_id] 260 | if user_id == self._who: 261 | self._who = None 262 | 263 | def save(self, data_path: Optional[PathType] = None): 264 | """Serialize to local path""" 265 | 266 | data_path = data_path or self._data_path 267 | assert 
data_path, "No data path" 268 | 269 | data_path = Path(data_path).expanduser() 270 | if not data_path.parent.exists(): 271 | data_path.parent.mkdir(parents=True, exist_ok=True) 272 | 273 | apis = self._apis 274 | self._apis = {} # Ignore to save apis 275 | 276 | pickle.dump(self, open(data_path, "wb")) 277 | 278 | self._apis = apis 279 | -------------------------------------------------------------------------------- /alipcs_py/app/config.py: -------------------------------------------------------------------------------- 1 | from alipcs_py.config import AppConfig 2 | from alipcs_py.storage.tables import ( 3 | connect_sqlite, 4 | bind_tables, 5 | create_tables, 6 | modify_table, 7 | PcsSharedLinkInfoTable, 8 | PcsFileTable, 9 | ) 10 | from alipcs_py.commands.env import CONFIG_PATH, SHARED_STORE_PATH 11 | 12 | 13 | def init_config(app_config: AppConfig): 14 | # Connect to shared store database 15 | if app_config.share.store: 16 | db, migrator = connect_sqlite(str(SHARED_STORE_PATH)) 17 | bind_tables([PcsFileTable, PcsSharedLinkInfoTable], db) 18 | create_tables([PcsFileTable, PcsSharedLinkInfoTable], db) 19 | 20 | modify_table(PcsFileTable, db, migrator) 21 | modify_table(PcsSharedLinkInfoTable, db, migrator) 22 | 23 | if not CONFIG_PATH.exists(): 24 | save_config(app_config) 25 | 26 | 27 | def save_config(app_config: AppConfig): 28 | app_config.dump(str(CONFIG_PATH)) 29 | -------------------------------------------------------------------------------- /alipcs_py/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PeterDing/AliPCS-Py/beeb515ab8f963c795f99c82d2e3a88487b0f0ae/alipcs_py/commands/__init__.py -------------------------------------------------------------------------------- /alipcs_py/commands/cat.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from alipcs_py.alipcs import AliPCSApi 4 | from alipcs_py.commands.display import display_blocked_remotepath 5 | 6 | import chardet # type: ignore 7 | 8 | 9 | def cat( 10 | api: AliPCSApi, 11 | remotepath: str, 12 | encoding: Optional[str] = None, 13 | encrypt_password: bytes = b"", 14 | ): 15 | pcs_file = api.get_file(remotepath=remotepath) 16 | if not pcs_file: 17 | return 18 | 19 | fs = api.file_stream(pcs_file.file_id, encrypt_password=encrypt_password) 20 | if not fs: 21 | display_blocked_remotepath(remotepath) 22 | return 23 | 24 | cn = fs.read() 25 | if cn: 26 | if not encoding: 27 | r = chardet.detect(cn) 28 | if r["confidence"] > 0.5: 29 | encoding = r["encoding"] 30 | 31 | if encoding: 32 | print(cn.decode(encoding)) 33 | else: 34 | print(cn) 35 | -------------------------------------------------------------------------------- /alipcs_py/commands/crypto.py: -------------------------------------------------------------------------------- 1 | from os import PathLike 2 | from pathlib import Path 3 | 4 | from alipcs_py.common.io import to_decryptio, READ_SIZE 5 | from alipcs_py.common.path import exists 6 | 7 | 8 | def decrypt_file(from_encrypted: PathLike, to_decrypted: PathLike, encrypt_password: bytes = b""): 9 | assert exists(from_encrypted) 10 | 11 | dio = to_decryptio(open(from_encrypted, "rb"), encrypt_password=encrypt_password) 12 | 13 | dpath = Path(to_decrypted) 14 | dir_ = dpath.parent 15 | if not dir_.exists(): 16 | dir_.mkdir(parents=True, exist_ok=True) 17 | 18 | with dpath.open("wb") as dfd: 19 | while True: 20 | data = dio.read(READ_SIZE) 21 | if not data: 22 
| break 23 | dfd.write(data) 24 | -------------------------------------------------------------------------------- /alipcs_py/commands/env.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | # Local data directory 4 | DATA_DIR = Path("~").expanduser() / ".alipcs-py" 5 | 6 | # Account data path 7 | ACCOUNT_DATA_PATH = DATA_DIR / "accounts.pk" 8 | 9 | # Rapiduploadinfo database path 10 | RAPIDUPLOADINFO_PATH = DATA_DIR / "rapiduploadinfo.sqlite3" 11 | 12 | # Shared store database path 13 | SHARED_STORE_PATH = DATA_DIR / "shared-store.sqlite3" 14 | 15 | # Logging path 16 | LOG_PATH = DATA_DIR / "running.log" 17 | 18 | # Logging level 19 | LOG_LEVEL = "CRITICAL" 20 | 21 | # Configuration path 22 | CONFIG_PATH = DATA_DIR / "config.toml" 23 | -------------------------------------------------------------------------------- /alipcs_py/commands/errors.py: -------------------------------------------------------------------------------- 1 | class CommandError(Exception): 2 | pass 3 | -------------------------------------------------------------------------------- /alipcs_py/commands/file_operators.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from alipcs_py.alipcs import AliPCSApi 4 | from alipcs_py.alipcs.inner import PcsFile, FromTo 5 | from alipcs_py.common.path import ( 6 | split_posix_path, 7 | posix_path_basename, 8 | join_path, 9 | ) 10 | from alipcs_py.commands.display import display_files, display_from_to 11 | 12 | 13 | def makedir(api: AliPCSApi, *remotedirs: str, show: bool = False) -> List[PcsFile]: 14 | """Make directories (`remotepaths`).""" 15 | 16 | pcs_files = [] 17 | for d in remotedirs: 18 | pf = api.makedir_path(d)[0] 19 | pcs_files.append(pf) 20 | 21 | if show: 22 | display_files(pcs_files, "", show_file_id=True, show_absolute_path=True) 23 | 24 | return pcs_files 25 | 26 | 27 | def move(api: AliPCSApi, *remotepaths: str, show: bool = False): 28 | """Move files or directories to a destination directory. 29 | 30 | Move the paths(`remotepaths[:-1]`) to the directory(`remotepaths[-1]`). 31 | """ 32 | 33 | assert len(remotepaths) > 1 34 | 35 | dest_remotepath = remotepaths[-1] 36 | pcs_files = api.paths(*remotepaths) 37 | 38 | # Make sure destination directory exists 39 | if not pcs_files[-1]: 40 | dest_pcs_file = api.makedir_path(dest_remotepath)[0] 41 | pcs_files[-1] = dest_pcs_file 42 | 43 | file_ids = [pf.file_id for pf in pcs_files if pf] 44 | oks = api.move(*file_ids) 45 | 46 | from_to_list: List[FromTo[str, str]] = [] 47 | j = 0 48 | for i, pf in enumerate(pcs_files[:-1]): 49 | if not pf: 50 | continue 51 | 52 | if oks[j]: 53 | from_to = ( 54 | remotepaths[i], 55 | join_path(dest_remotepath, posix_path_basename(remotepaths[i])), 56 | ) 57 | from_to_list.append(from_to) 58 | j += 1 59 | 60 | if show: 61 | display_from_to(*from_to_list) 62 | 63 | 64 | def rename(api: AliPCSApi, remotepath: str, new_name: str, show: bool = False): 65 | """Rename a file or directory. 66 | 67 | e.g. 
68 | rename(api, remotepath="/some/file", new_name="new_file") is equal to 69 | move(api, "/some/file", "/some/new_file") 70 | """ 71 | 72 | pcs_file = api.path(remotepath) 73 | 74 | if not pcs_file: 75 | return 76 | 77 | api.rename(pcs_file.file_id, new_name) 78 | pcs_file.path = join_path(*split_posix_path(pcs_file.path)[:-1], new_name) 79 | 80 | if show: 81 | display_files([pcs_file], "", show_file_id=True, show_absolute_path=True) 82 | 83 | 84 | def copy(api: AliPCSApi, *remotepaths: str, show: bool = False): 85 | """Copy files or directories to a destination directory. 86 | 87 | Copy the paths(`remotepaths[:-1]`) to the directory(`remotepaths[-1]`). 88 | """ 89 | 90 | assert len(remotepaths) > 1 91 | 92 | dest_remotepath = remotepaths[-1] 93 | pcs_files = api.paths(*remotepaths) 94 | 95 | non_exist = [(rp, pf) for rp, pf in zip(remotepaths[:-1], pcs_files[:-1]) if pf is None] 96 | assert len(non_exist) == 0, f"Paths not exist: {non_exist}" 97 | 98 | # Make sure destination directory exists 99 | if not pcs_files[-1]: 100 | dest_pcs_file = api.makedir_path(dest_remotepath)[0] 101 | pcs_files[-1] = dest_pcs_file 102 | 103 | file_ids = [pf.file_id for pf in pcs_files if pf] 104 | new_pfs = api.copy(*file_ids) 105 | if show: 106 | display_files(new_pfs, "", show_file_id=True, show_absolute_path=True) 107 | 108 | 109 | def remove(api: AliPCSApi, *remotepaths: str): 110 | """Remove files or directories to the trash.""" 111 | 112 | pcs_files = api.paths(*remotepaths) 113 | file_ids = [pf.file_id for pf in pcs_files if pf] 114 | api.remove(*file_ids) 115 | -------------------------------------------------------------------------------- /alipcs_py/commands/list_files.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, List 2 | 3 | from alipcs_py.alipcs import AliPCSApi 4 | from alipcs_py.alipcs.inner import PcsFile 5 | from alipcs_py.common.path import join_path 6 | from alipcs_py.commands.log import get_logger 7 | from alipcs_py.commands.sifter import Sifter, sift 8 | from alipcs_py.commands.display import display_files 9 | 10 | from rich import print 11 | 12 | logger = get_logger(__name__) 13 | 14 | DEFAULT_MAX_WORKERS = 10 15 | 16 | 17 | def list_file( 18 | api: AliPCSApi, 19 | remotepath: str, 20 | file_id: str = None, 21 | share_id: str = None, 22 | desc: bool = False, 23 | name: bool = False, 24 | time: bool = False, 25 | size: bool = False, 26 | all: bool = False, 27 | limit: int = 200, 28 | url_expire_sec: int = 14400, 29 | recursive: bool = False, 30 | sifters: List[Sifter] = [], 31 | highlight: bool = False, 32 | rapiduploadinfo_file: Optional[str] = None, 33 | user_id: Optional[str] = None, 34 | user_name: Optional[str] = None, 35 | show_size: bool = False, 36 | show_date: bool = False, 37 | show_file_id: bool = False, 38 | show_hash: bool = False, 39 | show_absolute_path: bool = False, 40 | show_dl_link: bool = False, 41 | csv: bool = False, 42 | only_dl_link: bool = False, 43 | ): 44 | pcs_file = api.get_file(remotepath=remotepath, file_id=file_id, share_id=share_id) 45 | if pcs_file is None: 46 | return 47 | 48 | is_dir = pcs_file.is_dir 49 | if is_dir: 50 | pcs_files = [] 51 | for sub_pf in api.list_iter( 52 | pcs_file.file_id, 53 | share_id=share_id, 54 | desc=desc, 55 | name=name, 56 | time=time, 57 | size=size, 58 | all=all, 59 | limit=limit, 60 | url_expire_sec=url_expire_sec, 61 | ): 62 | sub_pf.path = join_path(remotepath, sub_pf.path) 63 | pcs_files.append(sub_pf) 64 | else: 65 | pcs_files = [pcs_file] 66 | 
67 | pcs_files = sift(pcs_files, sifters, recursive=recursive) 68 | if not pcs_files: 69 | return 70 | 71 | if show_dl_link: 72 | for pcs_file in pcs_files: 73 | if only_dl_link: 74 | print(pcs_file.download_url) 75 | 76 | if not only_dl_link: 77 | display_files( 78 | pcs_files, 79 | remotepath, 80 | sifters=sifters, 81 | highlight=highlight, 82 | show_size=show_size, 83 | show_date=show_date, 84 | show_file_id=show_file_id, 85 | show_hash=show_hash, 86 | show_absolute_path=show_absolute_path, 87 | show_dl_link=show_dl_link, 88 | csv=csv, 89 | ) 90 | 91 | if is_dir and recursive: 92 | for pcs_file in pcs_files: 93 | if pcs_file.is_dir: 94 | list_file( 95 | api, 96 | pcs_file.path, 97 | file_id=pcs_file.file_id, 98 | share_id=share_id, 99 | desc=desc, 100 | name=name, 101 | time=time, 102 | size=size, 103 | all=all, 104 | limit=limit, 105 | url_expire_sec=url_expire_sec, 106 | recursive=recursive, 107 | sifters=sifters, 108 | highlight=highlight, 109 | rapiduploadinfo_file=rapiduploadinfo_file, 110 | user_id=user_id, 111 | user_name=user_name, 112 | show_size=show_size, 113 | show_date=show_date, 114 | show_file_id=show_file_id, 115 | show_hash=show_hash, 116 | show_absolute_path=show_absolute_path, 117 | show_dl_link=show_dl_link, 118 | csv=csv, 119 | only_dl_link=only_dl_link, 120 | ) 121 | 122 | 123 | def list_files( 124 | api: AliPCSApi, 125 | *remotepaths: str, 126 | file_ids: List[str] = [], 127 | share_id: str = None, 128 | desc: bool = False, 129 | name: bool = False, 130 | time: bool = False, 131 | size: bool = False, 132 | all: bool = False, 133 | limit: int = 200, 134 | url_expire_sec: int = 14400, 135 | recursive: bool = False, 136 | sifters: List[Sifter] = [], 137 | highlight: bool = False, 138 | rapiduploadinfo_file: Optional[str] = None, 139 | user_id: Optional[str] = None, 140 | user_name: Optional[str] = None, 141 | show_size: bool = False, 142 | show_date: bool = False, 143 | show_file_id: bool = False, 144 | show_hash: bool = False, 145 | show_absolute_path: bool = False, 146 | show_dl_link: bool = False, 147 | csv: bool = False, 148 | only_dl_link: bool = False, 149 | ): 150 | for rp in remotepaths: 151 | list_file( 152 | api, 153 | rp, 154 | share_id=share_id, 155 | desc=desc, 156 | name=name, 157 | time=time, 158 | size=size, 159 | all=all, 160 | limit=limit, 161 | url_expire_sec=url_expire_sec, 162 | recursive=recursive, 163 | sifters=sifters, 164 | highlight=highlight, 165 | rapiduploadinfo_file=rapiduploadinfo_file, 166 | user_id=user_id, 167 | user_name=user_name, 168 | show_size=show_size, 169 | show_date=show_date, 170 | show_file_id=show_file_id, 171 | show_hash=show_hash, 172 | show_absolute_path=show_absolute_path, 173 | show_dl_link=show_dl_link, 174 | csv=csv, 175 | only_dl_link=only_dl_link, 176 | ) 177 | 178 | for file_id in file_ids: 179 | list_file( 180 | api, 181 | "", 182 | file_id=file_id, 183 | share_id=share_id, 184 | desc=desc, 185 | name=name, 186 | time=time, 187 | size=size, 188 | all=all, 189 | limit=limit, 190 | url_expire_sec=url_expire_sec, 191 | recursive=recursive, 192 | sifters=sifters, 193 | highlight=highlight, 194 | rapiduploadinfo_file=rapiduploadinfo_file, 195 | user_id=user_id, 196 | user_name=user_name, 197 | show_size=show_size, 198 | show_date=show_date, 199 | show_file_id=show_file_id, 200 | show_hash=show_hash, 201 | show_absolute_path=show_absolute_path, 202 | show_dl_link=show_dl_link, 203 | csv=csv, 204 | only_dl_link=only_dl_link, 205 | ) 206 | -------------------------------------------------------------------------------- 
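A minimal usage sketch for the listing helpers above, assuming an already-authenticated `AliPCSApi` instance named `api`; the remote paths `/movies` and `/books` are hypothetical placeholders, and any other keyword shown in the signatures can be passed the same way:

    from alipcs_py.alipcs import AliPCSApi
    from alipcs_py.commands.list_files import list_file, list_files

    # api = AliPCSApi(...)  # assumed to be an authenticated client
    list_file(api, "/movies", recursive=True, show_size=True, show_file_id=True)
    list_files(api, "/movies", "/books", show_absolute_path=True, csv=True)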
/alipcs_py/commands/log.py: -------------------------------------------------------------------------------- 1 | from typing import cast 2 | import os 3 | from pathlib import Path 4 | 5 | from alipcs_py.common.log import LogLevels, TLogLevel, get_logger as _get_logger 6 | from alipcs_py.commands.env import LOG_LEVEL, LOG_PATH 7 | 8 | 9 | def get_logger(name: str): 10 | _LOG_PATH = Path(os.getenv("LOG_PATH") or LOG_PATH).expanduser() 11 | _LOG_LEVEL: TLogLevel = cast( 12 | TLogLevel, 13 | os.getenv("LOG_LEVEL", LOG_LEVEL).upper(), 14 | ) 15 | 16 | assert _LOG_LEVEL in LogLevels 17 | 18 | return _get_logger(name, filename=_LOG_PATH, level=_LOG_LEVEL) 19 | -------------------------------------------------------------------------------- /alipcs_py/commands/login.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple 2 | from io import StringIO 3 | import time 4 | 5 | import qrcode 6 | 7 | 8 | _print = print 9 | 10 | from rich import print 11 | 12 | from alipcs_py.alipcs import AliOpenAuth, AliPCSError 13 | from alipcs_py.alipcs.inner import AuthInfo 14 | 15 | 16 | def openapi_qrcode_login(client_id: str, client_secret: str, client_server: str) -> AuthInfo: 17 | aliopenauth = AliOpenAuth(client_id=client_id, client_secret=client_secret, client_server=client_server) 18 | 19 | # Get qrcode info 20 | info = aliopenauth.get_qrcode_info() 21 | sid = info["sid"] 22 | 23 | qrcode_url = f"https://www.aliyundrive.com/o/oauth/authorize?sid={sid}" 24 | 25 | qr = qrcode.QRCode() 26 | qr.add_data(qrcode_url) 27 | f = StringIO() 28 | qr.print_ascii(out=f, tty=False, invert=True) 29 | f.seek(0) 30 | _print(f.read()) 31 | 32 | print(" [red b]Please scan the qrcode to login in 120 seconds[/red b]") 33 | 34 | interval = 2 * 60 # wait 2min 35 | sleep = 2 36 | 37 | auth_code = "" 38 | for _ in range(interval // sleep): 39 | time.sleep(2) 40 | 41 | info = aliopenauth.scan_status(sid) 42 | if info["status"] == "LoginSuccess": 43 | auth_code = info["authCode"] 44 | break 45 | 46 | if not auth_code: 47 | raise AliPCSError("Login failed") 48 | 49 | info = aliopenauth.get_refresh_token(auth_code) 50 | return AuthInfo.from_(info) 51 | -------------------------------------------------------------------------------- /alipcs_py/commands/play.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, List 2 | from enum import Enum 3 | import os 4 | import shutil 5 | import subprocess 6 | import random 7 | import time 8 | 9 | from alipcs_py.alipcs import AliPCSApi, PcsFile 10 | from alipcs_py.alipcs.errors import AliPCSError 11 | from alipcs_py.commands.sifter import Sifter, sift 12 | from alipcs_py.commands.download import USER_AGENT 13 | from alipcs_py.commands.errors import CommandError 14 | from alipcs_py.common.file_type import MEDIA_EXTS 15 | 16 | from rich import print 17 | 18 | 19 | _print = __builtins__["print"] # type: ignore 20 | 21 | 22 | def _with_media_ext(path: str) -> bool: 23 | ext = os.path.splitext(path)[-1].lower() 24 | if ext in MEDIA_EXTS: 25 | return True 26 | else: 27 | return False 28 | 29 | 30 | class Player(Enum): 31 | mpv = "mpv" # https://mpv.io 32 | 33 | def which(self) -> Optional[str]: 34 | return shutil.which(self.value) 35 | 36 | def play( 37 | self, 38 | url: str, 39 | quiet: bool = False, 40 | player_params: List[str] = [], 41 | out_cmd: bool = False, 42 | use_local_server: bool = False, 43 | ): 44 | global DEFAULT_PLAYER 45 | if not self.which(): 46 | print(f"[yellow]No 
player {self.name}[/yellow], using default player: {DEFAULT_PLAYER.name}") 47 | self = DEFAULT_PLAYER 48 | if not self.which(): 49 | raise CommandError(f"No player: {self.name}") 50 | 51 | if self == Player.mpv: 52 | cmd = self._mpv_cmd( 53 | url, 54 | quiet=quiet, 55 | player_params=player_params, 56 | use_local_server=use_local_server, 57 | ) 58 | else: 59 | raise RuntimeError 60 | 61 | # Print out command 62 | if out_cmd: 63 | _print(" ".join((repr(c) for c in cmd))) 64 | return 65 | 66 | returncode = self.spawn(cmd) 67 | if returncode != 0: 68 | print(f"[italic]{self.value}[/italic] fails. return code: [red]{returncode}[/red]") 69 | 70 | def spawn(self, cmd: List[str], quiet: bool = False): 71 | child = subprocess.run( 72 | cmd, 73 | stdout=subprocess.DEVNULL if quiet else None, 74 | ) 75 | return child.returncode 76 | 77 | def _mpv_cmd( 78 | self, 79 | url: str, 80 | quiet: bool = False, 81 | player_params: List[str] = [], 82 | use_local_server: bool = False, 83 | ): 84 | if use_local_server: 85 | cmd = [self.which(), url, *player_params] 86 | else: 87 | cmd = [ 88 | self.which(), 89 | url, 90 | "--no-ytdl", 91 | "--http-header-fields=" 92 | f"User-Agent: {USER_AGENT.replace(',', '')},Referer: https://www.aliyundrive.com/,Connection: Keep-Alive", 93 | *player_params, 94 | ] 95 | if quiet: 96 | cmd.append("--really-quiet") 97 | return cmd 98 | 99 | 100 | DEFAULT_PLAYER = Player.mpv 101 | 102 | 103 | def play_file( 104 | api: AliPCSApi, 105 | pcs_file: PcsFile, 106 | share_id: str = None, 107 | player: Player = DEFAULT_PLAYER, 108 | player_params: List[str] = [], 109 | quiet: bool = False, 110 | ignore_ext: bool = False, 111 | out_cmd: bool = False, 112 | local_server: str = "", 113 | ): 114 | if not ignore_ext and not _with_media_ext(pcs_file.name): 115 | return 116 | 117 | print(f"[italic blue]Play[/italic blue]: {pcs_file.path or pcs_file.name}") 118 | 119 | use_local_server = bool(local_server) 120 | 121 | if share_id: 122 | shared_pcs_file_id = pcs_file.file_id 123 | shared_pcs_filename = pcs_file.name 124 | use_local_server = False 125 | remote_temp_dir = "/__alipcs_py_temp__" 126 | pcs_temp_dir = api.path(remote_temp_dir) or api.makedir_path(remote_temp_dir)[0] 127 | pf = api.transfer_shared_files([shared_pcs_file_id], pcs_temp_dir.file_id, share_id)[0] 128 | target_file_id = pf.file_id 129 | while True: 130 | pfs = api.search_all(shared_pcs_filename) 131 | for pf_ in pfs: 132 | if pf_.file_id == target_file_id: 133 | pcs_file = pf_ 134 | break 135 | else: 136 | time.sleep(2) 137 | continue 138 | 139 | break 140 | 141 | download_url: Optional[str] = None 142 | if use_local_server: 143 | download_url = f"{local_server}/__fileid__/?file_id={pcs_file.file_id}" 144 | print("url:", download_url) 145 | else: 146 | if not pcs_file or pcs_file.is_dir: 147 | return 148 | 149 | while True: 150 | try: 151 | pcs_file = api.update_download_url(pcs_file) 152 | break 153 | except AliPCSError as err: 154 | if err.error_code == "TooManyRequests": 155 | time.sleep(random.randint(1, 2)) 156 | continue 157 | raise err 158 | 159 | download_url = pcs_file.download_url 160 | 161 | if download_url: 162 | player.play( 163 | download_url, 164 | quiet=quiet, 165 | player_params=player_params, 166 | out_cmd=out_cmd, 167 | use_local_server=use_local_server, 168 | ) 169 | 170 | if share_id: 171 | api.remove(pcs_file.file_id) 172 | 173 | 174 | def play_dir( 175 | api: AliPCSApi, 176 | pcs_file: PcsFile, 177 | share_id: str = None, 178 | sifters: List[Sifter] = [], 179 | recursive: bool = False, 180 | 
from_index: int = 0, 181 | player: Player = DEFAULT_PLAYER, 182 | player_params: List[str] = [], 183 | quiet: bool = False, 184 | shuffle: bool = False, 185 | ignore_ext: bool = False, 186 | out_cmd: bool = False, 187 | local_server: str = "", 188 | ): 189 | sub_pcs_files = list(api.list_iter(pcs_file.file_id, share_id=share_id)) 190 | sub_pcs_files = sift(sub_pcs_files, sifters, recursive=recursive) 191 | 192 | if shuffle: 193 | rg = random.Random(time.time()) 194 | rg.shuffle(sub_pcs_files) 195 | 196 | for pf in sub_pcs_files[from_index:]: 197 | if pf.is_file: 198 | play_file( 199 | api, 200 | pf, 201 | share_id=share_id, 202 | player=player, 203 | player_params=player_params, 204 | quiet=quiet, 205 | ignore_ext=ignore_ext, 206 | out_cmd=out_cmd, 207 | local_server=local_server, 208 | ) 209 | else: # is_dir 210 | if recursive: 211 | play_dir( 212 | api, 213 | pf, 214 | share_id=share_id, 215 | sifters=sifters, 216 | recursive=recursive, 217 | from_index=from_index, 218 | player=player, 219 | player_params=player_params, 220 | quiet=quiet, 221 | shuffle=shuffle, 222 | ignore_ext=ignore_ext, 223 | out_cmd=out_cmd, 224 | local_server=local_server, 225 | ) 226 | 227 | 228 | def play( 229 | api: AliPCSApi, 230 | remotepaths: List[str], 231 | file_ids: List[str], 232 | share_id: str = None, 233 | sifters: List[Sifter] = [], 234 | recursive: bool = False, 235 | from_index: int = 0, 236 | player: Player = DEFAULT_PLAYER, 237 | player_params: List[str] = [], 238 | quiet: bool = False, 239 | shuffle: bool = False, 240 | ignore_ext: bool = False, 241 | out_cmd: bool = False, 242 | local_server: str = "", 243 | ): 244 | """Play media file in `remotepaths` 245 | 246 | Args: 247 | `from_index` (int): The start index of playing entries from EACH remote directory 248 | """ 249 | 250 | if shuffle: 251 | rg = random.Random(time.time()) 252 | rg.shuffle(remotepaths) 253 | 254 | for rp in remotepaths: 255 | rpf = api.path(rp, share_id=share_id) 256 | if not rpf: 257 | print(f"[yellow]WARNING[/yellow]: `{rp}` does not exist.") 258 | continue 259 | 260 | if rpf.is_file: 261 | play_file( 262 | api, 263 | rpf, 264 | share_id=share_id, 265 | player=player, 266 | player_params=player_params, 267 | quiet=quiet, 268 | ignore_ext=ignore_ext, 269 | out_cmd=out_cmd, 270 | local_server=local_server, 271 | ) 272 | else: 273 | play_dir( 274 | api, 275 | rpf, 276 | share_id=share_id, 277 | sifters=sifters, 278 | recursive=recursive, 279 | from_index=from_index, 280 | player=player, 281 | player_params=player_params, 282 | quiet=quiet, 283 | shuffle=shuffle, 284 | ignore_ext=ignore_ext, 285 | out_cmd=out_cmd, 286 | local_server=local_server, 287 | ) 288 | 289 | for file_id in file_ids: 290 | rpf = api.get_file(file_id=file_id, share_id=share_id) 291 | if rpf is None: 292 | print(f"[yellow]WARNING[/yellow]: file_id `{file_id}` does not exist.") 293 | continue 294 | 295 | if rpf.is_file: 296 | play_file( 297 | api, 298 | rpf, 299 | share_id=share_id, 300 | player=player, 301 | player_params=player_params, 302 | quiet=quiet, 303 | ignore_ext=ignore_ext, 304 | out_cmd=out_cmd, 305 | local_server=local_server, 306 | ) 307 | else: 308 | play_dir( 309 | api, 310 | rpf, 311 | share_id=share_id, 312 | sifters=sifters, 313 | recursive=recursive, 314 | from_index=from_index, 315 | player=player, 316 | player_params=player_params, 317 | quiet=quiet, 318 | shuffle=shuffle, 319 | ignore_ext=ignore_ext, 320 | out_cmd=out_cmd, 321 | local_server=local_server, 322 | ) 323 | 
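# A minimal usage sketch of `play` above, kept as an unused helper so that
# importing this module stays side-effect free. The `api` argument is assumed
# to be an already-authenticated AliPCSApi client and "/videos" is a
# hypothetical remote directory; both are placeholders, not values defined by
# this project.
def _example_play(api: AliPCSApi) -> None:
    play(
        api,
        ["/videos"],  # remote paths to walk
        [],  # no explicit file ids
        recursive=True,
        player=Player.mpv,
        shuffle=False,
    )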
-------------------------------------------------------------------------------- /alipcs_py/commands/search.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from alipcs_py.alipcs import AliPCSApi 4 | from alipcs_py.commands.display import display_files 5 | from alipcs_py.commands.sifter import Sifter, IncludeSifter 6 | 7 | 8 | def search( 9 | api: AliPCSApi, 10 | keyword: str, 11 | sifters: List[Sifter] = None, 12 | highlight: bool = False, 13 | show_size: bool = False, 14 | show_date: bool = False, 15 | show_file_id: bool = False, 16 | show_hash: bool = False, 17 | csv: bool = False, 18 | ): 19 | pcs_files = api.search_all(keyword) 20 | 21 | sifters = [*(sifters or []), IncludeSifter(keyword)] 22 | display_files( 23 | pcs_files, 24 | sifters=sifters, 25 | highlight=highlight, 26 | show_size=show_size, 27 | show_date=show_date, 28 | show_file_id=show_file_id, 29 | show_hash=show_hash, 30 | show_absolute_path=True, 31 | csv=csv, 32 | ) 33 | -------------------------------------------------------------------------------- /alipcs_py/commands/server.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | if sys.version_info < (3, 9): 4 | from typing_extensions import Annotated 5 | else: 6 | from typing import Annotated 7 | 8 | from typing import Optional, Dict, Any 9 | 10 | from pathlib import Path 11 | import os 12 | import mimetypes 13 | import secrets 14 | import copy 15 | from urllib.parse import quote 16 | 17 | import uvicorn 18 | 19 | from alipcs_py.alipcs import AliPCSApi 20 | from alipcs_py.common.io import RangeRequestIO 21 | from alipcs_py.common.constant import CPU_NUM 22 | from alipcs_py.common.path import join_path 23 | from alipcs_py.utils import format_date 24 | 25 | from fastapi import Depends, FastAPI, Request, status, HTTPException 26 | from fastapi.security import HTTPBasic, HTTPBasicCredentials 27 | from fastapi.responses import HTMLResponse, StreamingResponse, Response 28 | from jinja2 import Template 29 | 30 | from rich import print 31 | 32 | mimetypes.init() 33 | 34 | 35 | _api: Optional[AliPCSApi] = None 36 | _root_dir: str = "/" 37 | _encrypt_password: bytes = b"" 38 | 39 | # For Auth 40 | _username: Optional[str] = None 41 | _password: Optional[str] = None 42 | 43 | # This template is from https://github.com/rclone/rclone/blob/master/cmd/serve/httplib/serve/data/templates/index.html 44 | _html_tempt: Template = Template((Path(__file__).parent / "index.html").open(encoding="utf-8").read()) 45 | 46 | 47 | def fake_io(io: RangeRequestIO, start: int = 0, end: int = -1): 48 | for b in io._auto_decrypt_request.read((start, end)): 49 | yield b 50 | 51 | 52 | def get_file(file_id: str, remotepath: str, _range: Optional[str]): 53 | global _api 54 | assert _api 55 | 56 | try: 57 | fs = _api.file_stream(file_id, encrypt_password=_encrypt_password) 58 | except Exception as err: 59 | raise HTTPException(status_code=500, detail=f"Error: {err}, remotepath: {remotepath}") 60 | 61 | if not fs: 62 | raise HTTPException(status_code=404, detail=f"No download link: {remotepath}") 63 | 64 | length = len(fs) 65 | 66 | headers: Dict[str, str] = { 67 | "accept-ranges": "bytes", 68 | "connection": "Keep-Alive", 69 | "access-control-allow-origin": "*", 70 | } 71 | 72 | ext = os.path.splitext(remotepath)[-1] 73 | content_type = mimetypes.types_map.get(ext) 74 | 75 | if content_type: 76 | headers["content-type"] = content_type 77 | 78 | if _range and fs.seekable(): 79 | 
assert _range.startswith("bytes=") 80 | 81 | status_code = 206 82 | start, end = _range[6:].split("-") 83 | _s, _e = int(start or 0), int(end or length - 1) + 1 84 | _iter_io = fake_io(fs, _s, _e) 85 | headers["content-range"] = f"bytes {_s}-{_e-1}/{length}" 86 | headers["content-length"] = str(_e - _s) 87 | else: 88 | status_code = 200 89 | _iter_io = fake_io(fs) 90 | headers["content-length"] = str(length) 91 | return StreamingResponse(_iter_io, status_code=status_code, headers=headers) 92 | 93 | 94 | async def handle_request( 95 | request: Request, 96 | remotepath: str = "", 97 | order: str = "asc", # desc , asc 98 | sort: str = "name", # name, time, size 99 | file_id: str = "", 100 | ) -> Response: 101 | desc = order == "desc" 102 | name = sort == "name" 103 | time = sort == "time" 104 | size = sort == "size" 105 | 106 | global _root_dir 107 | global _api 108 | assert _api 109 | 110 | _range = request.headers.get("range") 111 | 112 | if file_id: 113 | return get_file(file_id, remotepath, _range) 114 | 115 | remotepath = remotepath.strip("/") 116 | 117 | _rp = join_path(_root_dir, remotepath) 118 | 119 | # Anti path traversal attack 120 | if not _rp.startswith(_root_dir): 121 | raise HTTPException(status_code=404, detail="Item not found") 122 | 123 | rpf = _api.path(_rp) 124 | if not rpf: 125 | raise HTTPException(status_code=404, detail="Item not found") 126 | 127 | is_dir = rpf.is_dir 128 | if is_dir: 129 | chunks = ["/"] + (remotepath.split("/") if remotepath != "" else []) 130 | navigation = [(i - 1, "../" * (len(chunks) - i), name) for i, name in enumerate(chunks, 1)] 131 | entries = [] 132 | pcs_files = _api.list_iter(rpf.file_id, desc=desc, name=name, time=time, size=size) 133 | for pf in pcs_files: 134 | p = rpf.path / Path(pf.path) 135 | entries.append( 136 | ( 137 | pf.file_id, 138 | pf.is_dir, 139 | p.name, 140 | quote(p.name), 141 | pf.size, 142 | format_date(pf.updated_at or 0), 143 | ) 144 | ) 145 | cn = _html_tempt.render(root_dir=remotepath, navigation=navigation, entries=entries) 146 | return HTMLResponse(cn) 147 | else: 148 | return get_file(rpf.file_id, remotepath, _range) 149 | 150 | 151 | _security = HTTPBasic() 152 | 153 | 154 | def to_auth(credentials: HTTPBasicCredentials = Depends(_security)) -> str: 155 | correct_username = secrets.compare_digest(credentials.username, _username or "") 156 | correct_password = secrets.compare_digest(credentials.password, _password or "") 157 | if not (correct_username and correct_password): 158 | raise HTTPException( 159 | status_code=status.HTTP_401_UNAUTHORIZED, 160 | detail="Incorrect email or password", 161 | headers={"WWW-Authenticate": "Basic"}, 162 | ) 163 | return credentials.username 164 | 165 | 166 | app = FastAPI() 167 | 168 | 169 | def make_auth_http_server(path: str = ""): 170 | @app.get("%s/{remotepath:path}" % path) 171 | async def auth_http_server( 172 | username: Annotated[str, Depends(to_auth)], response: Annotated[Any, Depends(handle_request)] 173 | ): 174 | if username: 175 | return response 176 | 177 | @app.get("/__fileid__/") 178 | async def auth_file_id( 179 | username: Annotated[str, Depends(to_auth)], response: Annotated[Any, Depends(handle_request)] 180 | ): 181 | if username: 182 | return response 183 | 184 | 185 | def make_http_server(path: str = ""): 186 | @app.get("%s/{remotepath:path}" % path) 187 | async def http_server(response: Annotated[Any, Depends(handle_request)]): 188 | return response 189 | 190 | @app.get("/__fileid__/") 191 | async def file_id(response: Annotated[Any, 
Depends(handle_request)]): 192 | return response 193 | 194 | 195 | def start_server( 196 | api: AliPCSApi, 197 | root_dir: str = "/", 198 | path: str = "", 199 | host: str = "localhost", 200 | port: int = 8000, 201 | workers: int = CPU_NUM, 202 | encrypt_password: bytes = b"", 203 | log_level: str = "info", 204 | username: Optional[str] = None, 205 | password: Optional[str] = None, 206 | ): 207 | """Create a http server on remote `root_dir`""" 208 | 209 | global _encrypt_password 210 | _encrypt_password = encrypt_password 211 | 212 | global _root_dir 213 | _root_dir = root_dir 214 | 215 | global _api 216 | if not _api: 217 | _api = api 218 | 219 | global _username 220 | if not _username: 221 | _username = username 222 | 223 | global _password 224 | if not _password: 225 | _password = password 226 | 227 | if path == "/" or not path: 228 | path = "" 229 | else: 230 | path = "/" + path.strip("/") 231 | 232 | print(f"[yellow]Server running on[/yellow] [b]http://{host}:{port}{path}/[/b]") 233 | 234 | if _username and _password: 235 | make_auth_http_server(path) 236 | else: 237 | make_http_server(path) 238 | 239 | log_config = copy.deepcopy(uvicorn.config.LOGGING_CONFIG) 240 | log_config["formatters"]["access"]["fmt"] = ( 241 | '%(asctime)s - %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s - %(msecs)d ms' 242 | ) 243 | uvicorn.run( 244 | "alipcs_py.commands.server:app", 245 | host=host, 246 | port=port, 247 | log_level=log_level, 248 | log_config=log_config, 249 | workers=1, 250 | ) 251 | -------------------------------------------------------------------------------- /alipcs_py/commands/share.py: -------------------------------------------------------------------------------- 1 | from typing import List, Dict, Set, Union 2 | import re 3 | 4 | from alipcs_py.alipcs import AliPCSApi, PcsFile 5 | from alipcs_py.commands.list_files import list_files 6 | from alipcs_py.commands.sifter import Sifter 7 | from alipcs_py.commands.display import ( 8 | display_invalid_shared_link_infos, 9 | display_shared_files, 10 | display_shared_link_infos, 11 | display_shared_links, 12 | ) 13 | from alipcs_py.commands.download import ( 14 | DEFAULT_CHUNK_SIZE, 15 | DEFAULT_CONCURRENCY, 16 | download, 17 | Downloader, 18 | DEFAULT_DOWNLOADER, 19 | ) 20 | from alipcs_py.commands.play import play, Player, DEFAULT_PLAYER 21 | 22 | import requests # type: ignore 23 | 24 | from rich import print 25 | 26 | 27 | def share_files(api: AliPCSApi, *remotepaths: str, password: str = "", period: int = 0): 28 | pcs_files = api.paths(*remotepaths) 29 | assert all(pcs_files) 30 | 31 | file_ids = [pf.file_id for pf in pcs_files if pf] 32 | 33 | shared_link = api.share(*file_ids, password=password, period=period) 34 | display_shared_links(shared_link) 35 | 36 | 37 | def list_shared(api: AliPCSApi, show_all=True): 38 | pcs_shared_links = api.list_shared_all() 39 | if not pcs_shared_links: 40 | return 41 | 42 | display_shared_links(*pcs_shared_links) 43 | 44 | 45 | def cancel_shared(api: AliPCSApi, *share_ids: str): 46 | api.cancel_shared(*share_ids) 47 | 48 | 49 | def _redirect(url: str) -> str: 50 | if not url or "alywp.net" not in url: 51 | return url 52 | resp = requests.get(url, allow_redirects=False) 53 | return resp.headers.get("Location") or "" 54 | 55 | 56 | def _extract_share_id(share_url: str) -> str: 57 | m = re.search(r"/s/(\w+)", share_url) 58 | return m.group(1) if m else "" 59 | 60 | 61 | def _extract_file_id(share_url: str) -> str: 62 | m = re.search(r"/folder/(\w+)", share_url) 63 | return m.group(1) 
if m else "" 64 | 65 | 66 | def extract_shared_info_from_url(share_url: str): 67 | share_url = _redirect(share_url) 68 | share_id = _extract_share_id(share_url) 69 | file_id = _extract_file_id(share_url) 70 | 71 | return share_id, file_id 72 | 73 | 74 | def save_shared_files_to_remotedir( 75 | api: AliPCSApi, shared_pcs_files: List[PcsFile], share_id: str, remote_pcs_file: PcsFile 76 | ): 77 | """Save shared files to the remote directory `remotedir`. Ignore existed files.""" 78 | 79 | wanted_pcs_files = [] 80 | remain_pcs_files = [] 81 | for sp in shared_pcs_files: 82 | if not remotepath_exists(api, sp.name, remote_pcs_file.file_id): 83 | wanted_pcs_files.append(sp) 84 | else: 85 | remain_pcs_files.append(sp) 86 | 87 | if wanted_pcs_files: 88 | api.transfer_shared_files( 89 | [sp.file_id for sp in wanted_pcs_files], 90 | remote_pcs_file.file_id, 91 | share_id, 92 | auto_rename=False, 93 | ) 94 | for sp in wanted_pcs_files: 95 | print(f"save: `{sp.path}` to `{remote_pcs_file.path}`") 96 | 97 | for sp in remain_pcs_files: 98 | if sp.is_file: 99 | print(f"[yellow]WARNING[/]: `{sp.path}` has be in `{remote_pcs_file.path}`") 100 | else: 101 | remote_dir_pcs_file = api.get_file(remotepath=f"{remote_pcs_file.path}/{sp.name}") 102 | if remote_dir_pcs_file is None: 103 | remote_dir_pcs_file = api.makedir(remote_pcs_file.file_id, sp.name) 104 | 105 | sub_files = list(api.list_iter(sp.file_id, share_id=share_id, recursive=False, include_dir=True)) 106 | save_shared_files_to_remotedir(api, sub_files, share_id, remote_dir_pcs_file) 107 | 108 | 109 | def save_shared( 110 | api: AliPCSApi, 111 | remotedir: str, 112 | share_id: str = "", 113 | share_url: str = "", 114 | file_ids: List[str] = [], 115 | password: str = "", 116 | ): 117 | """Save shared files of the shared url to the remote directory `remotedir`.""" 118 | 119 | assert remotedir.startswith("/"), "`remotedir` must be an absolute path" 120 | 121 | assert int(bool(share_id)) ^ int(bool(share_url)), "`share_id` and `share_url` only can be given one" 122 | 123 | # 1. Get shared info 124 | if share_url: 125 | share_id, file_id = extract_shared_info_from_url(share_url) 126 | if file_id: 127 | file_ids.append(file_id) 128 | file_ids = list(set(file_ids)) 129 | 130 | # Default save the sub files in root directory 131 | if not file_ids: 132 | file_ids = ["root"] 133 | assert share_id 134 | 135 | # 2. Get shared link token 136 | api.get_share_token(share_id, share_password=password) 137 | 138 | # 3. Make sure the remote directory exists 139 | remote_pcs_file = api.get_file(remotepath=remotedir) 140 | if remote_pcs_file is not None: 141 | if not remote_pcs_file.is_dir: 142 | print(f"[yellow]WARNING[/]: `{remotedir}` is not a directory") 143 | return 144 | else: 145 | remote_pcs_file = api.makedir_path(remotedir)[0] 146 | 147 | # 4. 
Get shared files and save them to the remote directory 148 | shared_pcs_files = [] 149 | for file_id in file_ids: 150 | shared_pcs_file = api.get_file(file_id=file_id, share_id=share_id) 151 | if shared_pcs_file is not None: 152 | if shared_pcs_file.is_root(): 153 | # No need to save root directory, save its sub files/directories 154 | shared_pcs_files.extend(api.list_iter(file_id, share_id=share_id, recursive=False, include_dir=True)) 155 | else: 156 | shared_pcs_files.append(shared_pcs_file) 157 | 158 | save_shared_files_to_remotedir(api, shared_pcs_files, share_id, remote_pcs_file) 159 | 160 | 161 | def list_shared_files( 162 | api: AliPCSApi, 163 | *remotepaths: str, 164 | share_id: str = "", 165 | share_url: str = "", 166 | password: str = "", 167 | file_ids: List[str] = [], 168 | desc: bool = False, 169 | name: bool = False, 170 | time: bool = False, 171 | size: bool = False, 172 | all: bool = True, 173 | limit: int = 200, 174 | recursive: bool = False, 175 | sifters: List[Sifter] = [], 176 | highlight: bool = False, 177 | show_size: bool = False, 178 | show_date: bool = False, 179 | show_file_id: bool = False, 180 | show_hash: bool = False, 181 | show_absolute_path: bool = False, 182 | csv: bool = False, 183 | ): 184 | """List shared files in the shared url or shared id.""" 185 | 186 | assert int(bool(share_id)) ^ int(bool(share_url)), "`share_id` and `share_url` only can be given one" 187 | 188 | if share_url: 189 | share_id, file_id = extract_shared_info_from_url(share_url) 190 | if file_id: 191 | file_ids.append(file_id) 192 | file_ids = list(set(file_ids)) 193 | assert share_id 194 | 195 | if not remotepaths and not file_ids: 196 | return 197 | 198 | api.get_share_token(share_id, share_password=password) 199 | 200 | list_files( 201 | api, 202 | *remotepaths, 203 | file_ids=file_ids, 204 | share_id=share_id, 205 | desc=desc, 206 | name=name, 207 | time=time, 208 | size=size, 209 | all=all, 210 | limit=limit, 211 | recursive=recursive, 212 | sifters=sifters, 213 | highlight=highlight, 214 | show_size=show_size, 215 | show_date=show_date, 216 | show_file_id=show_file_id, 217 | show_hash=show_hash, 218 | show_absolute_path=show_absolute_path, 219 | csv=csv, 220 | ) 221 | 222 | 223 | def remotepath_exists(api: AliPCSApi, name: str, remote_file_id: str, _cache: Dict[str, Set[str]] = {}) -> bool: 224 | """Check if the `name` exists in the remote directory `remote_file_id`.""" 225 | 226 | names = _cache.get(remote_file_id) 227 | if not names: 228 | names = set(sp.name for sp in api.list_iter(remote_file_id)) 229 | _cache[remote_file_id] = names 230 | return name in names 231 | 232 | 233 | def download_shared( 234 | api: AliPCSApi, 235 | remotepaths: List[Union[str, PcsFile]], 236 | file_ids: List[str], 237 | localdir: str, 238 | share_id: str = "", 239 | share_url: str = "", 240 | password: str = "", 241 | sifters: List[Sifter] = [], 242 | recursive: bool = False, 243 | from_index: int = 0, 244 | downloader: Downloader = DEFAULT_DOWNLOADER, 245 | concurrency: int = DEFAULT_CONCURRENCY, 246 | chunk_size: Union[str, int] = DEFAULT_CHUNK_SIZE, 247 | show_progress: bool = False, 248 | max_retries: int = 2, 249 | out_cmd: bool = False, 250 | encrypt_password: bytes = b"", 251 | ): 252 | """Download shared files in the shared url or shared id.""" 253 | 254 | assert int(bool(share_id)) ^ int(bool(share_url)), "`share_id` and `share_url` only can be given one" 255 | 256 | if share_url: 257 | share_id, file_id = extract_shared_info_from_url(share_url) 258 | if file_id: 259 | 
file_ids.append(file_id) 260 | file_ids = list(set(file_ids)) 261 | assert share_id 262 | 263 | if not remotepaths and not file_ids: 264 | return 265 | 266 | api.get_share_token(share_id, share_password=password) 267 | 268 | download( 269 | api, 270 | remotepaths, 271 | file_ids=file_ids, 272 | localdir=localdir, 273 | share_id=share_id, 274 | sifters=sifters, 275 | recursive=recursive, 276 | from_index=from_index, 277 | downloader=downloader, 278 | concurrency=concurrency, 279 | chunk_size=chunk_size, 280 | show_progress=show_progress, 281 | max_retries=max_retries, 282 | out_cmd=out_cmd, 283 | encrypt_password=encrypt_password, 284 | ) 285 | 286 | 287 | def play_shared( 288 | api: AliPCSApi, 289 | remotepaths: List[str], 290 | file_ids: List[str], 291 | share_id: str, 292 | share_url: str = "", 293 | password: str = "", 294 | sifters: List[Sifter] = [], 295 | recursive: bool = False, 296 | from_index: int = 0, 297 | player: Player = DEFAULT_PLAYER, 298 | player_params: List[str] = [], 299 | quiet: bool = False, 300 | shuffle: bool = False, 301 | ignore_ext: bool = False, 302 | out_cmd: bool = False, 303 | local_server: str = "", 304 | ): 305 | """Play shared files in the shared url or shared id.""" 306 | 307 | assert int(bool(share_id)) ^ int(bool(share_url)), "`share_id` and `share_url` only can be given one" 308 | 309 | if share_url: 310 | share_id, file_id = extract_shared_info_from_url(share_url) 311 | if file_id: 312 | file_ids.append(file_id) 313 | file_ids = list(set(file_ids)) 314 | assert share_id 315 | 316 | if not remotepaths and not file_ids: 317 | return 318 | 319 | assert share_id 320 | 321 | api.get_share_token(share_id, share_password=password) 322 | 323 | play( 324 | api, 325 | remotepaths, 326 | file_ids=file_ids, 327 | share_id=share_id, 328 | sifters=sifters, 329 | recursive=recursive, 330 | from_index=from_index, 331 | player=player, 332 | player_params=player_params, 333 | quiet=quiet, 334 | shuffle=shuffle, 335 | ignore_ext=ignore_ext, 336 | out_cmd=out_cmd, 337 | local_server=local_server, 338 | ) 339 | 340 | 341 | def get_share_token(api: AliPCSApi, share_id: str, share_url: str = "", password: str = "") -> str: 342 | """Initiate a shared link (or id) and get the share token.""" 343 | 344 | assert int(bool(share_id)) ^ int(bool(share_url)), "`share_id` and `share_url` only can be given one" 345 | 346 | share_id, _ = extract_shared_info_from_url(share_url) 347 | assert share_id 348 | 349 | return api.get_share_token(share_id, share_password=password) 350 | -------------------------------------------------------------------------------- /alipcs_py/commands/sifter.py: -------------------------------------------------------------------------------- 1 | from typing import TypeVar, Optional, List, Union, Pattern 2 | from abc import ABC 3 | import re 4 | 5 | from alipcs_py.alipcs import PcsFile 6 | 7 | 8 | class Sifter(ABC): 9 | def pattern(self) -> Union[Pattern, str, None]: 10 | """ 11 | The regex pattern used to the sifter 12 | 13 | If it returns '', then the sifter will match all inputs 14 | """ 15 | return None 16 | 17 | def include(self) -> bool: 18 | """Include the sifted result or exclude""" 19 | return True 20 | 21 | def sift(self, obj: Union[PcsFile, str]) -> bool: 22 | """ 23 | True: to include 24 | False: to exclude 25 | """ 26 | 27 | include = self.include() 28 | pat = self.pattern() 29 | 30 | if not pat: 31 | return include 32 | 33 | if isinstance(obj, PcsFile): 34 | buf = obj.path 35 | else: 36 | buf = obj 37 | 38 | if isinstance(pat, Pattern): 39 | if 
pat.search(buf): 40 | return include 41 | else: 42 | return not include 43 | else: # str 44 | if pat in buf: 45 | return include 46 | else: 47 | return not include 48 | 49 | def __call__(self, obj: Union[PcsFile, str]) -> bool: 50 | return self.sift(obj) 51 | 52 | 53 | class IncludeSifter(Sifter): 54 | def __init__(self, needle: Optional[str], regex: bool = False): 55 | _pattern: Union[Pattern, str, None] = None 56 | self._pattern = _pattern 57 | if needle: 58 | if regex: 59 | self._pattern = re.compile(needle) 60 | else: 61 | self._pattern = needle 62 | 63 | def pattern(self): 64 | return self._pattern 65 | 66 | 67 | class ExcludeSifter(IncludeSifter): 68 | def __init__(self, needle: Optional[str], regex: bool = False): 69 | super().__init__(needle, regex=regex) 70 | 71 | def include(self): 72 | return False 73 | 74 | 75 | class IsFileSifter(Sifter): 76 | def sift(self, obj: Union[PcsFile, str]) -> bool: 77 | assert isinstance(obj, PcsFile) 78 | return obj.is_file or not obj.is_dir 79 | 80 | 81 | class IsDirSifter(Sifter): 82 | def sift(self, obj: Union[PcsFile, str]) -> bool: 83 | assert isinstance(obj, PcsFile) 84 | return obj.is_dir or not obj.is_file 85 | 86 | 87 | T = TypeVar("T", PcsFile, str) 88 | 89 | 90 | def sift(objs: List[T], sifters: List[Sifter], recursive: bool = False) -> List[T]: 91 | if sifters: 92 | obj_dirs: List[T] 93 | if recursive: 94 | # If it is recursive, we ignore to sift dirs. 95 | obj_dirs = [o for o in objs if isinstance(o, PcsFile) and o.is_dir] 96 | objs = [o for o in objs if not isinstance(o, PcsFile) or isinstance(o, PcsFile) and o.is_file] 97 | else: 98 | obj_dirs = [] 99 | 100 | objs = obj_dirs + [obj for obj in objs if all([sifter(obj) for sifter in sifters])] 101 | return objs 102 | -------------------------------------------------------------------------------- /alipcs_py/commands/sync.py: -------------------------------------------------------------------------------- 1 | from typing import List, Tuple 2 | from pathlib import Path 3 | import os 4 | 5 | from alipcs_py.alipcs import AliPCSApi, PcsFile, FromTo 6 | from alipcs_py.common.path import PathType, join_path 7 | from alipcs_py.common.crypto import calc_sha1 8 | from alipcs_py.common.constant import CPU_NUM 9 | from alipcs_py.common.io import EncryptType 10 | from alipcs_py.commands.upload import upload, DEFAULT_SLICE_SIZE 11 | from alipcs_py.commands.log import get_logger 12 | 13 | from rich import print 14 | 15 | logger = get_logger(__name__) 16 | 17 | 18 | def sync( 19 | api: AliPCSApi, 20 | localdir: str, 21 | remotedir: str, 22 | encrypt_password: bytes = b"", 23 | encrypt_type: EncryptType = EncryptType.No, 24 | max_workers: int = CPU_NUM, 25 | slice_size: int = DEFAULT_SLICE_SIZE, 26 | show_progress: bool = True, 27 | ): 28 | """Sync local directory to remote directory.""" 29 | 30 | localdir = Path(localdir).as_posix() 31 | remotedir = Path(remotedir).as_posix() 32 | 33 | remote_pcs_file = api.makedir_path(remotedir)[0] 34 | assert remote_pcs_file and remote_pcs_file.is_dir, "remotedir must be a directory" 35 | 36 | sub_path_to_its_pcs_file = { 37 | pcs_file.path: pcs_file 38 | for pcs_file in api.list_iter(remote_pcs_file.file_id, recursive=True, include_dir=False) 39 | } 40 | 41 | needed_uploads: List[FromTo[PathType, str]] = [] 42 | needed_checks: List[Tuple[Path, PcsFile]] = [] 43 | all_localpaths = set() 44 | for root, _, filenames in os.walk(localdir): 45 | for filename in filenames: 46 | localpath = Path(root[len(localdir) + 1 :]) / filename 47 | localpath_posix = 
localpath.as_posix() 48 | all_localpaths.add(localpath_posix) 49 | 50 | if localpath_posix not in sub_path_to_its_pcs_file: 51 | needed_uploads.append((root / localpath, join_path(remotedir, localpath))) 52 | else: 53 | needed_checks.append((root / localpath, sub_path_to_its_pcs_file[localpath_posix])) 54 | 55 | for lp, pf in needed_checks: 56 | sha1 = calc_sha1(lp.open("rb")) 57 | 58 | if pf.rapid_upload_info and sha1.lower() != pf.rapid_upload_info.content_hash.lower(): 59 | needed_uploads.append((lp, pf.path)) 60 | 61 | need_deleted_file_ids = [] 62 | for rp in sub_path_to_its_pcs_file.keys(): 63 | if rp not in all_localpaths: 64 | need_deleted_file_ids.append(sub_path_to_its_pcs_file[rp].file_id) 65 | 66 | logger.debug( 67 | "`sync`: all localpaths: %s, localpaths needed to upload: %s, remotepaths needed to delete: %s", 68 | len(all_localpaths), 69 | len(needed_uploads), 70 | len(need_deleted_file_ids), 71 | ) 72 | 73 | upload( 74 | api, 75 | needed_uploads, 76 | check_name_mode="overwrite", 77 | encrypt_password=encrypt_password, 78 | encrypt_type=encrypt_type, 79 | max_workers=max_workers, 80 | slice_size=slice_size, 81 | show_progress=show_progress, 82 | ) 83 | 84 | if need_deleted_file_ids: 85 | api.remove(*need_deleted_file_ids) 86 | print(f"Delete: [i]{len(need_deleted_file_ids)}[/i] remote paths") 87 | -------------------------------------------------------------------------------- /alipcs_py/commands/user.py: -------------------------------------------------------------------------------- 1 | from alipcs_py.alipcs import AliPCSApi 2 | from alipcs_py.commands.display import display_user_info 3 | 4 | 5 | def show_user_info(api: AliPCSApi): 6 | user_info = api.user_info() 7 | display_user_info(user_info) 8 | -------------------------------------------------------------------------------- /alipcs_py/common/cache.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Any 2 | from collections import UserDict 3 | from functools import wraps, _make_key 4 | import time 5 | 6 | 7 | class TimeoutCache(UserDict): 8 | def __init__(self, timeout: int): 9 | super().__init__() 10 | self._timeout = timeout 11 | self._last_used: Dict[Any, float] = {} 12 | 13 | def __getitem__(self, key): 14 | val = super().__getitem__(key) 15 | 16 | now = time.time() 17 | if now - self._last_used[key] > self._timeout: 18 | self.clear_timeout() 19 | raise KeyError(key) 20 | 21 | return val 22 | 23 | def __setitem__(self, key, value): 24 | super().__setitem__(key, value) 25 | now = time.time() 26 | self._last_used[key] = now 27 | 28 | def clear_timeout(self): 29 | now = time.time() 30 | for key in list(self.keys()): 31 | if now - self._last_used[key] > self._timeout: 32 | self.__delitem__(key) 33 | del self._last_used[key] 34 | 35 | 36 | def timeout_cache(timeout: int): 37 | def cached(func): 38 | _cache = TimeoutCache(timeout) 39 | 40 | @wraps(func) 41 | def wrap(*args, **kwargs): 42 | key = _make_key(args, kwargs, False) 43 | val = _cache.get(key) 44 | if val: 45 | return val 46 | val = func(*args, **kwargs) 47 | _cache[key] = val 48 | return val 49 | 50 | return wrap 51 | 52 | return cached 53 | -------------------------------------------------------------------------------- /alipcs_py/common/concurrent.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Callable, Any 2 | from functools import wraps 3 | from threading import Semaphore 4 | 5 | 6 | def sure_release(semaphore: Semaphore, 
func, *args, **kwargs): 7 | """Release semaphore after func is done.""" 8 | 9 | try: 10 | return func(*args, **kwargs) 11 | finally: 12 | semaphore.release() 13 | 14 | 15 | def retry(times: int, except_callback: Optional[Callable[[Exception, int], Any]] = None): 16 | """Retry times when func fails""" 17 | 18 | def wrap(func): 19 | @wraps(func) 20 | def retry_it(*args, **kwargs): 21 | nonlocal times 22 | if times < 0: # forever 23 | times = 1 << 32 24 | 25 | for i in range(1, times + 1): 26 | try: 27 | r = func(*args, **kwargs) 28 | return r 29 | except Exception as err: 30 | if except_callback is not None: 31 | except_callback(err, i) 32 | 33 | if i == times: 34 | raise err 35 | 36 | return retry_it 37 | 38 | return wrap 39 | -------------------------------------------------------------------------------- /alipcs_py/common/constant.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | CPU_NUM = os.cpu_count() or 1 4 | 5 | # Defines that should never be changed 6 | OneK = 1024 7 | OneM = OneK * OneK 8 | OneG = OneM * OneK 9 | OneT = OneG * OneK 10 | OneP = OneT * OneK 11 | OneE = OneP * OneK 12 | -------------------------------------------------------------------------------- /alipcs_py/common/crypto.py: -------------------------------------------------------------------------------- 1 | from typing import Union, List, Tuple, IO, Callable, Any, cast 2 | import re 3 | import os 4 | import subprocess 5 | import random 6 | from io import BufferedReader 7 | from abc import ABC, abstractmethod 8 | from zlib import crc32 9 | import hashlib 10 | from hashlib import md5, sha1 11 | import base64 12 | 13 | from passlib.crypto.digest import pbkdf1 14 | 15 | import ecdsa 16 | from ecdsa import SigningKey, VerifyingKey 17 | 18 | from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes 19 | from cryptography.hazmat.primitives.padding import PKCS7 20 | from cryptography.hazmat.backends import default_backend 21 | 22 | from alipcs_py.common import constant 23 | from alipcs_py.common.platform import IS_LINUX, IS_MACOS 24 | from alipcs_py.common.simple_cipher import SimpleCryptography as _SimpleCryptography 25 | 26 | 27 | def _md5_cmd(localpath: str) -> List[str]: 28 | if IS_MACOS: 29 | cmd = ["md5", localpath] 30 | elif IS_LINUX: 31 | cmd = ["md5sum", localpath] 32 | else: # windows 33 | cmd = ["CertUtil", "-hashfile", localpath, "MD5"] 34 | return cmd 35 | 36 | 37 | def calc_file_md5(localpath: str) -> str: 38 | cp = subprocess.run(_md5_cmd(localpath), universal_newlines=True, stdout=subprocess.PIPE) 39 | 40 | output = cp.stdout.strip() 41 | if IS_MACOS: 42 | return re.split(r"\s+", output)[-1] 43 | elif IS_LINUX: 44 | return re.split(r"\s+", output)[0] 45 | else: # windows 46 | cn = output.split("CertUtil")[0].strip() 47 | cn = cn.split(":")[-1].strip().replace(" ", "") 48 | return cn 49 | 50 | 51 | ReadableBuffer = Union[bytes, bytearray] # stable 52 | 53 | 54 | def calc_crc32_and_md5(stream: IO, chunk_size: int) -> Tuple[int, str]: 55 | md5_v = md5() 56 | crc32_v = 0 57 | while True: 58 | buf = stream.read(chunk_size) 59 | if buf: 60 | md5_v.update(buf) 61 | crc32_v = crc32(buf, crc32_v).conjugate() 62 | else: 63 | break 64 | return crc32_v.conjugate() & 0xFFFFFFFF, md5_v.hexdigest() 65 | 66 | 67 | def calc_hash(hash_method: Callable, buf: Union[str, bytes, IO, BufferedReader], encoding="utf-8") -> str: 68 | assert isinstance(buf, (str, bytes, IO, BufferedReader)) 69 | 70 | if isinstance(buf, str): 71 | buf = buf.encode(encoding) 
72 | return hash_method(buf).hexdigest() 73 | elif isinstance(buf, bytes): 74 | return hash_method(buf).hexdigest() 75 | else: 76 | hasher = hash_method() 77 | while True: 78 | chunk = buf.read(constant.OneM) 79 | if not chunk: 80 | return hasher.hexdigest() 81 | hasher.update(chunk) 82 | 83 | 84 | def calc_md5(buf: Union[str, bytes, IO, BufferedReader], encoding="utf-8") -> str: 85 | return calc_hash(md5, buf, encoding=encoding) 86 | 87 | 88 | def calc_sha1(buf: Union[str, bytes, IO, BufferedReader], encoding="utf-8") -> str: 89 | return calc_hash(sha1, buf, encoding=encoding) 90 | 91 | 92 | def calc_proof_code(io: IO, io_len: int, key: str) -> str: 93 | if io_len == 0: 94 | return "" 95 | 96 | key_md5 = calc_md5(key) 97 | offset = int("0x" + key_md5[:16], 16) % io_len 98 | pre_offset = io.tell() 99 | io.seek(offset, 0) 100 | buf = io.read(8) or b"" 101 | io.seek(pre_offset, 0) 102 | return base64.b64encode(buf).decode("utf-8") 103 | 104 | 105 | U8_LIST = list(range(1 << 8)) 106 | 107 | 108 | def random_bytes(size: int, seed: Any = None) -> bytes: 109 | """Generate random bytes""" 110 | 111 | rg = random.Random(seed) 112 | return bytes(rg.sample(U8_LIST, size)) 113 | 114 | 115 | def random_sys_bytes(size: int) -> bytes: 116 | """Generate random bytes with `os.urandom`""" 117 | 118 | return os.urandom(size) 119 | 120 | 121 | def padding_key(key: Union[str, bytes], length: int = 0, value: bytes = b"\xff") -> bytes: 122 | """padding key with `value`""" 123 | 124 | assert len(value) < 2 125 | 126 | if isinstance(key, str): 127 | key = key.encode("utf-8") 128 | 129 | assert len(key) <= length 130 | 131 | pad_len = length - len(key) 132 | if value: 133 | pad_bytes = value * (pad_len) 134 | else: 135 | pad_bytes = random_sys_bytes(pad_len) 136 | return key + pad_bytes 137 | 138 | 139 | def padding_size(length: int, block_size: int, ceil: bool = True) -> int: 140 | """Return minimum the multiple which is large or equal than the `length` 141 | 142 | Args: 143 | block_size (int): the length of bytes, no the length of bit 144 | """ 145 | 146 | remainder = length % block_size 147 | if ceil: 148 | return (block_size - remainder) * int(remainder != 0) + length 149 | else: 150 | return length - remainder 151 | 152 | 153 | def pkcs7_padding(data: bytes, block_size): 154 | """ 155 | Args: 156 | block_size (int): the length of bytes, no the length of bit 157 | """ 158 | 159 | padder = PKCS7(block_size * 8).padder() 160 | return padder.update(data) + padder.finalize() 161 | 162 | 163 | def pkcs7_unpadding(data: bytes, block_size): 164 | """ 165 | Args: 166 | block_size (int): the length of bytes, no the length of bit 167 | """ 168 | 169 | unpadder = PKCS7(block_size * 8).unpadder() 170 | return unpadder.update(data) + unpadder.finalize() 171 | 172 | 173 | def generate_salt(size: int = 8) -> bytes: 174 | return random_sys_bytes(size) 175 | 176 | 177 | # Generate key and iv with password and salt 178 | # https://security.stackexchange.com/a/117654 179 | # {{{ 180 | def generate_key_iv( 181 | password: bytes, salt: bytes, key_size: int, iv_size: int, algo: str = "md5" 182 | ) -> Tuple[bytes, bytes]: 183 | def hasher(algo: str, data: bytes) -> bytes: 184 | hashes = { 185 | "md5": hashlib.md5, 186 | "sha256": hashlib.sha256, 187 | "sha512": hashlib.sha512, 188 | } 189 | h = hashes[algo]() 190 | h.update(data) 191 | return h.digest() 192 | 193 | if algo == "md5": 194 | temp = pbkdf1("md5", password, salt, 1, 16) 195 | else: 196 | temp = b"" 197 | 198 | fd = temp 199 | while len(fd) < key_size + iv_size: 200 | 
temp = hasher(algo, temp + password + salt) 201 | fd += temp 202 | 203 | key = fd[0:key_size] 204 | iv = fd[key_size : key_size + iv_size] 205 | 206 | return key, iv 207 | 208 | 209 | # }}} 210 | 211 | 212 | class Cryptography(ABC): 213 | @abstractmethod 214 | def encrypt(self, data: bytes) -> bytes: 215 | pass 216 | 217 | @abstractmethod 218 | def decrypt(self, data: bytes) -> bytes: 219 | pass 220 | 221 | @abstractmethod 222 | def reset(self): 223 | pass 224 | 225 | @abstractmethod 226 | def finalize(self): 227 | """Finalize encryptor and decryptor, no return data""" 228 | 229 | 230 | class SimpleCryptography(Cryptography): 231 | """Simple Cryptography 232 | 233 | This crypto algorithm uses a random uint8 map to transfer an uint8 to another uint8. 234 | So, the decryption process does not depend on previous decrypted data. 235 | 236 | The algorithm is vulnerable, so NO using to encrypt important data. 237 | """ 238 | 239 | def __init__(self, key): 240 | self._c = _SimpleCryptography(key) 241 | self._key = key 242 | 243 | def encrypt(self, data: bytes) -> bytes: 244 | return self._c.encrypt(data) 245 | 246 | def decrypt(self, data: bytes) -> bytes: 247 | return self._c.decrypt(data) 248 | 249 | def reset(self): 250 | pass 251 | 252 | def finalize(self): 253 | pass 254 | 255 | 256 | class ChaCha20Cryptography(Cryptography): 257 | """ChaCha20 Cryptography 258 | 259 | ChaCha20 stream algorithm. 260 | 261 | The decryption process does depend on previous decrypted data. 262 | """ 263 | 264 | def __init__(self, key: bytes, nonce: bytes): 265 | assert len(key) == 32 266 | assert len(nonce) == 16 267 | 268 | self._key = key 269 | self._nonce = nonce 270 | self.reset() 271 | 272 | def encrypt(self, data: bytes) -> bytes: 273 | return self._encryptor.update(data) 274 | 275 | def decrypt(self, data: bytes) -> bytes: 276 | return self._decryptor.update(data) 277 | 278 | def reset(self): 279 | cipher = Cipher( 280 | algorithms.ChaCha20(self._key, self._nonce), 281 | mode=None, 282 | backend=default_backend(), 283 | ) 284 | self._encryptor = cipher.encryptor() 285 | self._decryptor = cipher.decryptor() 286 | 287 | def finalize(self): 288 | self._encryptor.finalize() 289 | self._decryptor.finalize() 290 | 291 | 292 | class AES256CBCCryptography(Cryptography): 293 | def __init__(self, key: bytes, iv: bytes): 294 | assert len(key) == 32 295 | assert len(iv) == 16 296 | 297 | self._key = key 298 | self._iv = iv 299 | self._mode = modes.CBC(iv) 300 | self.reset() 301 | 302 | def encrypt(self, data: bytes) -> bytes: 303 | assert len(data) % 16 == 0 304 | return self._encryptor.update(data) 305 | 306 | def decrypt(self, data: bytes) -> bytes: 307 | return self._decryptor.update(data) 308 | 309 | def reset(self): 310 | cipher = Cipher(algorithms.AES(self._key), mode=self._mode) 311 | self._encryptor = cipher.encryptor() 312 | self._decryptor = cipher.decryptor() 313 | 314 | def finalize(self): 315 | self._encryptor.finalize() 316 | self._decryptor.finalize() 317 | 318 | 319 | def aes256cbc_encrypt(data: bytes, key: bytes, iv: bytes): 320 | crypto = AES256CBCCryptography(key, iv) 321 | return crypto.encrypt(data) + crypto._encryptor.finalize() 322 | 323 | 324 | def aes256cbc_decrypt(data: bytes, key: bytes, iv: bytes): 325 | crypto = AES256CBCCryptography(key, iv) 326 | return crypto.decrypt(data) + crypto._decryptor.finalize() 327 | 328 | 329 | def generate_secp256k1_keys() -> Tuple[SigningKey, VerifyingKey]: 330 | private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1) 331 | public_key: VerifyingKey = 
cast(VerifyingKey, private_key.verifying_key) 332 | return private_key, public_key 333 | -------------------------------------------------------------------------------- /alipcs_py/common/date.py: -------------------------------------------------------------------------------- 1 | import time 2 | from datetime import datetime, timezone 3 | from dateutil import parser 4 | 5 | 6 | def now_timestamp() -> int: 7 | """Now timestamp (Second)""" 8 | 9 | return int(time.time()) 10 | 11 | 12 | def iso_8601_to_timestamp(date_string: str) -> int: 13 | """Convert ISO 8601 datetime string to the timestamp (integer) 14 | 15 | Args: 16 | date_string (str): ISO 8601 format. 17 | e.g. "2021-06-22T07:16:03Z" or "2021-06-22T07:16:03.032Z" 18 | """ 19 | 20 | date_obj = parser.parse(date_string) 21 | return int(date_obj.timestamp()) 22 | 23 | 24 | def timestamp_to_iso_8601(timestamp: int) -> str: 25 | return datetime.fromtimestamp(timestamp, timezone.utc).isoformat().replace("+00:00", ".000Z") 26 | -------------------------------------------------------------------------------- /alipcs_py/common/downloader.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Any, Callable 2 | from pathlib import Path 3 | 4 | from alipcs_py.common.io import RangeRequestIO 5 | from alipcs_py.common.concurrent import retry 6 | from alipcs_py.common.path import PathType 7 | 8 | 9 | DEFAULT_MAX_WORKERS = 5 10 | 11 | 12 | class MeDownloader: 13 | """Download the content from `range_request_io` to `localpath`""" 14 | 15 | def __init__( 16 | self, 17 | range_request_io: RangeRequestIO, 18 | localpath: PathType, 19 | continue_: bool = False, 20 | max_retries: int = 2, 21 | done_callback: Optional[Callable[..., Any]] = None, 22 | except_callback: Optional[Callable[[Exception], Any]] = None, 23 | ) -> None: 24 | self.range_request_io = range_request_io 25 | self.localpath = localpath 26 | self.continue_ = continue_ 27 | self.max_retries = max_retries 28 | self.done_callback = done_callback 29 | self.except_callback = except_callback 30 | 31 | def _init_fd(self): 32 | if self.continue_: 33 | path = Path(self.localpath) 34 | if self.range_request_io.seekable(): 35 | offset = path.stat().st_size if path.exists() else 0 36 | fd = path.open("ab") 37 | fd.seek(offset, 0) 38 | else: 39 | offset = 0 40 | fd = path.open("wb") 41 | else: 42 | offset = 0 43 | fd = open(self.localpath, "wb") 44 | 45 | self.offset = offset 46 | self.fd = fd 47 | 48 | def download(self): 49 | """Download the url content to `localpath` 50 | 51 | Args: 52 | continue_ (bool): If set to True, only downloading the remain content depended on 53 | the size of `localpath` 54 | """ 55 | 56 | @retry( 57 | self.max_retries, 58 | except_callback=lambda err, fails: ( 59 | self.range_request_io.reset(), 60 | self.except_callback(err) if self.except_callback else None, 61 | ), 62 | ) 63 | def _download(): 64 | self._init_fd() 65 | 66 | self.range_request_io.seek(self.offset) 67 | 68 | for buf in self.range_request_io.read_iter(): 69 | self.fd.write(buf) 70 | self.offset += len(buf) 71 | 72 | if self.done_callback: 73 | self.done_callback() 74 | 75 | self.fd.close() 76 | 77 | _download() 78 | -------------------------------------------------------------------------------- /alipcs_py/common/event.py: -------------------------------------------------------------------------------- 1 | from typing import List, Callable, Any 2 | import sys 3 | 4 | from alipcs_py.common.keyboard import KeyboardListener 5 | 6 | 7 | class 
KeyHandler: 8 | def __init__(self, key: str, callback: Callable[..., Any]): 9 | self._key = key 10 | self._callback = callback 11 | 12 | def handle(self, key: str): 13 | if self._key == key: 14 | self._callback(key) 15 | 16 | 17 | class KeyboardMonitor: 18 | KEY_HANDLERS: List[KeyHandler] = [] 19 | 20 | @classmethod 21 | def register(cls, key_handler: KeyHandler): 22 | cls.KEY_HANDLERS.append(key_handler) 23 | 24 | @classmethod 25 | def on(cls, key: str): 26 | for handler in cls.KEY_HANDLERS: 27 | handler.handle(key) 28 | 29 | 30 | global _KEYBOARD_LISTENER_STARTED 31 | _KEYBOARD_LISTENER_STARTED = False 32 | 33 | global _KEYBOARD_LISTENER 34 | _KEYBOARD_LISTENER = None 35 | 36 | 37 | def keyboard_listener_start(): 38 | global _KEYBOARD_LISTENER_STARTED 39 | if _KEYBOARD_LISTENER_STARTED: 40 | return 41 | 42 | # KeyboardListener is only available in a terminal 43 | if sys.stdin.isatty(): 44 | listener = KeyboardListener(on=KeyboardMonitor.on) 45 | listener.start() 46 | 47 | global _KEYBOARD_LISTENER 48 | _KEYBOARD_LISTENER = listener 49 | 50 | _KEYBOARD_LISTENER_STARTED = True 51 | -------------------------------------------------------------------------------- /alipcs_py/common/file_type.py: -------------------------------------------------------------------------------- 1 | MEDIA_EXTS = set( 2 | [ 3 | ".wma", 4 | ".wav", 5 | ".mp3", 6 | ".aac", 7 | ".ra", 8 | ".ram", 9 | ".mp2", 10 | ".ogg", 11 | ".aif", 12 | ".mpega", 13 | ".amr", 14 | ".mid", 15 | ".midi", 16 | ".m4a", 17 | ".m4v", 18 | ".wmv", 19 | ".rmvb", 20 | ".mpeg4", 21 | ".mpeg2", 22 | ".flv", 23 | ".avi", 24 | ".3gp", 25 | ".mpga", 26 | ".qt", 27 | ".rm", 28 | ".wmz", 29 | ".wmd", 30 | ".wvx", 31 | ".wmx", 32 | ".wm", 33 | ".swf", 34 | ".mpg", 35 | ".mp4", 36 | ".mkv", 37 | ".mpeg", 38 | ".mov", 39 | ".mdf", 40 | ".iso", 41 | ".asf", 42 | ".vob", 43 | ".ts", 44 | ] 45 | ) 46 | 47 | IMAGE_EXTS = set( 48 | [ 49 | ".jpg", 50 | ".jpeg", 51 | ".gif", 52 | ".bmp", 53 | ".png", 54 | ".jpe", 55 | ".cur", 56 | ".svg", 57 | ".svgz", 58 | ".tif", 59 | ".tiff", 60 | ".ico", 61 | ] 62 | ) 63 | 64 | DOC_EXTS = set( 65 | [ 66 | ".doc", 67 | ".docx", 68 | ".xls", 69 | ".xlsx", 70 | ".ppt", 71 | ".pptx", 72 | ".vsd", 73 | ".txt", 74 | ".pdf", 75 | ".ods", 76 | ".ots", 77 | ".odt", 78 | ".rtf", 79 | ".dot", 80 | ".dotx", 81 | ".odm", 82 | ".pps", 83 | ".pot", 84 | ".xlt", 85 | ".xltx", 86 | ".csv", 87 | ".ppsx", 88 | ".potx", 89 | ".epub", 90 | ".mobi", 91 | ".azw3", 92 | ".apk", 93 | ".exe", 94 | ".msi", 95 | ".ipa", 96 | ".torrent", 97 | ] 98 | ) 99 | 100 | ARCHIVE_EXTS = set( 101 | [ 102 | ".7z", 103 | ".a", 104 | ".ace", 105 | ".afa", 106 | ".alz", 107 | ".android", 108 | ".apk", 109 | ".ar", 110 | ".arc", 111 | ".arj", 112 | ".b1", 113 | ".b1", 114 | ".ba", 115 | ".bh", 116 | ".bz2", 117 | ".cab", 118 | ".cab", 119 | ".cfs", 120 | ".chm", 121 | ".cpio", 122 | ".cpt", 123 | ".cqm", 124 | ".dar", 125 | ".dd", 126 | ".dgc", 127 | ".dmg", 128 | ".ear", 129 | ".ecc", 130 | ".eqe", 131 | ".exe", 132 | ".f", 133 | ".gca", 134 | ".gz", 135 | ".ha", 136 | ".hki", 137 | ".html", 138 | ".ice", 139 | ".id", 140 | ".infl", 141 | ".iso", 142 | ".jar", 143 | ".kgb", 144 | ".lbr", 145 | ".lha", 146 | ".lqr", 147 | ".lz", 148 | ".lzh", 149 | ".lzma", 150 | ".lzo", 151 | ".lzx", 152 | ".mar", 153 | ".ms", 154 | ".net", 155 | ".package", 156 | ".pak", 157 | ".paq6", 158 | ".paq7", 159 | ".paq8", 160 | ".par", 161 | ".par2", 162 | ".partimg", 163 | ".pea", 164 | ".pim", 165 | ".pit", 166 | ".qda", 167 | ".rar", 168 | ".rk", 169 | ".rz", 170 | ".s7z", 171 | 
".sda", 172 | ".sea", 173 | ".sen", 174 | ".sfark", 175 | ".sfx", 176 | ".shar", 177 | ".sit", 178 | ".sitx", 179 | ".sqx", 180 | ".tar", 181 | ".tbz2", 182 | ".tgz", 183 | ".tlz", 184 | ".tqt", 185 | ".uc", 186 | ".uc0", 187 | ".uc2", 188 | ".uca", 189 | ".ucn", 190 | ".ue2", 191 | ".uha", 192 | ".ur2", 193 | ".war", 194 | ".web", 195 | ".wim", 196 | ".x", 197 | ".xar", 198 | ".xp3", 199 | ".xz", 200 | ".yz1", 201 | ".z", 202 | ".zip", 203 | ".zipx", 204 | ".zoo", 205 | ".zpaq", 206 | ".zz", 207 | ] 208 | ) 209 | -------------------------------------------------------------------------------- /alipcs_py/common/keyboard.py: -------------------------------------------------------------------------------- 1 | # https://stackoverflow.com/a/22085679/2478637 2 | # http://simondlevy.academic.wlu.edu/files/software/kbhit.py 3 | 4 | from typing import Callable, Any 5 | 6 | import os 7 | import threading 8 | import time 9 | 10 | # Windows 11 | if os.name == "nt": 12 | import msvcrt 13 | 14 | # Posix (Linux, OS X) 15 | else: 16 | import sys 17 | import termios 18 | import atexit 19 | from select import select 20 | 21 | 22 | class KeyboardListener(threading.Thread): 23 | def __init__(self, on: Callable[[str], Any]): 24 | """Creates a KeyboardListener object that you can call to do various keyboard things.""" 25 | super().__init__() 26 | 27 | self._on = on 28 | self._mt = threading.main_thread() 29 | 30 | if os.name == "nt": 31 | pass 32 | else: 33 | # Save the terminal settings 34 | self.fd = sys.stdin.fileno() 35 | self.new_term = termios.tcgetattr(self.fd) 36 | self.old_term = termios.tcgetattr(self.fd) 37 | 38 | # New terminal setting unbuffered 39 | self.new_term[3] = self.new_term[3] & ~termios.ICANON & ~termios.ECHO 40 | termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.new_term) 41 | 42 | # Support normal-terminal reset at exit 43 | atexit.register(self.set_normal_term) 44 | 45 | def set_normal_term(self): 46 | """Resets to normal terminal. On Windows this is a no-op.""" 47 | 48 | if os.name == "nt": 49 | pass 50 | else: 51 | termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old_term) 52 | 53 | def getch(self): 54 | """Returns a keyboard character after kbhit() has been called. 55 | Should not be called in the same program as getarrow(). 56 | """ 57 | 58 | if os.name == "nt": 59 | return msvcrt.getch().decode("utf-8") 60 | else: 61 | return sys.stdin.read(1) 62 | 63 | def getarrow(self): 64 | """Returns an arrow-key code after kbhit() has been called. Codes are 65 | 0 : up 66 | 1 : right 67 | 2 : down 68 | 3 : left 69 | Should not be called in the same program as getch(). 
70 | """ 71 | 72 | if os.name == "nt": 73 | msvcrt.getch() # skip 0xE0 74 | c = msvcrt.getch() 75 | vals = [72, 77, 80, 75] 76 | else: 77 | c = sys.stdin.read(3)[2] 78 | vals = [65, 67, 66, 68] 79 | 80 | return vals.index(ord(c.decode("utf-8"))) 81 | 82 | def kbhit(self): 83 | """Returns True if keyboard character was hit, False otherwise.""" 84 | if os.name == "nt": 85 | return msvcrt.kbhit() 86 | 87 | else: 88 | dr, dw, de = select([sys.stdin], [], [], 0) 89 | return dr != [] 90 | 91 | def run(self): 92 | while True: 93 | if self.kbhit(): 94 | c = self.getch() 95 | self._on(c) 96 | else: 97 | time.sleep(0.1) 98 | 99 | # Exit when main_thread exited 100 | if not self._mt.is_alive(): 101 | break 102 | 103 | self.set_normal_term() 104 | -------------------------------------------------------------------------------- /alipcs_py/common/log.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | from pathlib import Path 3 | from os import PathLike 4 | import logging 5 | from logging import Logger 6 | 7 | from typing_extensions import Literal, Final 8 | 9 | 10 | TLogLevel = Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] 11 | LogLevels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] 12 | DEFAULT_LOG_LEVEL: Final = "ERROR" 13 | 14 | _LOG_FORMAT = "%(asctime)-15s | %(levelname)s | %(module)s: %(message)s" 15 | 16 | 17 | def get_logger( 18 | name: str, 19 | fmt: str = _LOG_FORMAT, 20 | filename: Optional[PathLike] = None, 21 | level: TLogLevel = DEFAULT_LOG_LEVEL, 22 | ) -> Logger: 23 | logger = logging.getLogger(name) 24 | logger.setLevel(level) 25 | 26 | stream_handler = logging.StreamHandler() # stdout 27 | stream_handler.setFormatter(logging.Formatter(fmt)) 28 | logger.addHandler(stream_handler) 29 | 30 | if filename: 31 | filename = Path(filename) 32 | _dir = filename.parent 33 | if not _dir.exists(): 34 | _dir.mkdir(parents=True, exist_ok=True) 35 | 36 | file_handler = logging.FileHandler(filename) 37 | file_handler.setFormatter(logging.Formatter(fmt)) 38 | logger.addHandler(file_handler) 39 | 40 | return logger 41 | -------------------------------------------------------------------------------- /alipcs_py/common/net.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | import requests 4 | import requests.adapters 5 | 6 | 7 | def avail_port(port: int) -> bool: 8 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: 9 | return s.connect_ex(("localhost", port)) != 0 10 | 11 | 12 | def random_avail_port() -> int: 13 | """Find a random available port using port 0 14 | 15 | https://www.lifewire.com/port-0-in-tcp-and-udp-818145 16 | Port 0 is a wildcard port that tells the system to find a suitable port number. 17 | Unix, Windows, and other operating systems vary in the handling of port 0, but 18 | the same general convention applies. 
19 | """ 20 | 21 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: 22 | s.bind(("127.0.0.1", 0)) 23 | s.listen() 24 | _, port = s.getsockname() 25 | return port 26 | 27 | 28 | def make_http_session( 29 | max_keepalive_connections: int = 50, 30 | max_connections: int = 50, 31 | keepalive_expiry: float = 10 * 60, 32 | max_retries: int = 2, 33 | ) -> requests.Session: 34 | """Make a http session with keepalive connections, maximum connections and retries""" 35 | 36 | session = requests.Session() 37 | adapter = requests.adapters.HTTPAdapter( 38 | pool_connections=max_keepalive_connections, 39 | pool_maxsize=max_connections, 40 | max_retries=max_retries, 41 | ) 42 | session.mount("http://", adapter) 43 | session.mount("https://", adapter) 44 | return session 45 | -------------------------------------------------------------------------------- /alipcs_py/common/number.py: -------------------------------------------------------------------------------- 1 | import struct 2 | 3 | 4 | def u64_to_u8x8(u64: int) -> bytes: 5 | return struct.pack("!Q", u64) 6 | 7 | 8 | def u8x8_to_u64(u8x8: bytes) -> int: 9 | return struct.unpack("!Q", u8x8)[0] 10 | -------------------------------------------------------------------------------- /alipcs_py/common/path.py: -------------------------------------------------------------------------------- 1 | from typing import Tuple, Union 2 | from pathlib import Path, PurePosixPath 3 | from os import PathLike 4 | 5 | from alipcs_py.common.platform import IS_WIN 6 | 7 | PathType = Union[str, PathLike, Path] 8 | 9 | 10 | def exists(localpath: PathType) -> bool: 11 | localpath = Path(localpath) 12 | return localpath.exists() 13 | 14 | 15 | def is_file(localpath: PathType) -> bool: 16 | localpath = Path(localpath) 17 | return localpath.is_file() 18 | 19 | 20 | def is_dir(localpath: PathType) -> bool: 21 | localpath = Path(localpath) 22 | return localpath.is_dir() 23 | 24 | 25 | # TODO: Change function name to `join_path_as_posix` 26 | def join_path(parent: PathType, *children: PathType) -> str: 27 | """Join posix paths""" 28 | 29 | _path = Path(parent) 30 | for child in children: 31 | _path = _path / child 32 | 33 | path = _path.as_posix() 34 | has_root = path.startswith("/") 35 | if not has_root: 36 | path = "/" + path 37 | 38 | path = Path(path).resolve().as_posix() 39 | 40 | if IS_WIN: 41 | p = path.split(":", 1)[-1] 42 | if not has_root: 43 | return p[1:] 44 | else: 45 | return p 46 | else: 47 | if not has_root: 48 | return path[1:] 49 | else: 50 | return path 51 | 52 | 53 | def split_posix_path(path: PathType) -> Tuple[str, ...]: 54 | return PurePosixPath(path).parts 55 | 56 | 57 | def posix_path_basename(path: PathType) -> str: 58 | return PurePosixPath(path).name 59 | 60 | 61 | def posix_path_dirname(path: PathType) -> str: 62 | return PurePosixPath(path).parent.as_posix() 63 | -------------------------------------------------------------------------------- /alipcs_py/common/platform.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | IS_WIN = sys.platform.startswith("win") 4 | 5 | IS_LINUX = sys.platform.startswith("linux") 6 | 7 | IS_MACOS = sys.platform.startswith("darwin") 8 | -------------------------------------------------------------------------------- /alipcs_py/common/progress_bar.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from rich.progress import ( 4 | Progress, 5 | SpinnerColumn, 6 | TextColumn, 7 | 
BarColumn, 8 | DownloadColumn, 9 | TransferSpeedColumn, 10 | TimeRemainingColumn, 11 | TaskID, 12 | ) 13 | from rich.table import Column 14 | 15 | _progress = Progress( 16 | SpinnerColumn(), 17 | TextColumn("[bold blue]{task.fields[title]}", justify="right", table_column=Column(overflow="fold")), 18 | BarColumn(bar_width=40), 19 | "[progress.percentage]{task.percentage:>3.1f}%", 20 | "•", 21 | DownloadColumn(binary_units=True), 22 | "•", 23 | TransferSpeedColumn(), 24 | "•", 25 | TimeRemainingColumn(), 26 | ) 27 | 28 | 29 | def init_progress_bar(): 30 | if not _progress.live._started: 31 | _progress.start() 32 | 33 | 34 | def exit_progress_bar(): 35 | if _progress.live._started: 36 | _progress.stop() 37 | 38 | 39 | def progress_task_exists(task_id: Optional[TaskID]) -> bool: 40 | if task_id is None: 41 | return False 42 | return task_id in _progress.task_ids 43 | 44 | 45 | def remove_progress_task(task_id: Optional[TaskID]): 46 | if task_id is not None and progress_task_exists(task_id): 47 | _progress.remove_task(task_id) 48 | 49 | 50 | def reset_progress_task(task_id: Optional[TaskID]): 51 | if task_id is not None and progress_task_exists(task_id): 52 | _progress.reset(task_id) 53 | -------------------------------------------------------------------------------- /alipcs_py/common/simple_cipher.pyx: -------------------------------------------------------------------------------- 1 | import random 2 | import copy 3 | 4 | cdef crypt(unsigned char *data, unsigned char *byte_map, int len_): 5 | cdef unsigned int c 6 | 7 | for i in range(len_): 8 | c = data[i] 9 | data[i] = byte_map[c] 10 | 11 | 12 | class SimpleCryptography: 13 | """Simple Cryptography 14 | 15 | This crypto algorithm uses a random uint8 map to transfer an uint8 to another uint8. 16 | So, the decryption process does not depend on previous decrypted data. 17 | 18 | The algorithm is vulnerable, so NO using to encrypt important data. 
19 | """ 20 | 21 | def __init__(self, key): 22 | rg = random.Random() 23 | rg.seed(key, version=2) 24 | 25 | _byte_map = list(range(1 << 8)) 26 | rg.shuffle(_byte_map) 27 | 28 | # encrypt_byte_map[ori_char] -> encrypted_char 29 | self._encrypt_byte_map = bytes(bytearray(_byte_map)) 30 | 31 | # decrypt_byte_map[encrypted_char] -> ori_char 32 | self._decrypt_byte_map = bytes( 33 | bytearray([c for _, c in sorted(zip(_byte_map, range(1 << 8)))]) 34 | ) 35 | 36 | self._key = key 37 | 38 | def encrypt(self, data): 39 | data = bytes(bytearray(data)) # copy 40 | crypt(data, self._encrypt_byte_map, len(data)) 41 | return data 42 | 43 | def decrypt(self, data): 44 | data = bytes(bytearray(data)) # copy 45 | crypt(data, self._decrypt_byte_map, len(data)) 46 | return data 47 | 48 | def reset(self): 49 | pass 50 | 51 | -------------------------------------------------------------------------------- /alipcs_py/common/url.py: -------------------------------------------------------------------------------- 1 | def is_magnet(url: str) -> bool: 2 | return url[:7].lower() == "magnet:" 3 | -------------------------------------------------------------------------------- /alipcs_py/common/util.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Any 2 | import json 3 | 4 | 5 | def json_dumps(obj: Any) -> str: 6 | return json.dumps(obj, separators=(",", ":"), ensure_ascii=False, sort_keys=True) 7 | 8 | 9 | def json_dump_values(obj: Dict[str, Any]) -> Dict[str, str]: 10 | """Convert dict's values which are list or dict as json string""" 11 | 12 | new_obj = dict(obj) 13 | for k, v in new_obj.items(): 14 | if isinstance(v, (dict, list)): 15 | new_obj[k] = json.dumps(v) 16 | return new_obj 17 | -------------------------------------------------------------------------------- /alipcs_py/config/__init__.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from types import SimpleNamespace 3 | import os 4 | import json 5 | 6 | import toml # type: ignore 7 | 8 | 9 | def _to_buildin(obj: Any) -> Any: 10 | if isinstance(obj, SimpleNamespace): 11 | data = dict(obj.__dict__) 12 | for field in getattr(obj, "__annotations__", {}): 13 | val = getattr(obj, field) 14 | data[field] = _to_buildin(val) 15 | return data 16 | elif isinstance(obj, list): 17 | return [_to_buildin(item) for item in obj] 18 | elif isinstance(obj, dict): 19 | return {k: _to_buildin(v) for k, v in obj.items()} 20 | else: 21 | return obj 22 | 23 | 24 | class Share(SimpleNamespace): 25 | """Share Configuration""" 26 | 27 | store: bool = False 28 | 29 | 30 | class AppConfig(SimpleNamespace): 31 | """App Configuration""" 32 | 33 | share: Share = Share() 34 | 35 | @classmethod 36 | def load(cls, path: str): 37 | if os.path.exists(path): 38 | return json.loads( 39 | json.dumps(toml.load(path)), 40 | object_hook=lambda d: SimpleNamespace(**d), 41 | ) 42 | else: 43 | return cls() 44 | 45 | def dumps(self): 46 | return toml.dumps(_to_buildin(self)) 47 | 48 | def dump(self, path: str): 49 | with open(path, "w") as f: 50 | toml.dump(_to_buildin(self), f) 51 | -------------------------------------------------------------------------------- /alipcs_py/storage/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PeterDing/AliPCS-Py/beeb515ab8f963c795f99c82d2e3a88487b0f0ae/alipcs_py/storage/__init__.py 
-------------------------------------------------------------------------------- /alipcs_py/storage/store.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Tuple, List, Any 2 | 3 | from peewee import SQL 4 | 5 | from alipcs_py.alipcs.api import AliPCSApiMix 6 | from alipcs_py.alipcs.inner import PcsSharedLinkInfo, PcsFile 7 | from alipcs_py.storage.tables import PcsSharedLinkInfoTable, PcsFileTable 8 | 9 | 10 | class SharedStore: 11 | def __init__(self) -> None: 12 | pass 13 | 14 | def get_shared_link_info(self, share_id: str) -> Optional[PcsSharedLinkInfo]: 15 | ins = PcsSharedLinkInfoTable.get_or_none(share_id=share_id) 16 | if ins: 17 | return ins.to_pcs() 18 | else: 19 | return None 20 | 21 | def add_shared_link_info(self, pcs_shared_link_info: PcsSharedLinkInfo) -> Any: 22 | pcs_shared_link_info_ins, _ = PcsSharedLinkInfoTable.get_or_create( 23 | **{k: getattr(pcs_shared_link_info, k) for k in pcs_shared_link_info.__dataclass_fields__} 24 | ) 25 | return pcs_shared_link_info_ins 26 | 27 | def add_shared_file(self, share_id: str, pcs_file: PcsFile) -> Any: 28 | if PcsFileTable.get_or_none(file_id=pcs_file.file_id): 29 | # The file exists 30 | return 31 | 32 | pcs_shared_link_info_ins = PcsSharedLinkInfoTable.get_or_none(share_id=share_id) 33 | 34 | pcs_file_ins, _ = PcsFileTable.get_or_create( 35 | **{k: getattr(pcs_file, k) for k in pcs_file.__dataclass_fields__}, 36 | shared_link_info_id=pcs_shared_link_info_ins.id, 37 | ) 38 | return pcs_file_ins 39 | 40 | def delete_shared_links(self, *share_ids: str) -> None: 41 | PcsSharedLinkInfoTable.delete().where(PcsSharedLinkInfoTable.share_id.in_(share_ids)).execute() 42 | 43 | def delete_shared_files(self, *file_ids: str) -> None: 44 | PcsFileTable.delete().where(PcsFileTable.file_id.in_(file_ids)).execute() 45 | 46 | def search_shared_links( 47 | self, *keywords: str, fields: List[str] = ["share_name", "display_name"] 48 | ) -> List[PcsSharedLinkInfo]: 49 | sql = " OR ".join([f"`{f}` like ?" for f in fields * len(keywords)]) 50 | query = PcsSharedLinkInfoTable.select().where(SQL(sql, [f"%{keyword}%" for keyword in keywords] * len(fields))) 51 | return [item.to_pcs() for item in query] 52 | 53 | def search_shared_files( 54 | self, 55 | *keywords: str, 56 | fields: List[str] = ["name", "path"], 57 | share_ids: List[str] = [], 58 | ) -> List[Tuple[PcsFile, PcsSharedLinkInfo]]: 59 | sql = " OR ".join([f"`{f}` like ?" 
for f in fields * len(keywords)]) 60 | query = PcsFileTable.select().join( 61 | PcsSharedLinkInfoTable, 62 | on=(PcsFileTable.shared_link_info_id == PcsSharedLinkInfoTable.id), 63 | ) 64 | if share_ids: 65 | query = query.where( 66 | PcsSharedLinkInfoTable.share_id.in_(share_ids) 67 | & SQL(sql, [f"%{keyword}%" for keyword in keywords] * len(fields)) 68 | ) 69 | else: 70 | query = query.where(SQL(sql, [f"%{keyword}%" for keyword in keywords] * len(fields))) 71 | return [(item.to_pcs(), item.shared_link_info_id.to_pcs()) for item in query] 72 | 73 | def list_shared_links( 74 | self, 75 | by_id: bool = False, 76 | by_name: bool = False, 77 | limit: int = 100, 78 | offset: int = 0, 79 | ) -> List[PcsSharedLinkInfo]: 80 | query = PcsSharedLinkInfoTable.select() 81 | if by_name: 82 | query = query.order_by(PcsSharedLinkInfoTable.share_name) 83 | else: 84 | # Default by id 85 | query = query.order_by(PcsSharedLinkInfoTable.id) 86 | 87 | query = query.limit(limit).offset(offset) 88 | 89 | return [item.to_pcs() for item in query] 90 | 91 | def list_shared_files( 92 | self, 93 | share_ids: List[str] = [], 94 | by_id: bool = False, 95 | by_name: bool = False, 96 | by_path: bool = False, 97 | limit: int = 100, 98 | offset: int = 0, 99 | ) -> List[Tuple[PcsFile, PcsSharedLinkInfo]]: 100 | query = PcsFileTable.select().join( 101 | PcsSharedLinkInfoTable, 102 | on=(PcsFileTable.shared_link_info_id == PcsSharedLinkInfoTable.id), 103 | ) 104 | if share_ids: 105 | query = query.where(PcsSharedLinkInfoTable.share_id.in_(share_ids)) 106 | if by_name: 107 | query = query.order_by(PcsFileTable.name) 108 | elif by_path: 109 | query = query.order_by(PcsFileTable.path) 110 | else: 111 | # Default by id 112 | query = query.order_by(PcsFileTable.id) 113 | 114 | # If limit is 0, selecting all items 115 | if limit > 0: 116 | query = query.limit(limit).offset(offset) 117 | 118 | return [(item.to_pcs(), item.shared_link_info_id.to_pcs()) for item in query] 119 | 120 | 121 | class AliPCSApiMixWithSharedStore(AliPCSApiMix): 122 | """AliPCS API Mix with SharedStore 123 | 124 | Hooking the `AliPCSApiMix.list` to store the shared file infos 125 | """ 126 | 127 | def __init__(self, *args, **kwargs): 128 | super().__init__(*args, **kwargs) 129 | 130 | self._sharedstore: Optional[SharedStore] = None 131 | 132 | @property 133 | def sharedstore(self) -> Optional[SharedStore]: 134 | return self._sharedstore 135 | 136 | def get_share_token(self, share_id: str, share_password: str = "") -> str: 137 | token = super().get_share_token(share_id, share_password=share_password) 138 | if not self._sharedstore: 139 | return token 140 | 141 | if token: 142 | shared_link_info = self._sharedstore.get_shared_link_info(share_id) 143 | if not shared_link_info: 144 | shared_link_info = self.shared_info(share_id) 145 | self._sharedstore.add_shared_link_info(shared_link_info) 146 | 147 | return token 148 | 149 | def meta(self, file_id: str, share_id: Optional[str] = None) -> Optional[PcsFile]: 150 | pcs_file = super().meta(file_id, share_id=share_id) 151 | if pcs_file is None: 152 | return None 153 | if not self._sharedstore: 154 | return pcs_file 155 | 156 | if share_id: 157 | self._sharedstore.add_shared_file(share_id, pcs_file) 158 | 159 | return pcs_file 160 | 161 | def list(self, *args, **kwargs): 162 | share_id = kwargs.get("share_id") 163 | 164 | pcs_files, next_marker = super().list(*args, **kwargs) 165 | if not self._sharedstore: 166 | return pcs_files, next_marker 167 | 168 | if share_id: 169 | for pcs_file in pcs_files: 170 | 
self._sharedstore.add_shared_file(share_id, pcs_file) 171 | 172 | return pcs_files, next_marker 173 | -------------------------------------------------------------------------------- /alipcs_py/storage/tables.py: -------------------------------------------------------------------------------- 1 | import typing 2 | from typing import Tuple, List, Dict, Any 3 | from peewee import ( 4 | Model, 5 | CharField, 6 | IntegerField, 7 | Database, 8 | SqliteDatabase, 9 | TextField, 10 | SmallIntegerField, 11 | ForeignKeyField, 12 | ) 13 | from playhouse.migrate import SchemaMigrator, SqliteMigrator, migrate 14 | 15 | from alipcs_py.alipcs.inner import PcsFile, PcsSharedLinkInfo 16 | from alipcs_py.common.util import json_dump_values 17 | 18 | 19 | class Deserializer: 20 | @classmethod 21 | def pcs_item(cls) -> Any: 22 | raise NotImplementedError() 23 | 24 | def to_pcs(self) -> PcsSharedLinkInfo: 25 | pcs_item = self.pcs_item() 26 | data: Dict[str, Any] = {} 27 | columns = self.__class__._meta.columns # type: ignore 28 | pcs_fields = pcs_item.__dataclass_fields__ 29 | for col in columns.keys(): 30 | if col in pcs_fields: 31 | val = getattr(self, col) 32 | if pcs_fields[col].type == typing.Optional[bool] or pcs_fields[col].type == bool: 33 | if val is None: 34 | data[col] = None 35 | else: 36 | data[col] = bool(val) 37 | else: 38 | data[col] = val 39 | return pcs_item(**data) 40 | 41 | 42 | class PcsSharedLinkInfoTable(Deserializer, Model): 43 | share_id = CharField(null=False, unique=True) 44 | share_pwd = CharField(null=True) 45 | share_name = CharField(null=True) 46 | display_name = CharField(null=True) 47 | file_count = IntegerField(null=True) 48 | file_infos = TextField(null=True) # json 49 | expiration = IntegerField(null=True) 50 | updated_at = IntegerField(null=True) 51 | vip = CharField(null=True) 52 | avatar = CharField(null=True) 53 | is_following_creator = SmallIntegerField(null=True) 54 | creator_id = CharField(null=True) 55 | creator_name = CharField(null=True) 56 | creator_phone = CharField(null=True) 57 | 58 | class Meta: 59 | indexes = ( 60 | # create a non-unique 61 | (("share_id",), False), 62 | (("share_name",), False), 63 | (("display_name",), False), 64 | ) 65 | 66 | @classmethod 67 | def get_or_create(cls, **kwargs): 68 | kwargs = json_dump_values(kwargs) 69 | return super().get_or_create(**kwargs) 70 | 71 | @classmethod 72 | def pcs_item(cls) -> Any: 73 | return PcsSharedLinkInfo 74 | 75 | 76 | class PcsFileTable(Deserializer, Model): 77 | file_id = CharField(null=False, unique=True) 78 | name = CharField(null=False) 79 | parent_file_id = CharField(null=False) 80 | type = CharField(null=False) 81 | is_dir = SmallIntegerField(null=False) 82 | is_file = SmallIntegerField(null=False) 83 | size = IntegerField(null=True) 84 | path = CharField(null=True) 85 | 86 | created_at = IntegerField(null=True) 87 | updated_at = IntegerField(null=True) 88 | 89 | file_extension = CharField(null=True) 90 | content_type = CharField(null=True) 91 | mime_type = CharField(null=True) 92 | mime_extension = CharField(null=True) 93 | labels = CharField(null=True) 94 | 95 | status = CharField(null=True) 96 | hidden = IntegerField(null=True) 97 | starred = IntegerField(null=True) 98 | category = CharField(null=True) 99 | punish_flag = IntegerField(null=True) 100 | encrypt_mode = CharField(null=True) 101 | 102 | drive_id = CharField(null=True) 103 | domain_id = CharField(null=True) 104 | upload_id = CharField(null=True) 105 | async_task_id = CharField(null=True) 106 | 107 | rapid_upload_info = 
CharField(null=True) 108 | download_url = CharField(null=True) 109 | 110 | shared_link_info_id = ForeignKeyField(PcsSharedLinkInfoTable) 111 | 112 | class Meta: 113 | indexes = ( 114 | # create a non-unique 115 | (("file_id",), False), 116 | (("name",), False), 117 | (("is_dir",), False), 118 | (("is_file",), False), 119 | (("path",), False), 120 | (("file_extension",), False), 121 | ) 122 | 123 | @classmethod 124 | def get_or_create(cls, **kwargs): 125 | kwargs = json_dump_values(kwargs) 126 | return super().get_or_create(**kwargs) 127 | 128 | @classmethod 129 | def pcs_item(cls) -> Any: 130 | return PcsFile 131 | 132 | 133 | def connect_sqlite(path: str) -> Tuple[Database, SchemaMigrator]: 134 | db = SqliteDatabase(path) 135 | return db, SqliteMigrator(db) 136 | 137 | 138 | def bind_tables(tables: List[type], db: Database): 139 | db.bind(tables) 140 | 141 | 142 | def create_tables(tables: List[type], db: Database): 143 | db.create_tables(tables) 144 | 145 | 146 | def get_db_field(tp: type): 147 | if tp == str: 148 | return CharField(null=True) 149 | elif tp == typing.Optional[str]: 150 | return CharField(null=True) 151 | 152 | elif tp == typing.List[str]: 153 | return TextField(null=True) 154 | 155 | elif tp == int: 156 | return IntegerField(null=True) 157 | elif tp == typing.Optional[int]: 158 | return IntegerField(null=True) 159 | 160 | elif tp == bool: 161 | return IntegerField(null=True) 162 | elif tp == typing.Optional[int]: 163 | return IntegerField(null=True) 164 | 165 | else: 166 | raise ValueError("Unsupported type: %s", tp) 167 | 168 | 169 | def modify_table(table: Any, db: Database, migrator: SchemaMigrator): 170 | table_name = table._meta.name # type: ignore 171 | pcs_fields = table.pcs_item().__dataclass_fields__ 172 | columns = set([c.name for c in db.get_columns(table_name)]) 173 | 174 | for name, field in pcs_fields.items(): 175 | if name not in columns: 176 | db_field = get_db_field(field.type) 177 | migrate(migrator.add_column(table_name, name, db_field)) 178 | -------------------------------------------------------------------------------- /alipcs_py/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | import string 4 | import math 5 | 6 | 7 | def dump_json(obj) -> str: 8 | return json.dumps(obj, separators=(",", ":"), ensure_ascii=False) 9 | 10 | 11 | def format_date(timestramp: int) -> str: 12 | return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(timestramp)) 13 | 14 | 15 | def format_time(seconds: int) -> str: 16 | tm = seconds 17 | ft = "" 18 | for unit in [60, 60, 24]: 19 | ft = f"{tm % unit:0>2}" + ft 20 | if unit != 24: 21 | ft = ":" + ft 22 | tm //= unit 23 | 24 | ft = f"{tm}days {ft}" 25 | return ft 26 | 27 | 28 | def human_size(size: int) -> str: 29 | s = float(size) 30 | v = "" 31 | t = "" 32 | for t in ["B", "KB", "MB", "GB", "TB"]: 33 | if s < 1024.0: 34 | v = f"{s:3.1f}" 35 | break 36 | s /= 1024.0 37 | if v.endswith(".0"): 38 | v = v[:-2] 39 | return f"{v} {t}" 40 | 41 | 42 | _nums_set = set(string.digits + ".") 43 | 44 | 45 | def human_size_to_int(size_str: str) -> int: 46 | size_str = size_str.strip() 47 | if not size_str: 48 | return 0 49 | 50 | i = 0 51 | while i < len(size_str): 52 | if size_str[i] in _nums_set: 53 | i += 1 54 | continue 55 | else: 56 | break 57 | 58 | if i == 0: 59 | return 0 60 | 61 | s = float(size_str[:i]) 62 | _s = s 63 | 64 | unit = size_str[i:].upper().replace(" ", "") 65 | if not unit: 66 | return math.floor(_s) 67 | 68 | for t in ["KB", "MB", 
"GB", "TB"]: 69 | _s *= 1024 70 | if unit == t or unit[0] == t[0]: 71 | return math.floor(_s) 72 | 73 | return math.floor(s) 74 | -------------------------------------------------------------------------------- /build.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from setuptools import setup 4 | from Cython.Build import cythonize 5 | from distutils.command.build_ext import build_ext 6 | 7 | 8 | def build(setup_kwargs): 9 | extensions = ["alipcs_py/common/simple_cipher.pyx"] 10 | 11 | # gcc arguments hack: enable optimizations 12 | os.environ["CFLAGS"] = "-O3" 13 | 14 | ext_modules = cythonize( 15 | extensions, 16 | language_level=3, 17 | compiler_directives={"linetrace": True}, 18 | ) 19 | 20 | ext_modules[0].name = "alipcs_py.common.simple_cipher" 21 | 22 | # Build 23 | setup_kwargs.update( 24 | { 25 | "ext_modules": ext_modules, 26 | "cmdclass": {"build_ext": build_ext}, 27 | } 28 | ) 29 | 30 | 31 | if __name__ == "__main__": 32 | setup_kwargs = {} 33 | build(setup_kwargs) 34 | setup(**setup_kwargs) 35 | -------------------------------------------------------------------------------- /imgs/refresh_token.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PeterDing/AliPCS-Py/beeb515ab8f963c795f99c82d2e3a88487b0f0ae/imgs/refresh_token.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "AliPCS-Py" 3 | homepage = "https://github.com/PeterDing/AliPCS-Py" 4 | version = "0.8.1" 5 | description = "Ali Pcs Api and App" 6 | authors = ["PeterDing "] 7 | license = "MIT" 8 | readme = "README.md" 9 | classifiers = [ 10 | "Environment :: Console", 11 | "Intended Audience :: Developers", 12 | "Operating System :: Microsoft :: Windows", 13 | "Operating System :: MacOS", 14 | "Operating System :: POSIX :: Linux", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.8", 17 | "Programming Language :: Python :: 3.9", 18 | "Programming Language :: Python :: 3.10", 19 | "Programming Language :: Python :: 3.11", 20 | "Programming Language :: Python :: 3.12", 21 | ] 22 | 23 | [tool.black] 24 | line-length = 119 25 | 26 | [tool.ruff] 27 | lint.ignore = ["E501", "E402", "F401", "F403", "F841"] 28 | line-length = 119 29 | 30 | [tool.poetry.dependencies] 31 | python = "^3.8" 32 | requests = ">=2.31" 33 | requests-toolbelt = ">=1.0" 34 | peewee = ">=3.17" 35 | toml = ">=0.10" 36 | python-dateutil = ">=2.8" 37 | qrcode = ">=7.4" 38 | rich = ">=13.7" 39 | pillow = ">=10.1" 40 | click = ">=8.1" 41 | typing-extensions = ">=4.8" 42 | aget = ">=0.2" 43 | chardet = ">=5.2" 44 | fastapi = ">=0.104" 45 | uvicorn = ">=0.24" 46 | jinja2 = ">=3.1" 47 | cryptography = ">=41.0" 48 | ecdsa = ">=0.18" 49 | cython = ">=3.0" 50 | passlib = ">=1.7" 51 | 52 | [tool.poetry.group.dev.dependencies] 53 | pytest = ">=7.4" 54 | pytest-cov = ">=5.0" 55 | Faker = ">=24" 56 | ruff = ">=0.3" 57 | setuptools = ">=69.0" 58 | cython = ">=3.0" 59 | 60 | [tool.poetry.build] 61 | script = "build.py" 62 | generate-setup-file = true 63 | 64 | [tool.poetry.scripts] 65 | AliPCS-Py = 'alipcs_py.app:main' 66 | 67 | [tool.pyright] 68 | reportGeneralTypeIssues = true 69 | 70 | [build-system] 71 | requires = ["poetry-core", "cython", "wheel", "setuptools"] 72 | -------------------------------------------------------------------------------- 
/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # This is a shim to hopefully allow Github to detect the package, build is done with poetry 4 | 5 | import setuptools 6 | 7 | if __name__ == "__main__": 8 | setuptools.setup(name="alipcs-py") 9 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PeterDing/AliPCS-Py/beeb515ab8f963c795f99c82d2e3a88487b0f0ae/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | from alipcs_py import AliPCS, AliPCSApi 5 | from alipcs_py.commands.upload import upload, from_tos 6 | from alipcs_py.common.platform import IS_WIN 7 | 8 | from tests.datas import REFRESH_TOKEN, Datas 9 | 10 | import pytest 11 | 12 | 13 | TEST_ROOT = "/AliPCS-Py-test" 14 | LOCAL_DIR = Path("tests", "test-datas", "demo-directory") 15 | TEST_DATA_PATH = LOCAL_DIR.parent / "demo-directory.tar.gz" 16 | 17 | 18 | @pytest.fixture(scope="session") 19 | def uncompress_test_data(): 20 | if LOCAL_DIR.exists(): 21 | if IS_WIN: 22 | os.system(f"rd /s /q {LOCAL_DIR}") 23 | else: 24 | os.system(f"rm -rf {LOCAL_DIR}") 25 | 26 | assert TEST_DATA_PATH.exists() 27 | if IS_WIN: 28 | os.system(f"tar -xf {TEST_DATA_PATH} -C tests\\test-datas") 29 | else: 30 | os.system(f"tar -xf {TEST_DATA_PATH} -C tests/test-datas") 31 | 32 | yield 33 | 34 | if LOCAL_DIR.exists(): 35 | if IS_WIN: 36 | os.system(f"rd /s /q {LOCAL_DIR}") 37 | else: 38 | os.system(f"rm -rf {LOCAL_DIR}") 39 | 40 | 41 | @pytest.fixture(scope="session") 42 | def alipcsapi(uncompress_test_data) -> AliPCSApi: 43 | return AliPCSApi(refresh_token=REFRESH_TOKEN) 44 | 45 | 46 | @pytest.fixture(scope="session") 47 | def alipcs(alipcsapi: AliPCSApi) -> AliPCS: 48 | return alipcsapi._alipcs 49 | 50 | 51 | @pytest.fixture(scope="session") 52 | def datas(alipcsapi: AliPCSApi): 53 | if REFRESH_TOKEN == "": 54 | return 55 | 56 | local_paths = [] 57 | local_dir = LOCAL_DIR 58 | for root, _, files in os.walk(local_dir): 59 | for fl in files: 60 | local_paths.append(str(Path(root, fl))) 61 | 62 | remote_dir = TEST_ROOT + "/-------" 63 | remote_dir_pcs_file = alipcsapi.makedir_path(remote_dir)[0] 64 | from_paths = [str(local_dir / fn) for fn in os.listdir(local_dir)] 65 | from_to_list = from_tos(from_paths, remote_dir) 66 | 67 | upload(alipcsapi, from_to_list) 68 | 69 | yield Datas( 70 | local_dir=str(local_dir), 71 | local_paths=local_paths, 72 | remote_dir=remote_dir, 73 | remote_dir_pcs_file=remote_dir_pcs_file, 74 | remote_paths=[to_ for _, to_ in from_to_list], 75 | ) 76 | 77 | pf = alipcsapi.meta_by_path(TEST_ROOT) 78 | assert pf is not None 79 | alipcsapi.remove(pf.file_id) 80 | -------------------------------------------------------------------------------- /tests/datas.py: -------------------------------------------------------------------------------- 1 | import os 2 | from dataclasses import dataclass 3 | from typing import List 4 | 5 | from alipcs_py.alipcs.inner import PcsFile 6 | 7 | REFRESH_TOKEN = os.getenv("REFRESH_TOKEN", "") 8 | 9 | 10 | @dataclass 11 | class Datas: 12 | local_dir: str 13 | local_paths: List[str] 14 | remote_dir: str 15 | remote_dir_pcs_file: PcsFile 16 | remote_paths: List[str] 17 | 
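The session fixtures in `tests/conftest.py` above only exercise the live API when the `REFRESH_TOKEN` environment variable is set; the test modules that follow skip themselves otherwise. A rough standalone sketch of the same bootstrap the fixtures perform (illustrative only; not part of the test suite):

import os
from alipcs_py import AliPCSApi
from alipcs_py.commands.upload import upload, from_tos

refresh_token = os.getenv("REFRESH_TOKEN", "")
assert refresh_token, "export REFRESH_TOKEN before running the integration tests"

api = AliPCSApi(refresh_token=refresh_token)

local_dir = "tests/test-datas/demo-directory"   # produced by unpacking demo-directory.tar.gz
remote_dir = "/AliPCS-Py-test/-------"          # same remote layout conftest.py uses
api.makedir_path(remote_dir)                    # create the remote test directory
from_paths = [os.path.join(local_dir, name) for name in os.listdir(local_dir)]
upload(api, from_tos(from_paths, remote_dir))   # upload the demo data before the tests run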
-------------------------------------------------------------------------------- /tests/test-datas/demo-directory.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PeterDing/AliPCS-Py/beeb515ab8f963c795f99c82d2e3a88487b0f0ae/tests/test-datas/demo-directory.tar.gz -------------------------------------------------------------------------------- /tests/test_alipcs.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | import time 4 | import random 5 | 6 | import pytest 7 | import qrcode 8 | 9 | from alipcs_py.alipcs import AliPCS, AliOpenPCS, AliOpenAuth 10 | 11 | from tests.datas import REFRESH_TOKEN, Datas 12 | 13 | 14 | class TestAliPCS: 15 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 16 | def test_get_token(self, alipcs: AliPCS): 17 | info = alipcs.get_token() 18 | assert info["access_token"] != "" 19 | 20 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 21 | def test_refresh(self, alipcs: AliPCS): 22 | info = alipcs.refresh() 23 | assert info["access_token"] != "" 24 | 25 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 26 | def test_get_drive_info(self, alipcs: AliPCS): 27 | assert alipcs.device_id != "" 28 | 29 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 30 | def test_create_session(self, alipcs: AliPCS): 31 | info = alipcs.create_session() 32 | assert info["result"] and info["success"] 33 | 34 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 35 | def test_path_traceback(self, alipcs: AliPCS, datas: Datas): 36 | local_dir = datas.local_dir 37 | local_paths = datas.local_paths 38 | remote_dir = datas.remote_dir 39 | 40 | local_path = random.choice(local_paths) 41 | remote_path = Path(remote_dir) / local_path[len(local_dir) + 1 :] 42 | remote_path_posix = remote_path.as_posix() 43 | file_id = alipcs.meta_by_path(remote_path_posix)["file_id"] 44 | 45 | info = alipcs.path_traceback(file_id) 46 | wanted_path = Path("/", *[p["name"] for p in info["items"][::-1]]) 47 | assert wanted_path == remote_path 48 | 49 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 50 | def test_meta_by_path(self, alipcs: AliPCS, datas: Datas): 51 | remote_dir = datas.remote_dir 52 | info = alipcs.meta_by_path(remote_dir) 53 | assert info["file_id"] != "" 54 | assert info["name"] == os.path.basename(remote_dir) 55 | 56 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 57 | def test_meta(self, alipcs: AliPCS, datas: Datas): 58 | pcs_file = datas.remote_dir_pcs_file 59 | info = alipcs.meta(pcs_file.file_id) 60 | assert info["file_id"] == pcs_file.file_id 61 | assert info["name"] == pcs_file.name 62 | 63 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 64 | def test_exists(self, alipcs: AliPCS, datas: Datas): 65 | pcs_file = datas.remote_dir_pcs_file 66 | assert alipcs.exists(pcs_file.file_id) 67 | 68 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 69 | def test_is_dir(self, alipcs: AliPCS, datas: Datas): 70 | pcs_file = datas.remote_dir_pcs_file 71 | assert alipcs.is_dir(pcs_file.file_id) 72 | 73 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 74 | def test_list(self, alipcs: AliPCS, datas: Datas): 75 | local_dir = datas.local_dir 76 | pcs_file = datas.remote_dir_pcs_file 77 | filenames = set(os.listdir(local_dir)) 78 | info = alipcs.list(pcs_file.file_id) 79 | for v in 
info["items"]: 80 | assert v["name"] in filenames 81 | 82 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 83 | def test_walk(self, alipcs: AliPCS, datas: Datas): 84 | pcs_file = datas.remote_dir_pcs_file 85 | alipcs.walk(pcs_file.file_id) 86 | 87 | # More tests in test_alipcsapi.py 88 | 89 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 90 | def test_create_file(self, alipcs: AliPCS): 91 | pass 92 | # Tested in conftest.py 93 | 94 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 95 | def test_rapid_upload_file(self, alipcs: AliPCS): 96 | pass 97 | # Tested in test_alipcsapi.py 98 | 99 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 100 | def test_search(self, alipcs: AliPCS, datas: Datas): 101 | time.sleep(10) # Wait for the file to be indexed 102 | 103 | local_paths = datas.local_paths 104 | local_path = random.choice(local_paths) 105 | name = os.path.basename(local_path) 106 | info = alipcs.search(name) 107 | assert any(v["name"] == name for v in info["items"]) 108 | 109 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 110 | def test_makedir(self, alipcs: AliPCS): 111 | pass 112 | # Tested in conftest.py 113 | 114 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 115 | def test_move(self, alipcs: AliPCS, datas: Datas): 116 | pass 117 | # Tested in test_alipcsapi.py 118 | 119 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 120 | def test_rename(self, alipcs: AliPCS): 121 | pass 122 | # Tested in test_alipcsapi.py 123 | 124 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 125 | def test_copy(self, alipcs: AliPCS): 126 | pass 127 | # Tested in test_alipcsapi.py 128 | 129 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 130 | def test_remove(self, alipcs: AliPCS): 131 | pass 132 | # Tested in test_alipcsapi.py 133 | 134 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 135 | def test_share(self, alipcs: AliPCS): 136 | pass 137 | # Tested in test_alipcsapi.py 138 | 139 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 140 | def test_list_shared(self, alipcs: AliPCS): 141 | pass 142 | # Tested in test_alipcsapi.py 143 | 144 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 145 | def test_cancel_shared(self, alipcs: AliPCS): 146 | pass 147 | # Tested in test_alipcsapi.py 148 | 149 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 150 | def test_get_share_token(self, alipcs: AliPCS): 151 | pass 152 | # Tested in test_alipcsapi.py 153 | 154 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 155 | def test_shared_info(self, alipcs: AliPCS): 156 | pass 157 | # Tested in test_alipcsapi.py 158 | 159 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 160 | def test_list_shared_files(self, alipcs: AliPCS): 161 | pass 162 | # Tested in test_alipcsapi.py 163 | 164 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 165 | def test_transfer_shared_files(self, alipcs: AliPCS): 166 | pass 167 | # Tested in test_alipcsapi.py 168 | 169 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 170 | def test_shared_file_download_url(self, alipcs: AliPCS): 171 | pass 172 | # Tested in test_alipcsapi.py 173 | 174 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 175 | def test_user(self, alipcs: AliPCS): 176 | info = alipcs.user_info() 177 | assert info["user_id"] != "" 178 | 179 | 
@pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 180 | def test_download_link(self, alipcs: AliPCS, datas: Datas): 181 | pass 182 | # Tested in test_alipcsapi.py 183 | 184 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 185 | def test_file_stream(self, alipcs: AliPCS): 186 | pass 187 | # Tested in test_alipcsapi.py 188 | 189 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 190 | def test_shared_file_stream(self, alipcs: AliPCS): 191 | pass 192 | # Tested in test_alipcsapi.py 193 | 194 | # def test_qrcode_link(self, alipcs: AliPCS): 195 | # ali_auth = AliOpenAuth(client_server=ALIYUNDRIVE_OPENAPI_AUTH_DOMAIN) 196 | # info = ali_auth.get_qrcode_info() 197 | # print(info) 198 | # 199 | # def test_qrcode_auth(self, alipcs: AliPCS): 200 | # ali_auth = AliOpenAuth(client_server=ALIYUNDRIVE_OPENAPI_AUTH_DOMAIN) 201 | # 202 | # # Get qrcode info 203 | # info = ali_auth.get_qrcode_info() 204 | # print(info) 205 | # sid = info["sid"] 206 | # 207 | # qrcode_url = f"https://www.aliyundrive.com/o/oauth/authorize?sid={sid}" 208 | # 209 | # qr = qrcode.QRCode() 210 | # qr.add_data(qrcode_url) 211 | # f = io.StringIO() 212 | # qr.print_ascii(out=f, tty=False, invert=True) 213 | # f.seek(0) 214 | # print(f.read()) 215 | # 216 | # while True: 217 | # info = ali_auth.scan_status(sid) 218 | # print(info) 219 | # if info["status"] == "LoginSuccess": 220 | # auth_code = info["authCode"] 221 | # break 222 | # time.sleep(2) 223 | # 224 | # info = ali_auth.get_refresh_token(auth_code) 225 | # 226 | # print(info) 227 | -------------------------------------------------------------------------------- /tests/test_alipcsapi.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path, PosixPath 2 | import os 3 | import random 4 | import time 5 | 6 | from alipcs_py import AliPCSApi 7 | 8 | import pytest 9 | from rich import print 10 | 11 | from tests.datas import REFRESH_TOKEN, Datas 12 | 13 | 14 | class TestAliPCSApi: 15 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 16 | def test_refresh_token(self, alipcsapi: AliPCSApi, datas: Datas): 17 | assert alipcsapi.refresh_token != "" 18 | 19 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 20 | def test_access_token(self, alipcsapi: AliPCSApi, datas: Datas): 21 | assert alipcsapi.access_token != "" 22 | 23 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 24 | def test_expire_time(self, alipcsapi: AliPCSApi, datas: Datas): 25 | assert alipcsapi.expire_time > 0 26 | 27 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 28 | def test_user_id(self, alipcsapi: AliPCSApi, datas: Datas): 29 | assert alipcsapi.user_id != "" 30 | 31 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 32 | def test_device_id(self, alipcsapi: AliPCSApi, datas: Datas): 33 | assert alipcsapi.device_id != "" 34 | 35 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 36 | def test_default_drive_id(self, alipcsapi: AliPCSApi, datas: Datas): 37 | assert alipcsapi.default_drive_id != "" 38 | 39 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 40 | def test_path_traceback(self, alipcsapi: AliPCSApi, datas: Datas): 41 | remote_path = random.choice(datas.remote_paths) 42 | pcs_file = alipcsapi.meta_by_path(remote_path) 43 | assert pcs_file is not None 44 | files = alipcsapi.path_traceback(pcs_file.file_id) 45 | assert remote_path == files[0].path 46 | 47 | @pytest.mark.skipif(not 
REFRESH_TOKEN, reason="No REFRESH_TOKEN") 48 | def test_meta_by_path(self, alipcsapi: AliPCSApi, datas: Datas): 49 | remote_path = random.choice(datas.remote_paths) 50 | pcs_file = alipcsapi.meta_by_path(remote_path) 51 | assert pcs_file is not None 52 | assert pcs_file.path == remote_path 53 | 54 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 55 | def test_meta(self, alipcsapi: AliPCSApi, datas: Datas): 56 | pcs_file = datas.remote_dir_pcs_file 57 | pf = alipcsapi.meta(pcs_file.file_id) 58 | assert pf is not None 59 | assert pf.name == pf.path 60 | 61 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 62 | def test_get_file(self, alipcsapi: AliPCSApi, datas: Datas): 63 | remote_path = random.choice(datas.remote_paths) 64 | pcs_file = alipcsapi.get_file(remotepath=remote_path) 65 | assert pcs_file is not None 66 | assert pcs_file.path == remote_path 67 | 68 | pcs_file = datas.remote_dir_pcs_file 69 | pf = alipcsapi.get_file(file_id=pcs_file.file_id) 70 | assert pf is not None 71 | assert pf.name == pf.path 72 | 73 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 74 | def test_exists(self, alipcsapi: AliPCSApi, datas: Datas): 75 | pcs_file = datas.remote_dir_pcs_file 76 | assert alipcsapi.exists(pcs_file.file_id) 77 | assert not alipcsapi.exists(pcs_file.file_id[::-1]) 78 | 79 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 80 | def test_is_file(self, alipcsapi: AliPCSApi, datas: Datas): 81 | pcs_file = datas.remote_dir_pcs_file 82 | assert not alipcsapi.is_file(pcs_file.file_id) 83 | 84 | remote_path = random.choice(datas.remote_paths) 85 | pcs_file = alipcsapi.meta_by_path(remote_path) 86 | assert pcs_file is not None 87 | assert alipcsapi.is_file(pcs_file.file_id) 88 | 89 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 90 | def test_is_dir(self, alipcsapi: AliPCSApi, datas: Datas): 91 | pass 92 | # Same as test_is_file 93 | 94 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 95 | def test_list(self, alipcsapi: AliPCSApi, datas: Datas): 96 | pcs_file = datas.remote_dir_pcs_file 97 | sub_pcs_files, _ = alipcsapi.list(pcs_file.file_id) 98 | local_dir = datas.local_dir 99 | for sub_pcs_file in sub_pcs_files: 100 | assert sub_pcs_file.path == sub_pcs_file.name 101 | assert Path(local_dir, sub_pcs_file.path).exists() 102 | 103 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 104 | def test_list_iter(self, alipcsapi: AliPCSApi, datas: Datas): 105 | pcs_file = datas.remote_dir_pcs_file 106 | sub_pcs_files = list(alipcsapi.list_iter(pcs_file.file_id, recursive=True, include_dir=True)) 107 | local_dir = datas.local_dir 108 | for sub_pcs_file in sub_pcs_files: 109 | assert not sub_pcs_file.path.startswith(pcs_file.name) 110 | assert Path(local_dir, PosixPath(sub_pcs_file.path)).exists() 111 | 112 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 113 | def test_path(self, alipcsapi: AliPCSApi, datas: Datas): 114 | remote_path = sorted(datas.remote_paths, key=lambda x: len(x))[-1] 115 | pcs_file = alipcsapi.path(remote_path) 116 | assert pcs_file is not None 117 | assert remote_path == pcs_file.path 118 | 119 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 120 | def test_paths(self, alipcsapi: AliPCSApi, datas: Datas): 121 | pass 122 | # Tested in test_path 123 | 124 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 125 | def test_list_path_iter(self, alipcsapi: AliPCSApi, datas: Datas): 126 | pass 127 | # 
Deprecated 128 | 129 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 130 | def test_list_path(self, alipcsapi: AliPCSApi, datas: Datas): 131 | pass 132 | # Deprecated 133 | 134 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 135 | def test_walk(self, alipcsapi: AliPCSApi, datas: Datas): 136 | remote_dir_pcs_file = datas.remote_dir_pcs_file 137 | remote_dir = datas.remote_dir 138 | local_dir = datas.local_dir 139 | remote_paths = set(datas.remote_paths) 140 | wanted_paths = set() 141 | for pcs_file in alipcsapi.walk(remote_dir_pcs_file.file_id): 142 | assert Path(local_dir, pcs_file.path).exists() 143 | if pcs_file.is_file: 144 | wanted_paths.add(remote_dir + "/" + pcs_file.path) 145 | assert wanted_paths == remote_paths 146 | 147 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 148 | def test_create_file(self, alipcsapi: AliPCSApi, datas: Datas): 149 | pass 150 | # Tested in test_commands.py 151 | 152 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 153 | def test_prepare_file(self, alipcsapi: AliPCSApi, datas: Datas): 154 | pass 155 | # Tested in test_commands.py 156 | 157 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 158 | def test_get_upload_url(self, alipcsapi: AliPCSApi, datas: Datas): 159 | pass 160 | # Tested in test_commands.py 161 | 162 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 163 | def test_rapid_upload_file(self, alipcsapi: AliPCSApi, datas: Datas): 164 | pass 165 | # Tested in test_commands.py 166 | 167 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 168 | def test_upload_slice(self, alipcsapi: AliPCSApi, datas: Datas): 169 | pass 170 | # Tested in test_commands.py 171 | 172 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 173 | def test_upload_complete(self, alipcsapi: AliPCSApi, datas: Datas): 174 | pass 175 | # Tested in test_commands.py 176 | 177 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 178 | def test_search(self, alipcsapi: AliPCSApi, datas: Datas): 179 | remote_path = random.choice(datas.remote_paths) 180 | name = os.path.basename(remote_path) 181 | time.sleep(10) # Wait for the file to be indexed 182 | assert any(pcs_file.name == name for pcs_file in alipcsapi.search(name)[0]) 183 | 184 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 185 | def test_search_all(self, alipcsapi: AliPCSApi, datas: Datas): 186 | pass 187 | # Tested in test_search 188 | 189 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 190 | def test_makedir(self, alipcsapi: AliPCSApi, datas: Datas): 191 | name = "test_makedir1" 192 | pcs_file = alipcsapi.makedir("root", name) 193 | assert pcs_file is not None 194 | assert pcs_file.name == name 195 | alipcsapi.remove(pcs_file.file_id) 196 | 197 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 198 | def test_makedir_path(self, alipcsapi: AliPCSApi, datas: Datas): 199 | path = "/test_makedir_path2/test_makedir_path3/test_makedir_path4" 200 | pcs_files = alipcsapi.makedir_path(path) 201 | try: 202 | parts = path.split("/") 203 | for i in range(1, len(parts)): 204 | assert pcs_files[i - 1].path == "/".join(parts[: len(parts) - i + 1]) 205 | finally: 206 | alipcsapi.remove(pcs_files[-1].file_id) 207 | 208 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 209 | def test_move(self, alipcsapi: AliPCSApi, datas: Datas): 210 | path = "/test_move/test_move1/test_move2" 211 | pcs_files = 
alipcsapi.makedir_path(path) 212 | try: 213 | result = alipcsapi.move(pcs_files[0].file_id, pcs_files[-1].file_id) 214 | assert all(result) 215 | 216 | assert alipcsapi.get_file(remotepath="/test_move/test_move2") is not None 217 | assert alipcsapi.get_file(remotepath="/test_move/test_move1/test_move2") is None 218 | finally: 219 | alipcsapi.remove(pcs_files[-1].file_id) 220 | 221 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 222 | def test_rename(self, alipcsapi: AliPCSApi, datas: Datas): 223 | path = "/test_rename/test_rename1/test_rename2" 224 | pcs_files = alipcsapi.makedir_path(path) 225 | try: 226 | pf = alipcsapi.rename(pcs_files[0].file_id, "test_rename3") 227 | assert pf is not None 228 | assert pf.name == "test_rename3" 229 | assert alipcsapi.get_file(remotepath=path) is None 230 | assert alipcsapi.get_file(remotepath=path.replace("2", "3")) is not None 231 | finally: 232 | pf = alipcsapi.get_file(remotepath="/test_rename") 233 | if pf is not None: 234 | alipcsapi.remove(pf.file_id) 235 | 236 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 237 | def test_copy(self, alipcsapi: AliPCSApi, datas: Datas): 238 | path = "/test_copy/test_copy1/test_copy2" 239 | pcs_files = alipcsapi.makedir_path(path) 240 | try: 241 | new_files = alipcsapi.copy(pcs_files[0].file_id, pcs_files[-1].file_id) 242 | assert len(new_files) == 1 243 | 244 | assert alipcsapi.get_file(remotepath="/test_copy/test_copy2") is not None 245 | finally: 246 | alipcsapi.remove(pcs_files[-1].file_id) 247 | 248 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 249 | def test_remove(self, alipcsapi: AliPCSApi, datas: Datas): 250 | path = "/test_remove/test_remove1/test_remove2" 251 | pcs_files = alipcsapi.makedir_path(path) 252 | try: 253 | assert alipcsapi.remove(pcs_files[0].file_id) 254 | assert alipcsapi.get_file(remotepath=path) is None 255 | finally: 256 | alipcsapi.remove(pcs_files[-1].file_id) 257 | 258 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 259 | def test_share(self, alipcsapi: AliPCSApi, datas: Datas): 260 | pass 261 | # share api changed, need to update 262 | 263 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 264 | def test_is_shared_valid(self, alipcsapi: AliPCSApi, datas: Datas): 265 | pass 266 | # share api changed, need to update 267 | 268 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 269 | def test_list_shared(self, alipcsapi: AliPCSApi, datas: Datas): 270 | pass 271 | # share api changed, need to update 272 | 273 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 274 | def test_list_shared_all(self, alipcsapi: AliPCSApi, datas: Datas): 275 | pass 276 | # share api changed, need to update 277 | 278 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 279 | def test_cancel_shared(self, alipcsapi: AliPCSApi, datas: Datas): 280 | pass 281 | # share api changed, need to update 282 | 283 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 284 | def test_get_share_token(self, alipcsapi: AliPCSApi, datas: Datas): 285 | pass 286 | # share api changed, need to update 287 | 288 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 289 | def test_shared_info(self, alipcsapi: AliPCSApi, datas: Datas): 290 | pass 291 | # share api changed, need to update 292 | 293 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 294 | def test_transfer_shared_files(self, alipcsapi: AliPCSApi, datas: Datas): 295 | pass 296 | # 
share api changed, need to update 297 | 298 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 299 | def test_shared_file_download_url(self, alipcsapi: AliPCSApi, datas: Datas): 300 | pass 301 | # share api changed, need to update 302 | 303 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 304 | def test_user_info(self, alipcsapi: AliPCSApi, datas: Datas): 305 | info = alipcsapi.user_info() 306 | assert info.user_id != "" 307 | 308 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 309 | def test_download_link(self, alipcsapi: AliPCSApi, datas: Datas): 310 | remote_path = random.choice(datas.remote_paths) 311 | pcs_file = alipcsapi.meta_by_path(remote_path) 312 | assert pcs_file is not None 313 | link = alipcsapi.download_link(pcs_file.file_id) 314 | assert link is not None 315 | assert link.download_url or link.url 316 | assert not link.expires() 317 | 318 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 319 | def test_update_download_url(self, alipcsapi: AliPCSApi, datas: Datas): 320 | remote_path = random.choice(datas.remote_paths) 321 | pcs_file = alipcsapi.meta_by_path(remote_path) 322 | assert pcs_file is not None 323 | pcs_file = alipcsapi.update_download_url(pcs_file) 324 | assert not pcs_file.download_url_expires() 325 | 326 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 327 | def test_file_stream(self, alipcsapi: AliPCSApi, datas: Datas): 328 | remote_path = random.choice(datas.remote_paths) 329 | remote_dir = datas.remote_dir 330 | local_path = Path(datas.local_dir, PosixPath(remote_path[len(remote_dir) + 1 :])) 331 | pcs_file = alipcsapi.meta_by_path(remote_path) 332 | assert pcs_file is not None 333 | stream = alipcsapi.file_stream(pcs_file.file_id) 334 | assert stream is not None 335 | assert stream.readable() 336 | assert stream.seekable() 337 | content = stream.read() 338 | assert content is not None 339 | assert len(content) == pcs_file.size 340 | assert content == local_path.read_bytes() 341 | 342 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 343 | def test_shared_file_stream(self, alipcsapi: AliPCSApi, datas: Datas): 344 | pass 345 | # share api changed, need to update 346 | -------------------------------------------------------------------------------- /tests/test_commands.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import random 4 | import time 5 | import io 6 | from pathlib import Path, PosixPath 7 | 8 | from alipcs_py.alipcs import AliPCSApi 9 | from alipcs_py.commands.list_files import list_files 10 | from alipcs_py.commands.search import search 11 | from alipcs_py.commands.file_operators import makedir, move, rename, copy, remove 12 | from alipcs_py.commands.upload import upload, from_tos, _rapid_upload 13 | from alipcs_py.commands.share import ( 14 | list_shared, 15 | share_files, 16 | cancel_shared, 17 | save_shared, 18 | list_shared_files, 19 | ) 20 | from alipcs_py.commands.user import show_user_info 21 | from alipcs_py.commands.download import download, Downloader 22 | from alipcs_py.commands.server import start_server 23 | from alipcs_py.commands.crypto import decrypt_file 24 | from alipcs_py.common.crypto import calc_proof_code, calc_sha1 25 | 26 | import pytest 27 | from faker import Faker 28 | 29 | from alipcs_py.common.io import EncryptType, reset_encrypt_io 30 | 31 | from tests.datas import REFRESH_TOKEN, Datas 32 | 33 | 34 | fake = Faker() 35 | 36 | 37 | class 
CaptureStdout: 38 | def __init__(self): 39 | self.sys_stdout = sys.stdout 40 | self.io = io.StringIO() 41 | sys.stdout = self.io 42 | 43 | def __enter__(self): 44 | return self 45 | 46 | def __exit__(self, exc_type, exc_value, traceback): 47 | sys.stdout = self.sys_stdout 48 | 49 | def get_output(self): 50 | return self.io.getvalue() 51 | 52 | 53 | class TestCommands: 54 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 55 | def test_list_file(self, alipcsapi: AliPCSApi, datas: Datas): 56 | remote_dir = datas.remote_dir 57 | 58 | with CaptureStdout() as cs: 59 | list_files( 60 | alipcsapi, 61 | remote_dir, 62 | show_size=True, 63 | recursive=False, 64 | sifters=[], 65 | highlight=True, 66 | show_file_id=True, 67 | show_date=True, 68 | ) 69 | 70 | output = cs.get_output() 71 | part1, part2 = remote_dir.rsplit("/", 1) 72 | assert part1 in output and part2 in output 73 | 74 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 75 | def test_search(self, alipcsapi: AliPCSApi, datas: Datas): 76 | remote_path = random.choice(datas.remote_paths) 77 | name = os.path.basename(remote_path) 78 | time.sleep(10) # Wait for the file to be indexed 79 | 80 | with CaptureStdout() as cs: 81 | search(alipcsapi, name) 82 | 83 | output = cs.get_output() 84 | assert name in output 85 | 86 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 87 | def test_makedir(self, alipcsapi: AliPCSApi): 88 | path = "/test_makedir_cmd/test_makedir_cmd1/test_makedir_cmd2" 89 | with CaptureStdout() as cs: 90 | makedir(alipcsapi, path, show=True) 91 | 92 | output = cs.get_output() 93 | try: 94 | assert alipcsapi.get_file(remotepath=path) is not None 95 | assert path in output 96 | finally: 97 | remove(alipcsapi, "/".join(path.split("/")[:2])) 98 | 99 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 100 | def test_move(self, alipcsapi: AliPCSApi): 101 | from_path = "/test_move_cmd/test_move_cmd1/test_move_cmd2" 102 | to_path = "/test_move_cmd/tmp/test_move_cmd3" 103 | from_paths = alipcsapi.makedir_path(from_path) 104 | 105 | with CaptureStdout() as cs: 106 | move(alipcsapi, from_path, to_path, show=True) 107 | 108 | output = cs.get_output() 109 | try: 110 | assert alipcsapi.get_file(remotepath=to_path) is not None 111 | assert alipcsapi.get_file(remotepath=from_path) is None 112 | assert to_path in output 113 | finally: 114 | alipcsapi.remove(from_paths[-1].file_id) 115 | 116 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 117 | def test_rename(self, alipcsapi: AliPCSApi): 118 | path = "/test_rename_cmd/test_rename_cmd1" 119 | new_name = "test_rename_cmd2" 120 | from_paths = alipcsapi.makedir_path(path) 121 | 122 | with CaptureStdout() as cs: 123 | rename(alipcsapi, path, new_name, show=True) 124 | 125 | output = cs.get_output() 126 | try: 127 | assert alipcsapi.get_file(remotepath="/".join(path.split("/")[:-1] + [new_name])) is not None 128 | assert new_name in output 129 | finally: 130 | alipcsapi.remove(from_paths[-1].file_id) 131 | 132 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 133 | def test_copy(self, alipcsapi: AliPCSApi): 134 | from_path = "/test_copy_cmd/test_copy_cmd1/test_copy_cmd2" 135 | to_path = "/test_copy_cmd/tmp" 136 | from_paths = alipcsapi.makedir_path(from_path) 137 | 138 | with CaptureStdout() as cs: 139 | copy(alipcsapi, from_path, to_path, show=True) 140 | 141 | output = cs.get_output() 142 | try: 143 | pcs_file = alipcsapi.get_file(remotepath=to_path + "/test_copy_cmd2") 144 | assert pcs_file is 
not None 145 | assert pcs_file.file_id in output 146 | finally: 147 | alipcsapi.remove(from_paths[-1].file_id) 148 | 149 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 150 | def test_remove(self, alipcsapi: AliPCSApi): 151 | path = "/test_remove_cmd" 152 | paths = alipcsapi.makedir_path(path) 153 | remove(alipcsapi, path) 154 | assert not alipcsapi.exists(paths[0].file_id) 155 | assert alipcsapi.exists_in_trash(paths[0].file_id) 156 | 157 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 158 | def test_upload(self, alipcsapi: AliPCSApi, tmp_path: str): 159 | remotedir = "/test_upload_cmd" 160 | file_size = 1024 * 1024 * 10 # 10MB 161 | content = os.urandom(file_size) 162 | sha1 = calc_sha1(content) 163 | name = "test_upload_cmd" 164 | local_path = Path(tmp_path) / name 165 | with open(local_path, "wb") as f: 166 | f.write(content) 167 | 168 | # Upload file 169 | upload(alipcsapi, from_to_list=from_tos([local_path], remotedir), show_progress=False) 170 | try: 171 | pcs_file = alipcsapi.get_file(remotepath=remotedir + "/" + name) 172 | assert pcs_file is not None 173 | assert pcs_file.size == file_size 174 | assert pcs_file.rapid_upload_info is not None 175 | assert pcs_file.rapid_upload_info.content_hash.lower() == sha1.lower() 176 | finally: 177 | remove(alipcsapi, remotedir) 178 | 179 | # Rapid Upload 180 | file_io = open(local_path, "rb") 181 | slice1k_bytes = file_io.read(1024) 182 | reset_encrypt_io(file_io) 183 | slice1k_hash = calc_sha1(slice1k_bytes) 184 | 185 | remote_pcs_file = alipcsapi.makedir_path(remotedir + "/tmp")[0] 186 | 187 | pcs_prepared_file = alipcsapi.prepare_file( 188 | name, 189 | remote_pcs_file.file_id, 190 | file_size, 191 | slice1k_hash, 192 | part_number=1, 193 | check_name_mode="overwrite", 194 | ) 195 | content_hash = calc_sha1(file_io) 196 | proof_code = calc_proof_code(file_io, file_size, alipcsapi.access_token) 197 | 198 | try: 199 | assert pcs_prepared_file.can_rapid_upload() 200 | assert _rapid_upload( 201 | alipcsapi, 202 | local_path.as_posix(), 203 | name, 204 | remote_pcs_file.file_id, 205 | content_hash, 206 | proof_code, 207 | file_size, 208 | check_name_mode="overwrite", 209 | task_id=None, 210 | ) 211 | assert alipcsapi.get_file(remotepath=remotedir + "/tmp/" + name) is not None 212 | finally: 213 | remove(alipcsapi, remotedir) 214 | 215 | # Encrypt Upload 216 | password = b"1234" 217 | for enc_type in EncryptType: 218 | upload( 219 | alipcsapi, 220 | from_to_list=from_tos([local_path], remotedir), 221 | encrypt_password=password, 222 | encrypt_type=enc_type, 223 | show_progress=False, 224 | ) 225 | try: 226 | pcs_file = alipcsapi.get_file(remotepath=remotedir + "/" + name) 227 | assert pcs_file is not None 228 | download( 229 | alipcsapi, [pcs_file.path], localdir=Path(tmp_path, enc_type.value), encrypt_password=password 230 | ) 231 | target_path = Path(tmp_path, enc_type.value, pcs_file.name) 232 | assert target_path.exists() 233 | target_sha1 = calc_sha1(target_path.read_bytes()) 234 | assert target_sha1 == sha1 235 | finally: 236 | remove(alipcsapi, remotedir) 237 | 238 | # Upload directory 239 | upload(alipcsapi, from_to_list=from_tos([tmp_path], remotedir), show_progress=False) 240 | try: 241 | pcs_file = alipcsapi.get_file(remotepath=remotedir + "/" + os.path.basename(tmp_path) + "/" + name) 242 | assert pcs_file is not None 243 | assert pcs_file.size == file_size 244 | assert pcs_file.rapid_upload_info is not None 245 | assert pcs_file.rapid_upload_info.content_hash.lower() == sha1.lower() 246 | 
finally: 247 | remove(alipcsapi, remotedir) 248 | os.remove(local_path) 249 | 250 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 251 | def test_list_shared(self, alipcsapi: AliPCSApi): 252 | pass 253 | # share api changed, need to update 254 | 255 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 256 | def test_share(self, alipcsapi: AliPCSApi): 257 | pass 258 | # share api changed, need to update 259 | 260 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 261 | def test_cancel_shared(self, alipcsapi: AliPCSApi): 262 | pass 263 | # share api changed, need to update 264 | 265 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 266 | def test_save_shared(self, alipcsapi: AliPCSApi): 267 | pass 268 | # share api changed, need to update 269 | 270 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 271 | def test_list_shared_files(self, alipcsapi: AliPCSApi): 272 | pass 273 | # share api changed, need to update 274 | 275 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 276 | def test_show_user_info(self, alipcsapi: AliPCSApi): 277 | with CaptureStdout() as cs: 278 | show_user_info(alipcsapi) 279 | 280 | output = cs.get_output() 281 | assert alipcsapi.refresh_token in output 282 | 283 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 284 | def test_download(self, alipcsapi: AliPCSApi, datas: Datas, tmp_path): 285 | # Download file 286 | remote_path = random.choice(datas.remote_paths) 287 | download(alipcsapi, [remote_path], localdir=tmp_path, downloader=Downloader.me, show_progress=False) 288 | pcs_file = alipcsapi.get_file(remotepath=remote_path) 289 | assert pcs_file is not None 290 | assert pcs_file.rapid_upload_info is not None 291 | local_path = Path(tmp_path) / os.path.basename(remote_path) 292 | assert os.path.exists(local_path) 293 | sha1 = calc_sha1(local_path.open("rb")) 294 | assert sha1.lower() == pcs_file.rapid_upload_info.content_hash.lower() 295 | 296 | # Download directory 297 | remote_dir = datas.remote_dir 298 | download( 299 | alipcsapi, 300 | [remote_dir], 301 | localdir=tmp_path, 302 | downloader=Downloader.me, 303 | recursive=True, 304 | show_progress=False, 305 | ) 306 | 307 | remote_dir_name = os.path.basename(remote_dir) 308 | remote_pcs_file = datas.remote_dir_pcs_file 309 | pcs_files = alipcsapi.walk(remote_pcs_file.file_id) 310 | for pcs_file in pcs_files: 311 | if pcs_file.is_dir: 312 | continue 313 | local_path = Path(tmp_path) / remote_dir_name / PosixPath(pcs_file.path) 314 | assert local_path.exists() 315 | sha1 = calc_sha1(local_path.open("rb")) 316 | assert pcs_file.rapid_upload_info is not None 317 | assert sha1.lower() == pcs_file.rapid_upload_info.content_hash.lower() 318 | 319 | @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 320 | def test_play(self, alipcsapi: AliPCSApi, datas: Datas): 321 | pass 322 | # Not supported in CI 323 | 324 | # 325 | # @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 326 | # def test_http_server(self, alipcsapi: AliPCSApi, datas: Datas): 327 | # print() 328 | # start_server(alipcsapi, "/") 329 | # 330 | # @pytest.mark.skipif(not REFRESH_TOKEN, reason="No REFRESH_TOKEN") 331 | # def test_decrypt_file(self, alipcsapi: AliPCSApi, datas: Datas): 332 | # decrypt_file("f60m", "f60m_dec", "CK-QEpQ)T@@P{kXV/GGw") 333 | -------------------------------------------------------------------------------- /tests/test_common.py: 
-------------------------------------------------------------------------------- 1 | import time 2 | import os 3 | import io 4 | import subprocess 5 | 6 | import requests 7 | 8 | from alipcs_py.common import constant 9 | from alipcs_py.common.number import u64_to_u8x8, u8x8_to_u64 10 | from alipcs_py.common.path import join_path 11 | from alipcs_py.common.platform import IS_WIN 12 | from alipcs_py.common.io import ( 13 | PADDED_ENCRYPT_HEAD_WITH_SALT_LEN, 14 | total_len, 15 | ChunkIO, 16 | generate_nonce_or_iv, 17 | RangeRequestIO, 18 | SimpleEncryptIO, 19 | ChaCha20EncryptIO, 20 | AES256CBCEncryptIO, 21 | to_decryptio, 22 | rapid_upload_params, 23 | ) 24 | from alipcs_py.common.crypto import ( 25 | generate_key_iv, 26 | padding_key, 27 | padding_size, 28 | random_bytes, 29 | _md5_cmd, 30 | calc_file_md5, 31 | SimpleCryptography, 32 | ChaCha20Cryptography, 33 | AES256CBCCryptography, 34 | ) 35 | 36 | from alipcs_py.utils import human_size, human_size_to_int 37 | 38 | 39 | def test_join_path(): 40 | a = "/foo" 41 | b = "bar" 42 | assert join_path(a, b) == "/foo/bar" 43 | 44 | a = "foo" 45 | b = "bar" 46 | assert join_path(a, b) == "foo/bar" 47 | 48 | a = "/foo" 49 | b = "../bar" 50 | assert join_path(a, b) == "/bar" 51 | 52 | a = "foo" 53 | b = "../bar" 54 | assert join_path(a, b) == "bar" 55 | 56 | 57 | def test_padding_key(): 58 | key = os.urandom(5) 59 | pad_key = padding_key(key, 10) 60 | assert pad_key == key + b"\xff" * 5 61 | 62 | pad_key = padding_key(key, 11, value=b"\x00") 63 | assert pad_key == key + b"\x00" * 6 64 | 65 | pad_key = padding_key(key, 12, value=b"") 66 | assert len(pad_key) == 12 67 | 68 | 69 | def test_generate_nonce_or_iv(): 70 | salt = os.urandom(20) 71 | buf = io.BytesIO(b"123456789") 72 | 73 | ni1 = generate_nonce_or_iv(salt, buf) 74 | buf.seek(0, 0) 75 | 76 | ni2 = generate_nonce_or_iv(salt, buf) 77 | buf.seek(0, 0) 78 | 79 | print(ni1) 80 | 81 | assert len(ni1) == 16 82 | assert ni1 == ni2 83 | 84 | 85 | def test_rangerequestio(): 86 | url = "http://mirror.arizona.edu/ubuntu/dists/xenial/Release.gpg" 87 | io = RangeRequestIO("GET", url, max_chunk_size=300) 88 | 89 | b = b"" 90 | while True: 91 | cn = io.read(300) 92 | if not cn: 93 | break 94 | b += cn 95 | 96 | o = requests.get(url).content 97 | assert b == o 98 | 99 | 100 | def test_calu_file_md5(): 101 | # Github action fail on windows 102 | if IS_WIN: 103 | return 104 | 105 | path = "temp-file" 106 | fd = open(path, "w") 107 | fd.write("asdf") 108 | 109 | cp = subprocess.run(_md5_cmd(path), universal_newlines=True, stdout=subprocess.PIPE) 110 | output = cp.stdout.strip() 111 | print("calc_file_md5: cmd output:", output) 112 | 113 | try: 114 | r = calc_file_md5(path) 115 | print("calc_file_md5:", r) 116 | finally: 117 | os.remove(path) 118 | assert r 119 | 120 | 121 | def test_simplecryptography(): 122 | key = os.urandom(32) 123 | c = SimpleCryptography(key) 124 | buf = os.urandom(100) 125 | enc = c.encrypt(buf) 126 | dec = c.decrypt(enc) 127 | assert buf == dec 128 | 129 | 130 | def test_chacha20cryptography(): 131 | key = os.urandom(32) 132 | nonce = os.urandom(16) 133 | c = ChaCha20Cryptography(key, nonce) 134 | buf = os.urandom(100) 135 | enc = c.encrypt(buf) 136 | dec = c.decrypt(enc) 137 | assert buf == dec 138 | 139 | 140 | def test_aescryptography(): 141 | key = os.urandom(32) 142 | iv = os.urandom(16) 143 | c = AES256CBCCryptography(key, iv) 144 | buf = b"a" * 16 * 2 145 | 146 | enc = c.encrypt(buf) 147 | print("enc:", enc, len(enc)) 148 | enc += c._encryptor.finalize() 149 | 150 | dec = 
c.decrypt(enc[:16]) 151 | print("dec:", dec, len(dec)) 152 | dec += c.decrypt(enc[16:]) 153 | print("dec:", dec, len(dec)) 154 | dec += c._decryptor.finalize() 155 | assert buf == dec 156 | 157 | 158 | def test_simplecryptography_time(): 159 | key = os.urandom(32) 160 | c = SimpleCryptography(key) 161 | buf = b"a" * 1024 * 1024 * 100 162 | start = time.time() 163 | c.encrypt(buf) 164 | end = time.time() 165 | print("100M:", end - start) 166 | 167 | 168 | def test_chacha20cryptography_time(): 169 | key = os.urandom(32) 170 | nonce = os.urandom(16) 171 | c = ChaCha20Cryptography(key, nonce) 172 | buf = b"a" * 1024 * 1024 * 100 173 | start = time.time() 174 | c.encrypt(buf) 175 | end = time.time() 176 | print("100M:", end - start) 177 | 178 | 179 | def test_aes256cbccryptography_time(): 180 | key = os.urandom(32) 181 | iv = os.urandom(16) 182 | c = AES256CBCCryptography(key, iv) 183 | buf = b"a" * 1024 * 1024 * 100 184 | start = time.time() 185 | enc = c.encrypt(buf) 186 | end = time.time() 187 | print("100M:", end - start, len(enc)) 188 | 189 | 190 | def test_noencryptio(): 191 | key = b"123" 192 | buf = os.urandom(1024 * 1024 * 50) 193 | c = io.BytesIO(buf) 194 | enc = c.read() 195 | d = to_decryptio(io.BytesIO(enc), key) 196 | dec = d.read() 197 | assert buf == dec 198 | 199 | 200 | def test_simpleencryptio(): 201 | key = b"123" 202 | buf = os.urandom(1024 * 1024 * 50) 203 | bio = io.BytesIO(buf) 204 | c = SimpleEncryptIO(bio, key, len(buf)) 205 | assert total_len(c) == len(buf) + PADDED_ENCRYPT_HEAD_WITH_SALT_LEN 206 | enc = c.read() 207 | d = to_decryptio(io.BytesIO(enc), key) 208 | assert total_len(d) == len(buf) 209 | dec = d.read() 210 | assert buf == dec 211 | 212 | 213 | def test_chacha20encryptio(): 214 | key = os.urandom(32) 215 | buf = os.urandom(1024 * 1024 * 50) 216 | bio = io.BytesIO(buf) 217 | c = ChaCha20EncryptIO(bio, key, len(buf)) 218 | assert total_len(c) == len(buf) + PADDED_ENCRYPT_HEAD_WITH_SALT_LEN 219 | enc = c.read() 220 | d = to_decryptio(io.BytesIO(enc), key) 221 | assert total_len(d) == len(buf) 222 | dec = d.read() 223 | assert buf == dec 224 | 225 | 226 | def test_aes256cbcencryptio(): 227 | key = os.urandom(32) 228 | buf = os.urandom(1024 * 1024 * 50 + 14) 229 | bio = io.BytesIO(buf) 230 | c = AES256CBCEncryptIO(bio, key, len(buf)) 231 | 232 | assert total_len(c) == padding_size(len(buf), 16) + PADDED_ENCRYPT_HEAD_WITH_SALT_LEN 233 | 234 | enc = c.read() 235 | print("enc", len(enc)) 236 | dio = to_decryptio(io.BytesIO(enc), key) 237 | # assert total_len(d) == len(buf) # can be wrong 238 | dec = dio.read() 239 | print("dec", len(dec)) 240 | assert buf == dec 241 | 242 | # Encrypt 243 | # Assert length of Read(size), size > 0 244 | buf = os.urandom(1024 * 50) 245 | bio = io.BytesIO(buf) 246 | c = AES256CBCEncryptIO(bio, key, len(buf)) 247 | length = 0 248 | while True: 249 | d = c.read(1) 250 | if not d: 251 | break 252 | assert len(d) == 1 253 | length += 1 254 | assert total_len(c) == padding_size(len(buf), 16) + PADDED_ENCRYPT_HEAD_WITH_SALT_LEN 255 | 256 | buf = os.urandom(1024 * 50 + 14) 257 | bio = io.BytesIO(buf) 258 | c = AES256CBCEncryptIO(bio, key, len(buf)) 259 | length = 0 260 | while True: 261 | d = c.read(1) 262 | if not d: 263 | break 264 | assert len(d) == 1 265 | length += 1 266 | assert total_len(c) == padding_size(len(buf), 16) + PADDED_ENCRYPT_HEAD_WITH_SALT_LEN 267 | 268 | # Decrypt 269 | # Assert length of Read(size), size > 0 270 | buf = os.urandom(1024 * 50) 271 | bio = io.BytesIO(buf) 272 | c = AES256CBCEncryptIO(bio, key, len(buf)) 273 
| enc = b"" 274 | while True: 275 | d = c.read(1) 276 | if not d: 277 | break 278 | enc += d 279 | dio = to_decryptio(io.BytesIO(enc), key) 280 | length = 0 281 | while True: 282 | d = dio.read(1) 283 | if not d: 284 | break 285 | assert len(d) == 1 286 | length += 1 287 | assert length == len(buf) 288 | 289 | buf = os.urandom(1024 * 50 + 14) 290 | bio = io.BytesIO(buf) 291 | c = AES256CBCEncryptIO(bio, key, len(buf)) 292 | enc = b"" 293 | while True: 294 | d = c.read(1) 295 | if not d: 296 | break 297 | enc += d 298 | dio = to_decryptio(io.BytesIO(enc), key) 299 | length = 0 300 | while True: 301 | d = dio.read(1) 302 | if not d: 303 | break 304 | assert len(d) == 1 305 | length += 1 306 | assert length == len(buf) 307 | 308 | 309 | def test_linked_crypted_io(): 310 | key = os.urandom(32) 311 | buf = os.urandom(1024 * 50 + 14) 312 | raw_len = len(buf) 313 | 314 | raw_io = io.BytesIO(buf) 315 | eio = SimpleEncryptIO(raw_io, key, raw_len) 316 | dio = to_decryptio(eio, key) 317 | eio = ChaCha20EncryptIO(dio, key, raw_len) 318 | dio = to_decryptio(eio, key) 319 | eio = AES256CBCEncryptIO(dio, key, raw_len) 320 | dio = to_decryptio(eio, key) 321 | 322 | length = 0 323 | dbuf = b"" 324 | while True: 325 | d = dio.read(1) 326 | if not d: 327 | break 328 | dbuf += d 329 | assert len(d) == 1 330 | length += 1 331 | 332 | assert length == raw_len 333 | assert dbuf == buf 334 | 335 | 336 | def test_aes256cbcencryptio_uniq(): 337 | key = os.urandom(32) 338 | buf = os.urandom(1024 * 1024 * 50) 339 | 340 | bio = io.BytesIO(buf) 341 | c = AES256CBCEncryptIO(bio, key, len(buf)) 342 | enc1 = c.read() 343 | 344 | time.sleep(1) 345 | 346 | c.reset() 347 | enc2 = c.read() 348 | 349 | assert enc1 == enc2 350 | 351 | 352 | def test_rapid_upload_params(): 353 | key = os.urandom(32) 354 | buf = os.urandom(60 * constant.OneM) 355 | 356 | eio = ChaCha20EncryptIO(io.BytesIO(buf), key, len(buf)) 357 | enc0 = rapid_upload_params(eio) 358 | 359 | eio.reset() 360 | enc1 = rapid_upload_params(eio) 361 | 362 | assert enc0 == enc1 363 | 364 | 365 | def test_chunkio(): 366 | f = io.BytesIO(b"0123") 367 | b = ChunkIO(f, 2) 368 | 369 | assert b.read() == b"01" 370 | assert b.tell() == 2 371 | 372 | b.seek(0) 373 | assert b.tell() == 0 374 | 375 | b = ChunkIO(f, 2) 376 | assert b.read() == b"01" 377 | assert b.tell() == 2 378 | 379 | b = ChunkIO(f, 2) 380 | assert b.read() == b"23" 381 | assert b.tell() == 2 382 | 383 | 384 | def test_u64_u8x8(): 385 | i = 2**32 386 | b = u64_to_u8x8(i) 387 | x = u8x8_to_u64(b) 388 | assert i == x 389 | 390 | 391 | def test_random_bytes(): 392 | b1 = random_bytes(32, "abc") 393 | b2 = random_bytes(32, "abc") 394 | assert b1 == b2 395 | 396 | 397 | def test_padding_size(): 398 | i = 13 399 | bs = 16 400 | r = padding_size(i, bs) 401 | assert r == bs 402 | 403 | i = 16 404 | bs = 16 405 | r = padding_size(i, bs) 406 | assert r == bs 407 | 408 | i = 13 409 | bs = 16 410 | r = padding_size(i, bs, ceil=False) 411 | assert r == 0 412 | 413 | 414 | def test_generate_key_iv(): 415 | pwd = b"test" 416 | salt = b"\xf6\x81\x8c\xae\x13\x18r\xbd" 417 | key, iv = generate_key_iv(pwd, salt, 32, 16) 418 | 419 | assert key == b"l\x04\xcb\xae\xc4\xd7\xa05^\x04\x93\xa2M\xe5\x0ee\\?\xc1C;\xca\xab|Z\xda\x98\xe8\xdb\x01\xdb\xa0" 420 | assert iv == b"\x8b\xe2\x02\x8e\xee6j\x1cLv\xa2&\xa2\x8a\x1d\xfd" 421 | 422 | 423 | def test_human_size(): 424 | s = constant.OneM * 10 425 | 426 | s_str = human_size(s) 427 | s_int = human_size_to_int(s_str) 428 | 429 | assert s == s_int 430 | 
--------------------------------------------------------------------------------