├── .deepsource.toml ├── .editorconfig ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── cd.yml │ └── ci.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode └── extensions.json ├── CHANGELOG.md ├── CONFIG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── cliff.toml ├── devine ├── __main__.py ├── commands │ ├── __init__.py │ ├── cfg.py │ ├── dl.py │ ├── env.py │ ├── kv.py │ ├── search.py │ ├── serve.py │ ├── util.py │ └── wvd.py ├── core │ ├── __init__.py │ ├── __main__.py │ ├── binaries.py │ ├── cacher.py │ ├── commands.py │ ├── config.py │ ├── console.py │ ├── constants.py │ ├── credential.py │ ├── downloaders │ │ ├── __init__.py │ │ ├── aria2c.py │ │ ├── curl_impersonate.py │ │ └── requests.py │ ├── drm │ │ ├── __init__.py │ │ ├── clearkey.py │ │ └── widevine.py │ ├── events.py │ ├── manifests │ │ ├── __init__.py │ │ ├── dash.py │ │ └── hls.py │ ├── proxies │ │ ├── __init__.py │ │ ├── basic.py │ │ ├── hola.py │ │ ├── nordvpn.py │ │ └── proxy.py │ ├── search_result.py │ ├── service.py │ ├── services.py │ ├── titles │ │ ├── __init__.py │ │ ├── episode.py │ │ ├── movie.py │ │ ├── song.py │ │ └── title.py │ ├── tracks │ │ ├── __init__.py │ │ ├── attachment.py │ │ ├── audio.py │ │ ├── chapter.py │ │ ├── chapters.py │ │ ├── subtitle.py │ │ ├── track.py │ │ ├── tracks.py │ │ └── video.py │ ├── utilities.py │ ├── utils │ │ ├── __init__.py │ │ ├── click_types.py │ │ ├── collections.py │ │ ├── sslciphers.py │ │ ├── subprocess.py │ │ ├── webvtt.py │ │ └── xml.py │ ├── vault.py │ └── vaults.py └── vaults │ ├── API.py │ ├── MySQL.py │ ├── SQLite.py │ └── __init__.py ├── poetry.lock └── pyproject.toml /.deepsource.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | 3 | [[analyzers]] 4 | name = "python" 5 | enabled = true 6 | 7 | [analyzers.meta] 8 | runtime_version = "3.x.x" 9 | max_line_length = 120 10 | 
-------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | charset = utf-8 6 | insert_final_newline = true 7 | indent_style = space 8 | indent_size = 4 9 | trim_trailing_whitespace = true 10 | 11 | [*.{feature,json,md,yaml,yml,toml}] 12 | indent_size = 2 13 | 14 | [*.md] 15 | trim_trailing_whitespace = false 16 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 16 | 1. Clone repository '...' 17 | 2. Run '....' 18 | 3. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Additional context** 27 | Add any other context about the problem here. 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? 
Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/workflows/cd.yml: -------------------------------------------------------------------------------- 1 | name: cd 2 | permissions: 3 | contents: "write" 4 | id-token: "write" 5 | packages: "write" 6 | pull-requests: "read" 7 | 8 | on: 9 | push: 10 | tags: 11 | - "v*" 12 | 13 | jobs: 14 | tagged-release: 15 | name: Tagged Release 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v4 19 | - name: Set up Python 20 | uses: actions/setup-python@v4 21 | with: 22 | python-version: "3.11" 23 | - name: Install Poetry 24 | uses: abatilo/actions-poetry@v2 25 | with: 26 | poetry-version: 1.6.1 27 | - name: Install project 28 | run: poetry install --only main 29 | - name: Build project 30 | run: poetry build 31 | - name: Upload wheel 32 | uses: actions/upload-artifact@v3 33 | with: 34 | name: Python Wheel 35 | path: "dist/*.whl" 36 | - name: Deploy release 37 | uses: marvinpinto/action-automatic-releases@latest 38 | with: 39 | prerelease: false 40 | repo_token: "${{ secrets.GITHUB_TOKEN }}" 41 | files: | 42 | dist/*.whl 43 | - name: Publish to PyPI 44 | env: 45 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} 46 | run: poetry publish 47 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | 
pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | lint: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Set up Python 15 | uses: actions/setup-python@v4 16 | with: 17 | python-version: "3.11" 18 | - name: Install poetry 19 | uses: abatilo/actions-poetry@v2 20 | with: 21 | poetry-version: 1.6.1 22 | - name: Install project 23 | run: poetry install --all-extras 24 | - name: Run pre-commit which does various checks 25 | run: poetry run pre-commit run --all-files --show-diff-on-failure 26 | build: 27 | runs-on: ubuntu-latest 28 | strategy: 29 | matrix: 30 | python-version: ["3.9", "3.10", "3.11"] 31 | steps: 32 | - uses: actions/checkout@v4 33 | - name: Set up Python ${{ matrix.python-version }} 34 | uses: actions/setup-python@v4 35 | with: 36 | python-version: ${{ matrix.python-version }} 37 | - name: Install poetry 38 | uses: abatilo/actions-poetry@v2 39 | with: 40 | poetry-version: 1.6.1 41 | - name: Install project 42 | run: poetry install --all-extras --only main 43 | - name: Build project 44 | run: poetry build 45 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # devine 2 | devine.yaml 3 | devine.yml 4 | *.mkv 5 | *.mp4 6 | *.exe 7 | *.dll 8 | *.crt 9 | *.wvd 10 | *.der 11 | *.pem 12 | *.bin 13 | *.db 14 | *.ttf 15 | *.otf 16 | device_cert 17 | device_client_id_blob 18 | device_private_key 19 | device_vmp_blob 20 | 21 | # Byte-compiled / optimized / DLL files 22 | __pycache__/ 23 | *.py[cod] 24 | *$py.class 25 | 26 | # C extensions 27 | *.so 28 | 29 | # Distribution / packaging 30 | .Python 31 | build/ 32 | develop-eggs/ 33 | dist/ 34 | downloads/ 35 | eggs/ 36 | .eggs/ 37 | lib/ 38 | lib64/ 39 | parts/ 40 | sdist/ 41 | var/ 42 | wheels/ 43 | share/python-wheels/ 44 | *.egg-info/ 45 | .installed.cfg 46 | *.egg 47 | MANIFEST 48 | 49 | # PyInstaller 50 | # Usually these files are written 
by a python script from a template 51 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 52 | *.manifest 53 | *.spec 54 | 55 | # Installer logs 56 | pip-log.txt 57 | pip-delete-this-directory.txt 58 | 59 | # Unit test / coverage reports 60 | htmlcov/ 61 | .tox/ 62 | .nox/ 63 | .coverage 64 | .coverage.* 65 | .cache 66 | nosetests.xml 67 | coverage.xml 68 | *.cover 69 | *.py,cover 70 | .hypothesis/ 71 | .pytest_cache/ 72 | cover/ 73 | 74 | # Translations 75 | *.mo 76 | *.pot 77 | 78 | # Django stuff: 79 | *.log 80 | local_settings.py 81 | db.sqlite3 82 | db.sqlite3-journal 83 | 84 | # Flask stuff: 85 | instance/ 86 | .webassets-cache 87 | 88 | # Scrapy stuff: 89 | .scrapy 90 | 91 | # Sphinx documentation 92 | docs/_build/ 93 | 94 | # PyBuilder 95 | .pybuilder/ 96 | target/ 97 | 98 | # Jupyter Notebook 99 | .ipynb_checkpoints 100 | 101 | # IPython 102 | profile_default/ 103 | ipython_config.py 104 | 105 | # pyenv 106 | # For a library or package, you might want to ignore these files since the code is 107 | # intended to run in multiple environments; otherwise, check them in: 108 | # .python-version 109 | 110 | # pipenv 111 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 112 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 113 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 114 | # install all needed dependencies. 115 | #Pipfile.lock 116 | 117 | # poetry 118 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 119 | # This is especially recommended for binary packages to ensure reproducibility, and is more 120 | # commonly ignored for libraries. 
121 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 122 | #poetry.lock 123 | 124 | # pdm 125 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 126 | #pdm.lock 127 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 128 | # in version control. 129 | # https://pdm.fming.dev/#use-with-ide 130 | .pdm.toml 131 | 132 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 133 | __pypackages__/ 134 | 135 | # Celery stuff 136 | celerybeat-schedule 137 | celerybeat.pid 138 | 139 | # SageMath parsed files 140 | *.sage.py 141 | 142 | # Environments 143 | .env 144 | .venv 145 | env/ 146 | venv/ 147 | ENV/ 148 | env.bak/ 149 | venv.bak/ 150 | 151 | # Spyder project settings 152 | .spyderproject 153 | .spyproject 154 | 155 | # Rope project settings 156 | .ropeproject 157 | 158 | # mkdocs documentation 159 | /site 160 | 161 | # mypy 162 | .mypy_cache/ 163 | .dmypy.json 164 | dmypy.json 165 | 166 | # Pyre type checker 167 | .pyre/ 168 | 169 | # pytype static type analyzer 170 | .pytype/ 171 | 172 | # Cython debug symbols 173 | cython_debug/ 174 | 175 | # PyCharm 176 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 177 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 178 | # and can be added to the global gitignore or merged into this file. For a more nuclear 179 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
180 | .idea/ 181 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | 4 | repos: 5 | - repo: https://github.com/compilerla/conventional-pre-commit 6 | rev: v3.2.0 7 | hooks: 8 | - id: conventional-pre-commit 9 | stages: [commit-msg] 10 | - repo: https://github.com/mtkennerly/pre-commit-hooks 11 | rev: v0.4.0 12 | hooks: 13 | - id: poetry-ruff-check 14 | - repo: https://github.com/pycqa/isort 15 | rev: 5.13.2 16 | hooks: 17 | - id: isort 18 | - repo: https://github.com/pre-commit/pre-commit-hooks 19 | rev: v4.5.0 20 | hooks: 21 | - id: end-of-file-fixer 22 | - id: trailing-whitespace 23 | args: [--markdown-linebreak-ext=md] 24 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "EditorConfig.EditorConfig", 4 | "streetsidesoftware.code-spell-checker", 5 | "ms-python.python", 6 | "ms-python.vscode-pylance", 7 | "charliermarsh.ruff", 8 | "ms-python.isort", 9 | "ms-python.mypy-type-checker", 10 | "redhat.vscode-yaml", 11 | "tamasfe.even-better-toml" 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Development 2 | 3 | This project is managed using [Poetry](https://python-poetry.org), a fantastic Python packaging and dependency manager. 4 | Install the latest version of Poetry before continuing. Development currently requires Python 3.9+. 5 | 6 | ## Set up 7 | 8 | Starting from Zero? Not sure where to begin? Here's steps on setting up this Python project using Poetry. 
Note that 9 | Poetry installation instructions should be followed from the Poetry Docs: https://python-poetry.org/docs/#installation 10 | 11 | 1. While optional, It's recommended to configure Poetry to install Virtual environments within project folders: 12 | ```shell 13 | poetry config virtualenvs.in-project true 14 | ``` 15 | This makes it easier for Visual Studio Code to detect the Virtual Environment, as well as other IDEs and systems. 16 | I've also had issues with Poetry creating duplicate Virtual environments in the default folder for an unknown 17 | reason which quickly filled up my System storage. 18 | 2. Clone the Repository: 19 | ```shell 20 | git clone https://github.com/devine-dl/devine 21 | cd devine 22 | ``` 23 | 3. Install the Project with Poetry: 24 | ```shell 25 | poetry install 26 | ``` 27 | This creates a Virtual environment and then installs all project dependencies and executables into the Virtual 28 | environment. Your System Python environment is not affected at all. 29 | 4. Now activate the Virtual environment: 30 | ```shell 31 | poetry shell 32 | ``` 33 | Note: 34 | - You can alternatively just prefix `poetry run` to any command you wish to run under the Virtual environment. 35 | - I recommend entering the Virtual environment and all further instructions will have assumed you did. 36 | - JetBrains PyCharm has integrated support for Poetry and automatically enters Poetry Virtual environments, assuming 37 | the Python Interpreter on the bottom right is set up correctly. 38 | - For more information, see: https://python-poetry.org/docs/basic-usage/#using-your-virtual-environment 39 | 5. Install Pre-commit tooling to ensure safe and quality commits: 40 | ```shell 41 | pre-commit install 42 | ``` 43 | 44 | ## Building Source and Wheel distributions 45 | 46 | poetry build 47 | 48 | You can optionally specify `-f` to build `sdist` or `wheel` only. 49 | Built files can be found in the `/dist` directory. 
50 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ default configuration file 2 | # https://git-cliff.org/docs/configuration 3 | 4 | [changelog] 5 | header = """ 6 | # Changelog\n 7 | All notable changes to this project will be documented in this file. 8 | 9 | This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 10 | 11 | Versions [3.0.0] and older use a format based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 12 | but versions thereafter use a custom changelog format using [git-cliff](https://git-cliff.org).\n 13 | """ 14 | body = """ 15 | {% if version -%} 16 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 17 | {% else -%} 18 | ## [Unreleased] 19 | {% endif -%} 20 | {% for group, commits in commits | group_by(attribute="group") %} 21 | ### {{ group | striptags | trim | upper_first }} 22 | {% for commit in commits %} 23 | - {% if commit.scope %}*{{ commit.scope }}*: {% endif %}\ 24 | {% if commit.breaking %}[**breaking**] {% endif %}\ 25 | {{ commit.message | upper_first }}\ 26 | {% endfor %} 27 | {% endfor %}\n 28 | """ 29 | footer = """ 30 | {% for release in releases -%} 31 | {% if release.version -%} 32 | {% if release.previous.version -%} 33 | [{{ release.version | trim_start_matches(pat="v") }}]: \ 34 | https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}\ 35 | /compare/{{ release.previous.version }}..{{ release.version }} 36 | {% endif -%} 37 | {% else -%} 38 | [unreleased]: https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}\ 39 | /compare/{{ release.previous.version }}..HEAD 40 | {% endif -%} 41 | {% endfor %} 42 | """ 43 | trim = true 44 | postprocessors = [ 45 | # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL 46 | ] 47 | 48 | [git] 49 | 
conventional_commits = true 50 | filter_unconventional = true 51 | split_commits = false 52 | commit_preprocessors = [] 53 | commit_parsers = [ 54 | { message = "^feat", group = "Features" }, 55 | { message = "^fix|revert", group = "Bug Fixes" }, 56 | { message = "^docs", group = "Documentation" }, 57 | { message = "^style", skip = true }, 58 | { message = "^refactor", group = "Changes" }, 59 | { message = "^perf", group = "Performance Improvements" }, 60 | { message = "^test", skip = true }, 61 | { message = "^build", group = "Builds" }, 62 | { message = "^ci", skip = true }, 63 | { message = "^chore", skip = true }, 64 | ] 65 | protect_breaking_commits = false 66 | filter_commits = false 67 | # tag_pattern = "v[0-9].*" 68 | # skip_tags = "" 69 | # ignore_tags = "" 70 | topo_order = false 71 | sort_commits = "oldest" 72 | -------------------------------------------------------------------------------- /devine/__main__.py: -------------------------------------------------------------------------------- 1 | if __name__ == "__main__": 2 | from devine.core.__main__ import main 3 | main() 4 | -------------------------------------------------------------------------------- /devine/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stabbedbybrick/devine/09eda168824157851e30003b196f4851298ec3ac/devine/commands/__init__.py -------------------------------------------------------------------------------- /devine/commands/cfg.py: -------------------------------------------------------------------------------- 1 | import ast 2 | import logging 3 | import sys 4 | 5 | import click 6 | from ruamel.yaml import YAML 7 | 8 | from devine.core.config import config, get_config_path 9 | from devine.core.constants import context_settings 10 | 11 | 12 | @click.command( 13 | short_help="Manage configuration values for the program and its services.", 14 | context_settings=context_settings) 15 | 
@click.argument("key", type=str, required=False) 16 | @click.argument("value", type=str, required=False) 17 | @click.option("--unset", is_flag=True, default=False, help="Unset/remove the configuration value.") 18 | @click.option("--list", "list_", is_flag=True, default=False, help="List all set configuration values.") 19 | @click.pass_context 20 | def cfg(ctx: click.Context, key: str, value: str, unset: bool, list_: bool) -> None: 21 | """ 22 | Manage configuration values for the program and its services. 23 | 24 | \b 25 | Known Issues: 26 | - Config changes remove all comments of the changed files, which may hold critical data. (#14) 27 | """ 28 | if not key and not value and not list_: 29 | raise click.UsageError("Nothing to do.", ctx) 30 | 31 | if value: 32 | try: 33 | value = ast.literal_eval(value) 34 | except (ValueError, SyntaxError): 35 | pass # probably a str without quotes or similar, assume it's a string value 36 | 37 | log = logging.getLogger("cfg") 38 | 39 | yaml, data = YAML(), None 40 | yaml.default_flow_style = False 41 | 42 | config_path = get_config_path() or config.directories.user_configs / config.filenames.root_config 43 | if config_path.exists(): 44 | data = yaml.load(config_path) 45 | 46 | if not data: 47 | log.warning("No config file was found or it has no data, yet") 48 | # yaml.load() returns `None` if the input data is blank instead of a usable object 49 | # force a usable object by making one and removing the only item within it 50 | data = yaml.load("""__TEMP__: null""") 51 | del data["__TEMP__"] 52 | 53 | if list_: 54 | yaml.dump(data, sys.stdout) 55 | return 56 | 57 | key_items = key.split(".") 58 | parent_key = key_items[:-1] 59 | trailing_key = key_items[-1] 60 | 61 | is_write = value is not None 62 | is_delete = unset 63 | if is_write and is_delete: 64 | raise click.ClickException("You cannot set a value and use --unset at the same time.") 65 | 66 | if not is_write and not is_delete: 67 | data = data.mlget(key_items, 
default=KeyError) 68 | if data == KeyError: 69 | raise click.ClickException(f"Key '{key}' does not exist in the config.") 70 | yaml.dump(data, sys.stdout) 71 | else: 72 | try: 73 | parent_data = data 74 | if parent_key: 75 | parent_data = data.mlget(parent_key, default=data) 76 | if parent_data == data: 77 | for key in parent_key: 78 | if not hasattr(parent_data, key): 79 | parent_data[key] = {} 80 | parent_data = parent_data[key] 81 | if is_write: 82 | parent_data[trailing_key] = value 83 | log.info(f"Set {key} to {repr(value)}") 84 | elif is_delete: 85 | del parent_data[trailing_key] 86 | log.info(f"Unset {key}") 87 | except KeyError: 88 | raise click.ClickException(f"Key '{key}' does not exist in the config.") 89 | config_path.parent.mkdir(parents=True, exist_ok=True) 90 | yaml.dump(data, config_path) 91 | -------------------------------------------------------------------------------- /devine/commands/env.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import shutil 4 | import sys 5 | from pathlib import Path 6 | from typing import Optional 7 | 8 | import click 9 | from rich.padding import Padding 10 | from rich.table import Table 11 | from rich.tree import Tree 12 | 13 | from devine.core.config import POSSIBLE_CONFIG_PATHS, config, config_path 14 | from devine.core.console import console 15 | from devine.core.constants import context_settings 16 | from devine.core.services import Services 17 | 18 | 19 | @click.group(short_help="Manage and configure the project environment.", context_settings=context_settings) 20 | def env() -> None: 21 | """Manage and configure the project environment.""" 22 | 23 | 24 | @env.command() 25 | def info() -> None: 26 | """Displays information about the current environment.""" 27 | log = logging.getLogger("env") 28 | 29 | if config_path: 30 | log.info(f"Config loaded from {config_path}") 31 | else: 32 | tree = Tree("No config file found, you can use any of the 
following locations:") 33 | for i, path in enumerate(POSSIBLE_CONFIG_PATHS, start=1): 34 | tree.add(f"[repr.number]{i}.[/] [text2]{path.resolve()}[/]") 35 | console.print(Padding( 36 | tree, 37 | (0, 5) 38 | )) 39 | 40 | table = Table(title="Directories", expand=True) 41 | table.add_column("Name", no_wrap=True) 42 | table.add_column("Path") 43 | 44 | path_vars = { 45 | x: Path(os.getenv(x)) 46 | for x in ("TEMP", "APPDATA", "LOCALAPPDATA", "USERPROFILE") 47 | if sys.platform == "win32" and os.getenv(x) 48 | } 49 | 50 | for name in sorted(dir(config.directories)): 51 | if name.startswith("__") or name == "app_dirs": 52 | continue 53 | path = getattr(config.directories, name).resolve() 54 | for var, var_path in path_vars.items(): 55 | if path.is_relative_to(var_path): 56 | path = rf"%{var}%\{path.relative_to(var_path)}" 57 | break 58 | table.add_row(name.title(), str(path)) 59 | 60 | console.print(Padding( 61 | table, 62 | (1, 5) 63 | )) 64 | 65 | 66 | @env.group(name="clear", short_help="Clear an environment directory.", context_settings=context_settings) 67 | def clear() -> None: 68 | """Clear an environment directory.""" 69 | 70 | 71 | @clear.command() 72 | @click.argument("service", type=str, required=False) 73 | def cache(service: Optional[str]) -> None: 74 | """Clear the environment cache directory.""" 75 | log = logging.getLogger("env") 76 | cache_dir = config.directories.cache 77 | if service: 78 | cache_dir = cache_dir / Services.get_tag(service) 79 | log.info(f"Clearing cache directory: {cache_dir}") 80 | files_count = len(list(cache_dir.glob("**/*"))) 81 | if not files_count: 82 | log.info("No files to delete") 83 | else: 84 | log.info(f"Deleting {files_count} files...") 85 | shutil.rmtree(cache_dir) 86 | log.info("Cleared") 87 | 88 | 89 | @clear.command() 90 | def temp() -> None: 91 | """Clear the environment temp directory.""" 92 | log = logging.getLogger("env") 93 | log.info(f"Clearing temp directory: {config.directories.temp}") 94 | files_count = 
len(list(config.directories.temp.glob("**/*"))) 95 | if not files_count: 96 | log.info("No files to delete") 97 | else: 98 | log.info(f"Deleting {files_count} files...") 99 | shutil.rmtree(config.directories.temp) 100 | log.info("Cleared") 101 | -------------------------------------------------------------------------------- /devine/commands/kv.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | from pathlib import Path 4 | from typing import Optional 5 | 6 | import click 7 | 8 | from devine.core.config import config 9 | from devine.core.constants import context_settings 10 | from devine.core.services import Services 11 | from devine.core.vault import Vault 12 | from devine.core.vaults import Vaults 13 | 14 | 15 | @click.group(short_help="Manage and configure Key Vaults.", context_settings=context_settings) 16 | def kv() -> None: 17 | """Manage and configure Key Vaults.""" 18 | 19 | 20 | @kv.command() 21 | @click.argument("to_vault", type=str) 22 | @click.argument("from_vaults", nargs=-1, type=click.UNPROCESSED) 23 | @click.option("-s", "--service", type=str, default=None, 24 | help="Only copy data to and from a specific service.") 25 | def copy(to_vault: str, from_vaults: list[str], service: Optional[str] = None) -> None: 26 | """ 27 | Copy data from multiple Key Vaults into a single Key Vault. 28 | Rows with matching KIDs are skipped unless there's no KEY set. 29 | Existing data is not deleted or altered. 30 | 31 | The `to_vault` argument is the key vault you wish to copy data to. 32 | It should be the name of a Key Vault defined in the config. 33 | 34 | The `from_vaults` argument is the key vault(s) you wish to take 35 | data from. You may supply multiple key vaults. 
36 | """ 37 | if not from_vaults: 38 | raise click.ClickException("No Vaults were specified to copy data from.") 39 | 40 | log = logging.getLogger("kv") 41 | 42 | vaults = Vaults() 43 | for vault_name in [to_vault] + list(from_vaults): 44 | vault = next((x for x in config.key_vaults if x["name"] == vault_name), None) 45 | if not vault: 46 | raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.") 47 | vault_type = vault["type"] 48 | vault_args = vault.copy() 49 | del vault_args["type"] 50 | vaults.load(vault_type, **vault_args) 51 | 52 | to_vault: Vault = vaults.vaults[0] 53 | from_vaults: list[Vault] = vaults.vaults[1:] 54 | 55 | log.info(f"Copying data from {', '.join([x.name for x in from_vaults])}, into {to_vault.name}") 56 | if service: 57 | service = Services.get_tag(service) 58 | log.info(f"Only copying data for service {service}") 59 | 60 | total_added = 0 61 | for from_vault in from_vaults: 62 | if service: 63 | services = [service] 64 | else: 65 | services = from_vault.get_services() 66 | 67 | for service_ in services: 68 | log.info(f"Getting data from {from_vault} for {service_}") 69 | content_keys = list(from_vault.get_keys(service_)) # important as it's a generator we iterate twice 70 | 71 | bad_keys = { 72 | kid: key 73 | for kid, key in content_keys 74 | if not key or key.count("0") == len(key) 75 | } 76 | 77 | for kid, key in bad_keys.items(): 78 | log.warning(f"Cannot add a NULL Content Key to a Vault, skipping: {kid}:{key}") 79 | 80 | content_keys = { 81 | kid: key 82 | for kid, key in content_keys 83 | if kid not in bad_keys 84 | } 85 | 86 | total_count = len(content_keys) 87 | log.info(f"Adding {total_count} Content Keys to {to_vault} for {service_}") 88 | 89 | try: 90 | added = to_vault.add_keys(service_, content_keys) 91 | except PermissionError: 92 | log.warning(f" - No permission to create table ({service_}) in {to_vault}, skipping...") 93 | continue 94 | 95 | total_added += added 96 | existed = total_count - added 
97 | 98 | log.info(f"{to_vault} ({service_}): {added} newly added, {existed} already existed (skipped)") 99 | 100 | log.info(f"{to_vault}: {total_added} total newly added") 101 | 102 | 103 | @kv.command() 104 | @click.argument("vaults", nargs=-1, type=click.UNPROCESSED) 105 | @click.option("-s", "--service", type=str, default=None, 106 | help="Only sync data to and from a specific service.") 107 | @click.pass_context 108 | def sync(ctx: click.Context, vaults: list[str], service: Optional[str] = None) -> None: 109 | """ 110 | Ensure multiple Key Vaults copies of all keys as each other. 111 | It's essentially just a bi-way copy between each vault. 112 | To see the precise details of what it's doing between each 113 | provided vault, see the documentation for the `copy` command. 114 | """ 115 | if not len(vaults) > 1: 116 | raise click.ClickException("You must provide more than one Vault to sync.") 117 | 118 | ctx.invoke(copy, to_vault=vaults[0], from_vaults=vaults[1:], service=service) 119 | for i in range(1, len(vaults)): 120 | ctx.invoke(copy, to_vault=vaults[i], from_vaults=[vaults[i-1]], service=service) 121 | 122 | 123 | @kv.command() 124 | @click.argument("file", type=Path) 125 | @click.argument("service", type=str) 126 | @click.argument("vaults", nargs=-1, type=click.UNPROCESSED) 127 | def add(file: Path, service: str, vaults: list[str]) -> None: 128 | """ 129 | Add new Content Keys to Key Vault(s) by service. 130 | 131 | File should contain one key per line in the format KID:KEY (HEX:HEX). 132 | Each line should have nothing else within it except for the KID:KEY. 133 | Encoding is presumed to be UTF8. 
134 | """ 135 | if not file.exists(): 136 | raise click.ClickException(f"File provided ({file}) does not exist.") 137 | if not file.is_file(): 138 | raise click.ClickException(f"File provided ({file}) is not a file.") 139 | if not service or not isinstance(service, str): 140 | raise click.ClickException(f"Service provided ({service}) is invalid.") 141 | if len(vaults) < 1: 142 | raise click.ClickException("You must provide at least one Vault.") 143 | 144 | log = logging.getLogger("kv") 145 | service = Services.get_tag(service) 146 | 147 | vaults_ = Vaults() 148 | for vault_name in vaults: 149 | vault = next((x for x in config.key_vaults if x["name"] == vault_name), None) 150 | if not vault: 151 | raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.") 152 | vault_type = vault["type"] 153 | vault_args = vault.copy() 154 | del vault_args["type"] 155 | vaults_.load(vault_type, **vault_args) 156 | 157 | data = file.read_text(encoding="utf8") 158 | kid_keys: dict[str, str] = {} 159 | for line in data.splitlines(keepends=False): 160 | line = line.strip() 161 | match = re.search(r"^(?P[0-9a-fA-F]{32}):(?P[0-9a-fA-F]{32})$", line) 162 | if not match: 163 | continue 164 | kid = match.group("kid").lower() 165 | key = match.group("key").lower() 166 | kid_keys[kid] = key 167 | 168 | total_count = len(kid_keys) 169 | 170 | for vault in vaults_: 171 | log.info(f"Adding {total_count} Content Keys to {vault}") 172 | added_count = vault.add_keys(service, kid_keys) 173 | existed_count = total_count - added_count 174 | log.info(f"{vault}: {added_count} newly added, {existed_count} already existed (skipped)") 175 | 176 | log.info("Done!") 177 | 178 | 179 | @kv.command() 180 | @click.argument("vaults", nargs=-1, type=click.UNPROCESSED) 181 | def prepare(vaults: list[str]) -> None: 182 | """Create Service Tables on Vaults if not yet created.""" 183 | log = logging.getLogger("kv") 184 | 185 | vaults_ = Vaults() 186 | for vault_name in vaults: 187 | vault = 
@kv.command()
@click.argument("vaults", nargs=-1, type=click.UNPROCESSED)
def prepare(vaults: list[str]) -> None:
    """Create Service Tables on Vaults if not yet created."""
    log = logging.getLogger("kv")

    # Load each named vault from the user's config, failing fast on unknown names.
    vaults_ = Vaults()
    for vault_name in vaults:
        vault = next((x for x in config.key_vaults if x["name"] == vault_name), None)
        if not vault:
            raise click.ClickException(f"Vault ({vault_name}) is not defined in the config.")
        vault_type = vault["type"]
        # every remaining config key (except "type") is passed as a loader kwarg
        vault_args = vault.copy()
        del vault_args["type"]
        vaults_.load(vault_type, **vault_args)

    for vault in vaults_:
        # Only vaults that expose has_table/create_table are table-based;
        # anything else is skipped with an informational message.
        if hasattr(vault, "has_table") and hasattr(vault, "create_table"):
            for service_tag in Services.get_tags():
                if vault.has_table(service_tag):
                    log.info(f"{vault} already has a {service_tag} Table")
                else:
                    try:
                        vault.create_table(service_tag, commit=True)
                        log.info(f"{vault}: Created {service_tag} Table")
                    except PermissionError:
                        # the vault user may be read-only; log and continue
                        # rather than aborting the whole run
                        log.error(f"{vault} user has no create table permission, skipping...")
                        continue
        else:
            log.info(f"{vault} does not use tables, skipping...")

    log.info("Done!")
@click.command(
    short_help="Search for titles from a Service.",
    cls=Services,
    context_settings=dict(
        **context_settings,
        token_normalize_func=Services.get_tag
    ))
@click.option("-p", "--profile", type=str, default=None,
              help="Profile to use for Credentials and Cookies (if available).")
@click.option("--proxy", type=str, default=None,
              help="Proxy URI to use. If a 2-letter country is provided, it will try get a proxy from the config.")
@click.option("--no-proxy", is_flag=True, default=False,
              help="Force disable all proxy use.")
@click.pass_context
def search(
    ctx: click.Context,
    no_proxy: bool,
    profile: Optional[str] = None,
    proxy: Optional[str] = None
):
    """
    Prepare state for the invoked search subcommand: load the Service config,
    set up proxy providers, resolve the requested proxy, and stash everything
    on ctx.obj for the subcommand/result callback.
    """
    if not ctx.invoked_subcommand:
        raise ValueError("A subcommand to invoke was not specified, the main code cannot continue.")

    log = logging.getLogger("search")

    service = Services.get_tag(ctx.invoked_subcommand)
    # (removed a no-op self-assignment `profile = profile` that had no effect)

    if profile:
        log.info(f"Using profile: '{profile}'")

    with console.status("Loading Service Config...", spinner="dots"):
        service_config_path = Services.get_path(service) / config.filenames.config
        if service_config_path.exists():
            service_config = yaml.safe_load(service_config_path.read_text(encoding="utf8"))
            log.info("Service Config loaded")
        else:
            service_config = {}
        merge_dict(config.services.get(service), service_config)

    proxy_providers = []
    if no_proxy:
        ctx.params["proxy"] = None
    else:
        with console.status("Loading Proxy Providers...", spinner="dots"):
            if config.proxy_providers.get("basic"):
                proxy_providers.append(Basic(**config.proxy_providers["basic"]))
            if config.proxy_providers.get("nordvpn"):
                proxy_providers.append(NordVPN(**config.proxy_providers["nordvpn"]))
            if binaries.HolaProxy:
                proxy_providers.append(Hola())
            for proxy_provider in proxy_providers:
                log.info(f"Loaded {proxy_provider.__class__.__name__}: {proxy_provider}")

        if proxy:
            requested_provider = None
            if re.match(r"^[a-z]+:.+$", proxy, re.IGNORECASE):
                # requesting proxy from a specific proxy provider
                requested_provider, proxy = proxy.split(":", maxsplit=1)
            if re.match(r"^[a-z]{2}(?:\d+)?$", proxy, re.IGNORECASE):
                # a 2-letter country code (optionally numbered, e.g. "us2"):
                # resolve it to a proxy URI through a provider
                proxy = proxy.lower()
                with console.status(f"Getting a Proxy to {proxy}...", spinner="dots"):
                    if requested_provider:
                        proxy_provider = next((
                            x
                            for x in proxy_providers
                            if x.__class__.__name__.lower() == requested_provider
                        ), None)
                        if not proxy_provider:
                            log.error(f"The proxy provider '{requested_provider}' was not recognised.")
                            sys.exit(1)
                        proxy_uri = proxy_provider.get_proxy(proxy)
                        if not proxy_uri:
                            log.error(f"The proxy provider {requested_provider} had no proxy for {proxy}")
                            sys.exit(1)
                        proxy = ctx.params["proxy"] = proxy_uri
                        log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy}")
                    else:
                        # no specific provider requested: take the first that
                        # has a proxy for this country
                        for proxy_provider in proxy_providers:
                            proxy_uri = proxy_provider.get_proxy(proxy)
                            if proxy_uri:
                                proxy = ctx.params["proxy"] = proxy_uri
                                log.info(f"Using {proxy_provider.__class__.__name__} Proxy: {proxy}")
                                break
            else:
                log.info(f"Using explicit Proxy: {proxy}")

    ctx.obj = ContextData(
        config=service_config,
        cdm=None,
        proxy_providers=proxy_providers,
        profile=profile
    )
@search.result_callback()
def result(service: Service, profile: Optional[str] = None, **_: Any) -> None:
    """
    Handle the Service instance returned by the invoked search subcommand:
    authenticate, run the search, persist updated cookies, and render results.
    """
    log = logging.getLogger("search")

    service_tag = service.__class__.__name__

    with console.status("Authenticating with Service...", spinner="dots"):
        cookies = dl.get_cookie_jar(service_tag, profile)
        credential = dl.get_credentials(service_tag, profile)
        service.authenticate(cookies, credential)
        if cookies or credential:
            log.info("Authenticated with Service")

    # Build one tree node per search result; the title is hyperlinked when a
    # URL is available, and label/description/id are appended on extra lines.
    search_results = Tree("Search Results", hide_root=True)
    with console.status("Searching...", spinner="dots"):
        for result in service.search():
            result_text = f"[bold text]{result.title}[/]"
            if result.url:
                result_text = f"[link={result.url}]{result_text}[/link]"
            if result.label:
                result_text += f" [pink]{result.label}[/]"
            if result.description:
                result_text += f"\n[text2]{result.description}[/]"
            result_text += f"\n[bright_black]id: {result.id}[/]"
            search_results.add(result_text + "\n")

    # update cookies
    cookie_file = dl.get_cookie_path(service_tag, profile)
    if cookie_file:
        dl.save_cookies(cookie_file, service.session.cookies)

    console.print(Padding(
        Rule(f"[rule.text]{len(search_results.children)} Search Results"),
        (1, 2)
    ))

    if search_results.children:
        console.print(Padding(
            search_results,
            (0, 5)
        ))
    else:
        console.print(Padding(
            "[bold text]No matches[/]\n[bright_black]Please check spelling and search again....[/]",
            (0, 5)
        ))
@click.command(
    short_help="Serve your Local Widevine Devices for Remote Access.",
    context_settings=context_settings)
@click.option("-h", "--host", type=str, default="0.0.0.0", help="Host to serve from.")
@click.option("-p", "--port", type=int, default=8786, help="Port to serve from.")
@click.option("--caddy", is_flag=True, default=False, help="Also serve with Caddy.")
def serve(host: str, port: int, caddy: bool) -> None:
    """
    Serve your Local Widevine Devices for Remote Access.

    \b
    Host as 127.0.0.1 may block remote access even if port-forwarded.
    Instead, use 0.0.0.0 and ensure the TCP port you choose is forwarded.

    \b
    You may serve with Caddy at the same time with --caddy. You can use Caddy
    as a reverse-proxy to serve with HTTPS. The config used will be the Caddyfile
    next to the devine config.
    """
    # Imported lazily so pywidevine's serve machinery is only loaded when this
    # command actually runs. NOTE: this import shadows the `serve` command's
    # own name inside this function body.
    from pywidevine import serve

    if caddy:
        if not binaries.Caddy:
            raise click.ClickException("Caddy executable \"caddy\" not found but is required for --caddy.")
        # Run Caddy alongside the device server, using the Caddyfile that sits
        # next to the devine config.
        caddy_p = subprocess.Popen([
            binaries.Caddy,
            "run",
            "--config", str(config.directories.user_configs / "Caddyfile")
        ])
    else:
        caddy_p = None

    try:
        if not config.serve.get("devices"):
            config.serve["devices"] = []
        # serve every .wvd found in the WVDs directory, in addition to any
        # devices already listed in the serve config
        config.serve["devices"].extend(list(config.directories.wvds.glob("*.wvd")))
        serve.run(config.serve, host, port)
    finally:
        # make sure the Caddy child process never outlives the server
        if caddy_p:
            caddy_p.kill()
@util.command()
@click.argument("path", type=Path)
@click.argument("aspect", type=str)
@click.option("--letter/--pillar", default=True,
              help="Specify which direction to crop. Top and Bottom would be --letter, Sides would be --pillar.")
@click.option("-o", "--offset", type=int, default=0,
              help="Fine tune the computed crop area if not perfectly centered.")
@click.option("-p", "--preview", is_flag=True, default=False,
              help="Instantly preview the newly-set aspect crop in MPV (or ffplay if mpv is unavailable).")
def crop(path: Path, aspect: str, letter: bool, offset: int, preview: bool) -> None:
    """
    Losslessly crop H.264 and H.265 video files at the bit-stream level.
    You may provide a path to a file, or a folder of mkv and/or mp4 files.

    Note: If you notice that the values you put in are not quite working, try
    tune -o/--offset. This may be necessary on videos with sub-sampled chroma.

    Do note that you may not get an ideal lossless cropping result on some
    cases, again due to sub-sampled chroma.

    It's recommended that you try -o about 10 or so pixels and lower it until
    you get as close in as possible. Do make sure it's not over-cropping either
    as it may go from being 2px away from a perfect crop, to 20px over-cropping
    again due to sub-sampled chroma.
    """
    if not binaries.FFMPEG:
        raise click.ClickException("FFmpeg executable \"ffmpeg\" not found but is required.")

    if path.is_dir():
        paths = list(path.glob("*.mkv")) + list(path.glob("*.mp4"))
    else:
        paths = [path]
    for video_path in paths:
        try:
            video_track = next(iter(MediaInfo.parse(video_path).video_tracks or []))
        except StopIteration:
            raise click.ClickException("There's no video tracks in the provided file.")

        # pick the bit-stream filter for the codec; only H.265/H.264 have
        # metadata filters that support crop flags
        crop_filter = {
            "HEVC": "hevc_metadata",
            "AVC": "h264_metadata"
        }.get(video_track.commercial_name)
        if not crop_filter:
            raise click.ClickException(f"{video_track.commercial_name} Codec not supported.")

        aspect_w, aspect_h = list(map(float, aspect.split(":")))
        if letter:
            # BUGFIX: the divisor was `aspect_w * aspect_h`, which only equals
            # the aspect ratio when aspect_h is 1 (e.g. "2.40:1"); for "16:9"
            # it would divide by 144 instead of 16/9. Target height for a
            # letterbox crop is width / (w/h), split evenly top and bottom.
            crop_value = (video_track.height - (video_track.width / (aspect_w / aspect_h))) / 2
            left, top, right, bottom = map(int, [0, crop_value + offset, 0, crop_value - offset])
        else:
            # pillarbox: target width is height * (w/h), split evenly left/right
            crop_value = (video_track.width - (video_track.height * (aspect_w / aspect_h))) / 2
            left, top, right, bottom = map(int, [crop_value + offset, 0, crop_value - offset, 0])
        crop_filter += f"=crop_left={left}:crop_top={top}:crop_right={right}:crop_bottom={bottom}"

        if min(left, top, right, bottom) < 0:
            raise click.ClickException("Cannot crop less than 0, are you cropping in the right direction?")

        if preview:
            out_path = ["-f", "mpegts", "-"]  # pipe
        else:
            out_path = [str(video_path.with_name(".".join(filter(bool, [
                video_path.stem,
                video_track.language,
                "crop",
                str(offset or ""),
                {
                    # ffmpeg's MKV muxer does not yet support HDR
                    "HEVC": "h265",
                    "AVC": "h264"
                }.get(video_track.commercial_name, ".mp4")
            ]))))]

        ffmpeg_call = subprocess.Popen([
            binaries.FFMPEG, "-y",
            "-i", str(video_path),
            "-map", "0:v:0",
            "-c", "copy",
            "-bsf:v", crop_filter
        ] + out_path, stdout=subprocess.PIPE)
        try:
            if preview:
                previewer = binaries.MPV or binaries.FFPlay
                if not previewer:
                    raise click.ClickException("MPV/FFplay executables weren't found but are required for previewing.")
                subprocess.Popen((previewer, "-"), stdin=ffmpeg_call.stdout)
        finally:
            # always release the pipe and reap the ffmpeg process
            if ffmpeg_call.stdout:
                ffmpeg_call.stdout.close()
            ffmpeg_call.wait()
@util.command(name="range")
@click.argument("path", type=Path)
@click.option("--full/--limited", is_flag=True,
              help="Full: 0..255, Limited: 16..235 (16..240 YUV luma)")
@click.option("-p", "--preview", is_flag=True, default=False,
              help="Instantly preview the newly-set video range in MPV (or ffplay if mpv is unavailable).")
def range_(path: Path, full: bool, preview: bool) -> None:
    """
    Losslessly set the Video Range flag to full or limited at the bit-stream level.
    You may provide a path to a file, or a folder of mkv and/or mp4 files.

    If you ever notice blacks not being quite black, and whites not being quite white,
    then your video may have the range set to the wrong value. Flip its range to the
    opposite value and see if that fixes it.
    """
    if not binaries.FFMPEG:
        raise click.ClickException("FFmpeg executable \"ffmpeg\" not found but is required.")

    if path.is_dir():
        paths = list(path.glob("*.mkv")) + list(path.glob("*.mp4"))
    else:
        paths = [path]
    for video_path in paths:
        try:
            video_track = next(iter(MediaInfo.parse(video_path).video_tracks or []))
        except StopIteration:
            raise click.ClickException("There's no video tracks in the provided file.")

        # pick the bit-stream metadata filter for the codec; only H.265/H.264
        # are supported
        metadata_key = {
            "HEVC": "hevc_metadata",
            "AVC": "h264_metadata"
        }.get(video_track.commercial_name)
        if not metadata_key:
            raise click.ClickException(f"{video_track.commercial_name} Codec not supported.")

        if preview:
            out_path = ["-f", "mpegts", "-"]  # pipe
        else:
            out_path = [str(video_path.with_name(".".join(filter(bool, [
                video_path.stem,
                video_track.language,
                "range",
                ["limited", "full"][full],
                {
                    # ffmpeg's MKV muxer does not yet support HDR
                    "HEVC": "h265",
                    "AVC": "h264"
                }.get(video_track.commercial_name, ".mp4")
            ]))))]

        # copy the stream untouched; only the range flag in the bit-stream
        # metadata is rewritten
        ffmpeg_call = subprocess.Popen([
            binaries.FFMPEG, "-y",
            "-i", str(video_path),
            "-map", "0:v:0",
            "-c", "copy",
            "-bsf:v", f"{metadata_key}=video_full_range_flag={int(full)}"
        ] + out_path, stdout=subprocess.PIPE)
        try:
            if preview:
                previewer = binaries.MPV or binaries.FFPlay
                if not previewer:
                    raise click.ClickException("MPV/FFplay executables weren't found but are required for previewing.")
                subprocess.Popen((previewer, "-"), stdin=ffmpeg_call.stdout)
        finally:
            # always release the pipe and reap the ffmpeg process
            if ffmpeg_call.stdout:
                ffmpeg_call.stdout.close()
            ffmpeg_call.wait()
@util.command()
@click.argument("path", type=Path)
@click.option("-m", "--map", "map_", type=str, default="0",
              help="Test specific streams by setting FFmpeg's -map parameter.")
def test(path: Path, map_: str) -> None:
    """
    Decode an entire video and check for any corruptions or errors using FFmpeg.
    You may provide a path to a file, or a folder of mkv and/or mp4 files.

    Tests all streams within the file by default. Subtitles cannot be tested.
    You may choose specific streams using the -m/--map parameter. E.g.,
    '0:v:0' to test the first video stream, or '0:a' to test all audio streams.
    """
    if not binaries.FFMPEG:
        raise click.ClickException("FFmpeg executable \"ffmpeg\" not found but is required.")

    if path.is_dir():
        paths = list(path.glob("*.mkv")) + list(path.glob("*.mp4"))
    else:
        paths = [path]
    for video_path in paths:
        print("Starting...")
        # decode to the null muxer — only ffmpeg's stderr output matters here
        p = subprocess.Popen([
            binaries.FFMPEG, "-hide_banner",
            "-benchmark",
            "-i", str(video_path),
            "-map", map_,
            "-sn",
            "-f", "null",
            "-"
        ], stderr=subprocess.PIPE, universal_newlines=True)
        reached_output = False
        errors = 0
        for line in p.stderr:
            line = line.strip()
            if "speed=" in line:
                # progress lines contain "speed="; everything printed before
                # the first one (banner/stream info) is skipped
                reached_output = True
            if not reached_output:
                continue
            if line.startswith("["):  # error of some kind
                errors += 1
                # drop the " @ 0x..." address portion of the bracketed prefix
                stream, error = line.split("] ", maxsplit=1)
                stream = stream.split(" @ ")[0]
                line = f"{stream} ERROR: {error}"
            print(line)
        p.stderr.close()
        print(f"Finished with {errors} Errors, Cleaning up...")
        p.terminate()
        p.wait()
@wvd.command()
@click.argument("names", type=str, nargs=-1)
def delete(names: list[str]) -> None:
    """Delete one or more WVD (Widevine Device) files from the WVDs Directory."""
    log = logging.getLogger("wvd")
    for name in names:
        path = (config.directories.wvds / name).with_suffix(".wvd")
        if not path.exists():
            log.error(f"No WVD file exists by the name '{name}'...")
            continue

        # require explicit confirmation before destructive removal,
        # defaulting to "n" (keep the file)
        answer = Prompt.ask(
            f"[red]Deleting '{name}'[/], are you sure you want to continue?",
            choices=["y", "n"],
            default="n",
            console=console
        )
        if answer == "n":
            log.info("Aborting...")
            continue

        # idiom fix: call unlink() on the instance rather than the unbound
        # `Path.unlink(path)` form
        path.unlink()
        log.info(f"Deleted {name}")
@wvd.command()
@click.argument("path", type=Path)
def parse(path: Path) -> None:
    """
    Parse a .WVD Widevine Device file to check information.
    Relative paths are relative to the WVDs directory.
    """
    # A bare relative name with no file extension refers to a WVD stored in
    # the configured WVDs directory. relative_to(Path("")) raises ValueError
    # for absolute paths, leaving named as False.
    try:
        named = not path.suffix and path.relative_to(Path(""))
    except ValueError:
        named = False
    if named:
        path = config.directories.wvds / f"{path.name}.wvd"

    log = logging.getLogger("wvd")

    if not path.exists():
        console.log(f"[bright_blue]{path.absolute()}[/] does not exist...")
        return

    device = Device.load(path)

    log.info(f"System ID: {device.system_id}")
    log.info(f"Security Level: {device.security_level}")
    log.info(f"Type: {device.type}")
    log.info(f"Flags: {device.flags}")
    log.info(f"Private Key: {bool(device.private_key)}")
    log.info(f"Client ID: {bool(device.client_id)}")
    log.info(f"VMP: {bool(device.client_id.vmp_data)}")

    log.info("Client ID:")
    log.info(device.client_id)

    log.info("VMP:")
    if device.client_id.vmp_data:
        # vmp_data holds a serialized FileHashes protobuf message
        file_hashes = FileHashes()
        file_hashes.ParseFromString(device.client_id.vmp_data)
        log.info(str(file_hashes))
    else:
        log.info("None")
@wvd.command()
@click.argument("wvd_paths", type=Path, nargs=-1)
@click.argument("out_dir", type=Path, nargs=1)
def dump(wvd_paths: list[Path], out_dir: Path) -> None:
    """
    Extract data from a .WVD Widevine Device file to a folder structure.

    If the path is relative, with no file extension, it will dump the WVD in the WVDs
    directory.
    """
    log = logging.getLogger("wvd")

    # no explicit paths given: dump every .wvd found in the WVDs directory
    if wvd_paths == ():
        if not config.directories.wvds.exists():
            console.log(f"[bright_blue]{config.directories.wvds.absolute()}[/] does not exist...")
        wvd_paths = list(
            x
            for x in config.directories.wvds.iterdir()
            if x.is_file() and x.suffix.lower() == ".wvd"
        )
        if not wvd_paths:
            console.log(f"[bright_blue]{config.directories.wvds.absolute()}[/] is empty...")

    # each WVD gets its own sub-folder under out_dir, named after the file stem
    for i, (wvd_path, out_path) in enumerate(zip(wvd_paths, (out_dir / x.stem for x in wvd_paths))):
        if i > 0:
            log.info("")  # blank line separating devices in the output

        # bare relative names without an extension refer to files in the WVDs
        # directory (relative_to raises ValueError for absolute paths)
        try:
            named = not wvd_path.suffix and wvd_path.relative_to(Path(""))
        except ValueError:
            named = False
        if named:
            wvd_path = config.directories.wvds / f"{wvd_path.stem}.wvd"
        out_path.mkdir(parents=True, exist_ok=True)

        log.info(f"Dumping: {wvd_path}")
        device = Device.load(wvd_path)

        log.info(f"L{device.security_level} {device.system_id} {device.type.name}")
        log.info(f"Saving to: {out_path}")

        device_meta = {
            "wvd": {
                "device_type": device.type.name,
                "security_level": device.security_level,
                **device.flags
            },
            "client_info": {},
            "capabilities": MessageToDict(device.client_id, preserving_proto_field_name=True)["client_capabilities"]
        }
        for client_info in device.client_id.client_info:
            device_meta["client_info"][client_info.name] = client_info.value

        device_meta_path = out_path / "metadata.yml"
        device_meta_path.write_text(yaml.dump(device_meta), encoding="utf8")
        log.info(" + Device Metadata")

        if device.private_key:
            # export the private key as both PEM (text) and DER (binary)
            private_key_path = out_path / "private_key.pem"
            private_key_path.write_text(
                data=device.private_key.export_key().decode(),
                encoding="utf8"
            )
            private_key_path.with_suffix(".der").write_bytes(
                device.private_key.export_key(format="DER")
            )
            log.info(" + Private Key")
        else:
            log.warning(" - No Private Key available")

        if device.client_id:
            client_id_path = out_path / "client_id.bin"
            client_id_path.write_bytes(device.client_id.SerializeToString())
            log.info(" + Client ID")
        else:
            log.warning(" - No Client ID available")

        if device.client_id.vmp_data:
            # vmp_data holds a serialized FileHashes protobuf message
            vmp_path = out_path / "vmp.bin"
            vmp_path.write_bytes(device.client_id.vmp_data)
            log.info(" + VMP (File Hashes)")
        else:
            log.info(" - No VMP (File Hashes) available")
@wvd.command()
@click.argument("name", type=str)
@click.argument("private_key", type=Path)
@click.argument("client_id", type=Path)
@click.argument("file_hashes", type=Path, required=False)
@click.option("-t", "--type", "type_", type=click.Choice([x.name for x in DeviceTypes], case_sensitive=False),
              default="Android", help="Device Type")
@click.option("-l", "--level", type=click.IntRange(1, 3), default=1, help="Device Security Level")
@click.option("-o", "--output", type=Path, default=None, help="Output Directory")
@click.pass_context
def new(
    ctx: click.Context,
    name: str,
    private_key: Path,
    client_id: Path,
    file_hashes: Optional[Path],
    type_: str,
    level: int,
    output: Optional[Path]
) -> None:
    """
    Create a new .WVD Widevine provision file.

    name: The origin device name of the provided data. e.g. `Nexus 6P`. You do not need to
        specify the security level, that will be done automatically.
    private_key: A PEM file of a Device's private key.
    client_id: A binary blob file which follows the Widevine ClientIdentification protobuf
        schema.
    file_hashes: A binary blob file with follows the Widevine FileHashes protobuf schema.
        Also known as VMP as it's used for VMP (Verified Media Path) assurance.
    """
    try:
        # TODO: Remove need for name, create name based on Client IDs ClientInfo values
        # normalize the name to a lowercase, underscore-separated ASCII slug
        name = unidecode(name.strip().lower().replace(" ", "_"))
    except UnidecodeError as e:
        raise click.UsageError(f"name: Failed to sanitize name, {e}", ctx)
    if not name:
        raise click.UsageError("name: Empty after sanitizing, please make sure the name is valid.", ctx)
    if not private_key.is_file():
        raise click.UsageError("private_key: Not a path to a file, or it doesn't exist.", ctx)
    if not client_id.is_file():
        raise click.UsageError("client_id: Not a path to a file, or it doesn't exist.", ctx)
    if file_hashes and not file_hashes.is_file():
        raise click.UsageError("file_hashes: Not a path to a file, or it doesn't exist.", ctx)

    device = Device(
        type_=DeviceTypes[type_.upper()],
        security_level=level,
        flags=None,
        private_key=private_key.read_bytes(),
        client_id=client_id.read_bytes()
    )

    if file_hashes:
        # attach the VMP FileHashes blob onto the device's Client ID
        device.client_id.vmp_data = file_hashes.read_bytes()

    # filename embeds the sanitized name, system ID and security level
    out_path = (output or config.directories.wvds) / f"{name}_{device.system_id}_l{device.security_level}.wvd"
    device.dump(out_path)

    log = logging.getLogger("wvd")

    log.info(f"Created binary WVD file, {out_path.name}")
    log.info(f" + Saved to: {out_path.absolute()}")

    log.info(f"System ID: {device.system_id}")
    log.info(f"Security Level: {device.security_level}")
    log.info(f"Type: {device.type}")
    log.info(f"Flags: {device.flags}")
    log.info(f"Private Key: {bool(device.private_key)}")
    log.info(f"Client ID: {bool(device.client_id)}")
    log.info(f"VMP: {bool(device.client_id.vmp_data)}")

    log.info("Client ID:")
    log.info(device.client_id)

    log.info("VMP:")
    if device.client_id.vmp_data:
        # pretty-print the FileHashes protobuf for inspection
        file_hashes = FileHashes()
        file_hashes.ParseFromString(device.client_id.vmp_data)
        log.info(str(file_hashes))
    else:
        log.info("None")
@click.command(cls=Commands, invoke_without_command=True, context_settings=context_settings)
@click.option("-v", "--version", is_flag=True, default=False, help="Print version information.")
@click.option("-d", "--debug", is_flag=True, default=False, help="Enable DEBUG level logs.")
@click.option("--log", "log_path", type=Path, default=config.directories.logs / config.filenames.log,
              help="Log path (or filename). Path can contain the following f-string args: {name} {time}.")
def main(version: bool, debug: bool, log_path: Path) -> None:
    """Devine—Modular Movie, TV, and Music Archival Software."""
    # route all logging through the rich handler so log output matches the
    # console renderer
    logging.basicConfig(
        level=logging.DEBUG if debug else logging.INFO,
        format="%(message)s",
        handlers=[ComfyRichHandler(
            show_time=False,
            show_path=debug,
            console=console,
            rich_tracebacks=True,
            tracebacks_suppress=[click],
            log_renderer=console._log_render  # noqa
        )]
    )

    if log_path:
        global LOGGING_PATH
        # record console output so the atexit save_log() hook can persist it
        console.record = True
        new_log_path = rotate_log_file(log_path)
        LOGGING_PATH = new_log_path

    urllib3.disable_warnings(InsecureRequestWarning)

    traceback.install(
        console=console,
        width=80,
        suppress=[click]
    )

    # ASCII-art banner with version/copyright, printed on every invocation
    console.print(
        Padding(
            Group(
                Text(
                    r" / __ \/ ____/ | / / _/ | / / ____/" + "\n"
                    r" / / / / __/ | | / // // |/ / __/ " + "\n"
                    r" / /_/ / /___ | |/ // // /| / /___ " + "\n"
                    r"/_____/_____/ |___/___/_/ |_/_____/ ⠀",
                    style="ascii.art"
                ),
                f"v[repr.number]{__version__}[/] Copyright © 2019-{datetime.now().year} rlaphoenix",
                " [bright_blue]https://github.com/devine-dl/devine[/]"
            ),
            (1, 21, 1, 20),
            expand=True
        ),
        justify="left"
    )

    if version:
        # -v/--version: the banner above already shows the version, so stop here
        return
def find(*names: str) -> Optional[Path]:
    """
    Locate the first of the given binary names available on PATH.

    Checks each name in order with shutil.which() and returns the resolved
    location of the first match as a Path, or None when none are found.
    """
    for candidate in names:
        located = shutil.which(candidate)
        if located is not None:
            return Path(located)
    return None
class Cacher:
    """Cacher for Services to get and set arbitrary data with expiration dates."""

    def __init__(
        self,
        service_tag: str,
        key: Optional[str] = None,
        version: Optional[int] = 1,
        data: Optional[Any] = None,
        expiration: Optional[datetime] = None
    ) -> None:
        self.service_tag = service_tag
        self.key = key
        self.version = version
        self.data = data or {}
        self.expiration = expiration

        if self.expiration and self.expired:
            # if it's expired, remove the data for safety and delete the cache file
            self.data = None
            # missing_ok: the cache file may never have been written for this key,
            # previously this raised FileNotFoundError in that case
            self.path.unlink(missing_ok=True)

    def __bool__(self) -> bool:
        return bool(self.data)

    @property
    def path(self) -> Path:
        """Get the path at which the cache will be read and written."""
        return (config.directories.cache / self.service_tag / self.key).with_suffix(".json")

    @property
    def expired(self) -> bool:
        """Whether the cached data has passed its expiration date (False if none set)."""
        # BUG FIX: previously `self.expiration and ...` which returns None (not False)
        # when no expiration is set, despite the -> bool annotation.
        return bool(self.expiration and self.expiration < datetime.now())

    def get(self, key: str, version: int = 1) -> Cacher:
        """
        Get Cached data for the Service by Key.
        :param key: the filename to save the data to, should be url-safe.
        :param version: the config data version you expect to use.
        :returns: Cache object containing the cached data or None if the file does not exist.
        :raises ValueError: if the payload checksum mismatches or the cache version is outdated.
        """
        cache = Cacher(self.service_tag, key, version)
        if cache.path.is_file():
            data = jsonpickle.loads(cache.path.read_text(encoding="utf8"))
            # verify payload integrity: the stored crc32 must match the crc32 of
            # the payload re-serialized without the checksum field itself
            payload = data.copy()
            del payload["crc32"]
            checksum = data["crc32"]
            calculated = zlib.crc32(jsonpickle.dumps(payload).encode("utf8"))
            if calculated != checksum:
                raise ValueError(
                    f"The checksum of the Cache payload mismatched. "
                    f"Checksum: {checksum} !== Calculated: {calculated}"
                )
            cache.data = data["data"]
            cache.expiration = data["expiration"]
            cache.version = data["version"]
            if cache.version != version:
                raise ValueError(
                    f"The version of your {self.service_tag} {key} cache is outdated. "
                    f"Please delete: {cache.path}"
                )
        return cache

    def set(self, data: Any, expiration: Optional[EXP_T] = None) -> Any:
        """
        Set Cached data for the Service by Key.
        :param data: absolutely anything including None.
        :param expiration: when the data expires, optional. Can be ISO 8601, seconds
            til expiration, unix timestamp, or a datetime object.
        :returns: the data provided for quick wrapping of functions or vars.
        """
        self.data = data

        if not expiration:
            # if the data is a JWT, use its exp claim as the natural expiration
            # NOTE(review): non-str data may raise errors other than DecodeError here — confirm
            try:
                expiration = jwt.decode(self.data, options={"verify_signature": False})["exp"]
            except jwt.DecodeError:
                pass

        self.expiration = self._resolve_datetime(expiration) if expiration else None

        payload = {
            "data": self.data,
            "expiration": self.expiration,
            "version": self.version
        }
        # checksum is computed over the payload without the crc32 field (see get())
        payload["crc32"] = zlib.crc32(jsonpickle.dumps(payload).encode("utf8"))

        self.path.parent.mkdir(parents=True, exist_ok=True)
        self.path.write_text(jsonpickle.dumps(payload))

        return self.data

    def stat(self) -> stat_result:
        """
        Get Cache file OS Stat data like Creation Time, Modified Time, and such.
        :returns: an os.stat_result tuple
        """
        return self.path.stat()

    @staticmethod
    def _resolve_datetime(timestamp: EXP_T) -> datetime:
        """
        Resolve multiple formats of a Datetime or Timestamp to an absolute Datetime.

        Examples:
            >>> now = datetime.now()
            datetime.datetime(2022, 6, 27, 9, 49, 13, 657208)
            >>> iso8601 = now.isoformat()
            '2022-06-27T09:49:13.657208'
            >>> Cacher._resolve_datetime(iso8601)
            datetime.datetime(2022, 6, 27, 9, 49, 13, 657208)
            >>> Cacher._resolve_datetime(iso8601 + "Z")
            datetime.datetime(2022, 6, 27, 9, 49, 13, 657208)
            >>> Cacher._resolve_datetime(3600)
            datetime.datetime(2022, 6, 27, 10, 52, 50, 657208)
            >>> Cacher._resolve_datetime('3600')
            datetime.datetime(2022, 6, 27, 10, 52, 51, 657208)
            >>> Cacher._resolve_datetime(7800.113)
            datetime.datetime(2022, 6, 27, 11, 59, 13, 770208)

        In the int/float examples you may notice that it did not return now + 3600 seconds
        but rather something a bit more than that. This is because it did not resolve 3600
        seconds from the `now` variable but from right now as the function was called.
        """
        if isinstance(timestamp, datetime):
            return timestamp
        if isinstance(timestamp, str):
            if timestamp.endswith("Z"):
                # fromisoformat doesn't accept the final Z
                timestamp = timestamp.split("Z")[0]
            try:
                return datetime.fromisoformat(timestamp)
            except ValueError:
                # not ISO 8601; fall through and treat it as a numeric timestamp
                timestamp = float(timestamp)
        try:
            if len(str(int(timestamp))) == 13:  # JS-style millisecond timestamp
                timestamp /= 1000
            timestamp = datetime.fromtimestamp(timestamp)
        except ValueError:
            raise ValueError(f"Unrecognized Timestamp value {timestamp!r}")
        if timestamp < datetime.now():
            # timestamp is likely an amount of seconds til expiration
            # or, it's an already expired timestamp which is unlikely
            timestamp = timestamp + timedelta(seconds=datetime.now().timestamp())
        return timestamp
from typing import Optional

import click

from devine.core.config import config
from devine.core.utilities import import_module_by_path

# All command scripts found in the configured commands directory, sorted by name.
_COMMANDS = sorted(
    (
        path
        for path in config.directories.commands.glob("*.py")
        if path.stem.lower() != "__init__"
    ),
    key=lambda x: x.stem
)

# NOTE(review): command modules are imported eagerly here, at import time of this
# module, despite the original "lazy-loaded" description — confirm intent.
_MODULES = {
    path.stem: getattr(import_module_by_path(path), path.stem)
    for path in _COMMANDS
}


class Commands(click.MultiCommand):
    """Click multi-command that dispatches to the project's command modules."""

    def list_commands(self, ctx: click.Context) -> list[str]:
        """Return the available command names, derived from the command filenames."""
        return list(_MODULES)

    def get_command(self, ctx: click.Context, name: str) -> Optional[click.Command]:
        """Resolve a command name to its main click command function."""
        module = _MODULES.get(name)
        if not module:
            raise click.ClickException(f"Unable to find command by the name '{name}'")
        # a module may expose its entry point as `cli`, otherwise the object itself is used
        return getattr(module, "cli", module)


# Hide direct access to commands from quick import form, they shouldn't be accessed directly
__all__ = ("Commands",)
class Config:
    """Root runtime configuration, typically loaded from a devine.yaml file."""

    class _Directories:
        # default directories, do not modify here, set via config
        app_dirs = AppDirs("devine", False)
        core_dir = Path(__file__).resolve().parent
        namespace_dir = core_dir.parent
        commands = namespace_dir / "commands"
        services = namespace_dir / "services"
        vaults = namespace_dir / "vaults"
        fonts = namespace_dir / "fonts"
        user_configs = Path(app_dirs.user_config_dir)
        data = Path(app_dirs.user_data_dir)
        downloads = Path.home() / "Downloads" / "devine"
        temp = Path(tempfile.gettempdir()) / "devine"
        cache = Path(app_dirs.user_cache_dir)
        cookies = data / "Cookies"
        logs = Path(app_dirs.user_log_dir)
        wvds = data / "WVDs"
        dcsl = data / "DCSL"

    class _Filenames:
        # default filenames, do not modify here, set via config
        log = "devine_{name}_{time}.log"  # Directories.logs
        config = "config.yaml"  # Directories.services / tag
        root_config = "devine.yaml"  # Directories.user_configs
        chapters = "Chapters_{title}_{random}.txt"  # Directories.temp
        subtitle = "Subtitle_{id}_{language}.srt"  # Directories.temp

    def __init__(self, **kwargs: Any):
        self.dl: dict = kwargs.get("dl") or {}
        self.aria2c: dict = kwargs.get("aria2c") or {}
        self.cdm: dict = kwargs.get("cdm") or {}
        self.chapter_fallback_name: str = kwargs.get("chapter_fallback_name") or ""
        self.curl_impersonate: dict = kwargs.get("curl_impersonate") or {}
        self.remote_cdm: list[dict] = kwargs.get("remote_cdm") or []
        self.credentials: dict = kwargs.get("credentials") or {}

        self.directories = self._Directories()
        for name, path in (kwargs.get("directories") or {}).items():
            if name.lower() in ("app_dirs", "core_dir", "namespace_dir", "user_configs", "data"):
                # these must not be modified by the user
                continue
            setattr(self.directories, name, Path(path).expanduser())

        self.downloader = kwargs.get("downloader") or "requests"

        self.filenames = self._Filenames()
        for name, filename in (kwargs.get("filenames") or {}).items():
            setattr(self.filenames, name, filename)

        self.headers: dict = kwargs.get("headers") or {}
        # BUG FIX: was `kwargs.get("key_vaults", [])`, which let an explicit YAML
        # `key_vaults:` (None) through as None; `or []` matches every sibling field.
        self.key_vaults: list[dict[str, Any]] = kwargs.get("key_vaults") or []
        self.muxing: dict = kwargs.get("muxing") or {}
        self.nordvpn: dict = kwargs.get("nordvpn") or {}
        self.proxy_providers: dict = kwargs.get("proxy_providers") or {}
        self.serve: dict = kwargs.get("serve") or {}
        self.services: dict = kwargs.get("services") or {}
        # deliberate .get default: an explicit False must be respected here
        self.set_terminal_bg: bool = kwargs.get("set_terminal_bg", True)
        self.tag: str = kwargs.get("tag") or ""

    @classmethod
    def from_yaml(cls, path: Path) -> Config:
        """
        Load configuration from a YAML file.
        :param path: path to the YAML config file.
        :raises FileNotFoundError: if the path does not exist or is not a file.
        """
        if not path.exists():
            raise FileNotFoundError(f"Config file path ({path}) was not found")
        if not path.is_file():
            raise FileNotFoundError(f"Config file path ({path}) is not to a file.")
        return cls(**yaml.safe_load(path.read_text(encoding="utf8")) or {})
from threading import Event
from typing import TypeVar, Union

# Cross-worker signals for the download pipeline.
DOWNLOAD_CANCELLED = Event()
DOWNLOAD_LICENCE_ONLY = Event()

DRM_SORT_MAP = ["ClearKey", "Widevine"]
LANGUAGE_MAX_DISTANCE = 5  # max to still be considered "same", e.g., en, en-US, en-AU
VIDEO_CODEC_MAP = {
    "AVC": "H.264",
    "HEVC": "H.265"
}
DYNAMIC_RANGE_MAP = {
    "HDR10": "HDR",
    "HDR10+": "HDR",
    "Dolby Vision": "DV"
}
AUDIO_CODEC_MAP = {
    "E-AC-3": "DDP",
    "AC-3": "DD"
}

context_settings = {
    "help_option_names": ["-?", "-h", "--help"],  # default only has --help
    "max_content_width": 116,  # max PEP8 line-width, -4 to adjust for initial indent
}

# For use in signatures of functions which take one specific type of track at a time
# (it can't be a list that contains e.g. both Video and Audio objects)
TrackT = TypeVar("TrackT", bound="Track")  # noqa: F821
class Credential:
    """Username (or Email) and Password Credential."""

    def __init__(self, username: str, password: str, extra: Optional[str] = None):
        self.username = username
        self.password = password
        self.extra = extra
        # fingerprint of the serialized credential, handy as a stable cache key
        self.sha1 = hashlib.sha1(self.dumps().encode()).hexdigest()

    def __bool__(self) -> bool:
        # truthy only when both a username and a password are present
        return all((self.username, self.password))

    def __str__(self) -> str:
        return self.dumps()

    def __repr__(self) -> str:
        fields = ", ".join(f"{key}={value!r}" for key, value in self.__dict__.items())
        return f"{self.__class__.__name__}({fields})"

    def dumps(self) -> str:
        """Serialize the credential as `username:password[:extra]`."""
        parts = [self.username, self.password]
        if self.extra:
            parts.append(self.extra)
        return ":".join(parts)

    def dump(self, path: Union[Path, str]) -> int:
        """Write the serialized credential to a file, returning the character count."""
        return Path(path).write_text(self.dumps(), encoding="utf8")

    def as_base64(self, with_extra: bool = False, encode_password: bool = False, encode_extra: bool = False) -> str:
        """
        Dump Credential as a Base64-encoded string in Basic Authorization style.
        encode_password and encode_extra will also Base64-encode the password and extra respectively.
        """
        secret = (
            base64.b64encode(self.password.encode()).decode()
            if encode_password
            else self.password
        )
        value = f"{self.username}:{secret}"
        if with_extra and self.extra:
            extra = (
                base64.b64encode(self.extra.encode()).decode()
                if encode_extra
                else self.extra
            )
            value += f":{extra}"
        return base64.b64encode(value.encode()).decode()

    @classmethod
    def loads(cls, text: str) -> Credential:
        """
        Load credential from a text string.

        Format: {username}:{password}
        Rules:
            Only one Credential must be in this text contents.
            All whitespace before and after all text will be removed.
            Any whitespace between text will be kept and used.
            The credential can be spanned across one or multiple lines as long as it
            abides with all the above rules and the format.

        Example that follows the format and rules:
            `\tJohnd\noe@gm\n\rail.com\n:Pass1\n23\n\r \t \t`
            >>>Credential(username='Johndoe@gmail.com', password='Pass123')
        """
        # strip each line's edges, then join; interior whitespace survives
        text = "".join(line.strip() for line in text.splitlines()).strip()
        match = re.fullmatch(r"^([^:]+?):([^:]+?)(?::(.+))?$", text)
        if not match:
            raise ValueError("No credentials found in text string. Expecting the format `username:password`")
        return cls(*match.groups())

    @classmethod
    def load(cls, path: Path) -> Credential:
        """
        Load Credential from a file path.
        Use Credential.loads() for loading from text content and seeing the rules and
        format expected to be found in the URIs contents.
        """
        return cls.loads(path.read_text("utf8"))
class ClearKey:
    """AES Clear Key DRM System."""

    def __init__(self, key: Union[bytes, str], iv: Optional[Union[bytes, str]] = None):
        """
        Generally IV should be provided where possible. If not provided, it will be
        set to \x00 repeated to the AES block size (16 bytes).

        Hex strings (optionally 0x-prefixed) are accepted for both key and iv.
        :raises ValueError: if key or iv is not bytes (or a hex string).
        """
        if isinstance(key, str):
            key = bytes.fromhex(key.replace("0x", ""))
        if not isinstance(key, bytes):
            raise ValueError(f"Expected AES Key to be bytes, not {key!r}")
        if not iv:
            iv = b"\x00"
        if isinstance(iv, str):
            iv = bytes.fromhex(iv.replace("0x", ""))
        if not isinstance(iv, bytes):
            raise ValueError(f"Expected IV to be bytes, not {iv!r}")

        if len(iv) < 16:
            # BUG FIX: the IV must be the AES block size (16 bytes), not the key
            # length. The old `iv * (len(key) - len(iv) + 1)` produced a 32-byte IV
            # for AES-256 keys (and 72 bytes for an 8-byte IV with a 16-byte key),
            # which AES.new() rejects. Repeat the pattern and clamp to 16 bytes.
            iv = (iv * 16)[:16]

        self.key: bytes = key
        self.iv: bytes = iv

    def decrypt(self, path: Path) -> None:
        """
        Decrypt a Track with AES Clear Key DRM, replacing the file in place.
        :raises ValueError: if the path is empty or does not exist.
        """
        if not path or not path.exists():
            raise ValueError("Tried to decrypt a file that does not exist.")

        decrypted = AES. \
            new(self.key, AES.MODE_CBC, self.iv). \
            decrypt(path.read_bytes())

        try:
            decrypted = unpad(decrypted, AES.block_size)
        except ValueError:
            # the decrypted data is likely already in the block size boundary
            pass

        decrypted_path = path.with_suffix(f".decrypted{path.suffix}")
        decrypted_path.write_bytes(decrypted)

        path.unlink()
        shutil.move(decrypted_path, path)

    @classmethod
    def from_m3u_key(cls, m3u_key: Key, session: Optional[Session] = None) -> ClearKey:
        """
        Load a ClearKey from an M3U(8) Playlist's EXT-X-KEY.

        Parameters:
            m3u_key: A Key object parsed from a m3u(8) playlist using
                the `m3u8` library.
            session: Optional session used to request external URIs with.
                Useful to set headers, proxies, cookies, and so forth.

        :raises ValueError: if the key is not AES, or has no usable URI.
        :raises EOFError: if the key URI response is empty or too short.
        """
        if not isinstance(m3u_key, Key):
            raise ValueError(f"Provided M3U Key is in an unexpected type {m3u_key!r}")
        if not isinstance(session, (Session, type(None))):
            raise TypeError(f"Expected session to be a {Session}, not a {type(session)}")

        if not m3u_key.method.startswith("AES"):
            raise ValueError(f"Provided M3U Key is not an AES Clear Key, {m3u_key.method}")
        if not m3u_key.uri:
            raise ValueError("No URI in M3U Key, unable to get Key.")

        if not session:
            session = Session()

        if not session.headers.get("User-Agent"):
            # commonly needed default for HLS playlists
            session.headers["User-Agent"] = "smartexoplayer/1.1.0 (Linux;Android 8.0.0) ExoPlayerLib/2.13.3"

        if m3u_key.uri.startswith("data:"):
            # key embedded directly in the playlist as a data URI
            media_types, data = m3u_key.uri[5:].split(",")
            media_types = media_types.split(";")
            if "base64" in media_types:
                data = base64.b64decode(data)
            key = data
        else:
            url = urljoin(m3u_key.base_uri, m3u_key.uri)
            res = session.get(url)
            res.raise_for_status()
            if not res.content:
                raise EOFError("Unexpected Empty Response by M3U Key URI.")
            if len(res.content) < 16:
                raise EOFError(f"Unexpected Length of Key ({len(res.content)} bytes) in M3U Key.")
            key = res.content

        if m3u_key.iv:
            iv = bytes.fromhex(m3u_key.iv.replace("0x", ""))
        else:
            iv = None

        return cls(key=key, iv=iv)


__all__ = ("ClearKey",)
class Events:
    """Minimal publish/subscribe observer used to report download pipeline progress."""

    class Types(Enum):
        _reserved = 0
        # A Track's segment has finished downloading
        SEGMENT_DOWNLOADED = 1
        # Track has finished downloading
        TRACK_DOWNLOADED = 2
        # Track has finished decrypting
        TRACK_DECRYPTED = 3
        # Track has finished repacking
        TRACK_REPACKED = 4
        # Track is about to be Multiplexed into a Container
        TRACK_MULTIPLEX = 5

    def __init__(self):
        self.__persistent: dict[Events.Types, list[Callable]] = {}
        self.__one_shot: dict[Events.Types, list[Callable]] = {}
        self.reset()

    def reset(self):
        """Reset Event Observer clearing all Subscriptions."""
        self.__persistent = {event: [] for event in Events.Types}
        self.__one_shot = {event: [] for event in Events.Types}

    def subscribe(self, event_type: Events.Types, callback: Callable, ephemeral: bool = False) -> None:
        """
        Subscribe to an Event with a Callback.

        Parameters:
            event_type: The Events.Type to subscribe to.
            callback: The function or lambda to call on event emit.
            ephemeral: Unsubscribe the callback from the event on first emit.
                Note that this is not thread-safe and may be called multiple
                times at roughly the same time.
        """
        registry = self.__one_shot if ephemeral else self.__persistent
        registry[event_type].append(callback)

    def unsubscribe(self, event_type: Events.Types, callback: Callable) -> None:
        """
        Unsubscribe a Callback from an Event.

        Parameters:
            event_type: The Events.Type to unsubscribe from.
            callback: The function or lambda to remove from event emit.
        """
        for registry in (self.__persistent, self.__one_shot):
            if callback in registry[event_type]:
                registry[event_type].remove(callback)

    def emit(self, event_type: Events.Types, *args: Any, **kwargs: Any) -> None:
        """
        Emit an Event, executing all subscribed Callbacks.

        Parameters:
            event_type: The Events.Type to emit.
            args: Positional arguments to pass to callbacks.
            kwargs: Keyword arguments to pass to callbacks.
        """
        if event_type not in self.__persistent:
            raise ValueError(f"Event type \"{event_type}\" is invalid")

        # snapshot both lists so callbacks may (un)subscribe during dispatch
        for callback in [*self.__persistent[event_type], *self.__one_shot[event_type]]:
            callback(*args, **kwargs)

        self.__one_shot[event_type].clear()
class Basic(Proxy):
    def __init__(self, **countries: Union[str, list[str]]):
        """Basic Proxy Service using Proxies specified in the config."""
        self.countries = {
            k.lower(): v
            for k, v in countries.items()
        }

    def __repr__(self) -> str:
        countries = len(self.countries)
        # BUG FIX: was `len(self.countries.values())`, which equals the number of
        # countries, under-counting entries whose value is a list of servers.
        servers = sum(
            len(v) if isinstance(v, list) else 1
            for v in self.countries.values()
        )

        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"

    def get_proxy(self, query: str) -> Optional[str]:
        """
        Get a proxy URI from the config.
        :param query: country code optionally followed by a 1-based server index,
            e.g. "us" (random server) or "us2" (second server).
        :returns: a proxy URI, or None if no servers exist for that country.
        :raises ValueError: if the query is malformed, the index is out of range,
            or the configured proxy URI is invalid.
        """
        query = query.lower()

        match = re.match(r"^([a-z]{2})(\d+)?$", query, re.IGNORECASE)
        if not match:
            raise ValueError(f"The query \"{query}\" was not recognized...")

        country_code = match.group(1)
        entry = match.group(2)

        servers: Optional[Union[str, list[str]]] = self.countries.get(country_code)
        if not servers:
            return None

        if isinstance(servers, str):
            # single proxy configured for this country
            proxy = servers
        elif entry:
            # user asked for a specific server by 1-based index
            try:
                proxy = servers[int(entry) - 1]
            except IndexError:
                raise ValueError(
                    f"There's only {len(servers)} prox{'y' if len(servers) == 1 else 'ies'} "
                    f"for \"{country_code}\"..."
                )
        else:
            proxy = random.choice(servers)

        proxy = prepend_scheme_if_needed(proxy, "http")
        parsed_proxy = parse_url(proxy)
        if not parsed_proxy.host:
            raise ValueError(f"The proxy '{proxy}' is not a valid proxy URI supported by Python-Requests.")

        return proxy
class Hola(Proxy):
    def __init__(self):
        """
        Proxy Service using Hola's direct connections via the hola-proxy project.
        https://github.com/Snawoot/hola-proxy
        """
        self.binary = binaries.HolaProxy
        if not self.binary:
            raise EnvironmentError("hola-proxy executable not found but is required for the Hola proxy provider.")

        self.countries = self.get_countries()

    def __repr__(self) -> str:
        countries = len(self.countries)

        return f"{countries} Countr{['ies', 'y'][countries == 1]}"

    def get_proxy(self, query: str) -> Optional[str]:
        """
        Get an HTTP proxy URI for a Datacenter ('direct') or Residential ('lum') Hola server.
        :param query: country code to list proxies for.
        :raises ConnectionError: if Hola has temporarily banned the current IP.
        :raises ValueError: if hola-proxy output cannot be parsed.

        TODO: - Add ability to select 'lum' proxies (residential proxies).
              - Return and use Proxy Authorization
        """
        query = query.lower()

        p = subprocess.check_output([
            self.binary,
            "-country", query,
            "-list-proxies"
        ], stderr=subprocess.STDOUT).decode()

        if "Transaction error: temporary ban detected." in p:
            # BUG FIX: corrected grammar of the user-facing error message
            raise ConnectionError("Hola banned your IP temporarily from its services. Try changing your IP.")

        # BUG FIX: previously `.groups()` was called on the raw re.search() result,
        # raising an opaque AttributeError if the output format ever changed
        credentials = re.search(
            r"Login: (.*)\nPassword: (.*)\nProxy-Authorization: (.*)", p
        )
        if not credentials:
            raise ValueError(f"Could not parse credentials from hola-proxy output for \"{query}\".")
        username, password, _ = credentials.groups()

        servers = re.findall(r"(zagent.*)", p)
        if not servers:
            # BUG FIX: previously random.choice([]) raised an opaque IndexError
            raise ValueError(f"No Hola servers were returned for \"{query}\".")

        proxies = []
        for server in servers:
            # CSV fields: host, ip_address, direct, peer, hola, trial, trial_peer, vendor
            fields = server.split(",")
            ip_address = fields[1]
            peer_port = fields[3]
            proxies.append(f"http://{username}:{password}@{ip_address}:{peer_port}")

        return random.choice(proxies)

    def get_countries(self) -> list[dict[str, str]]:
        """Get a list of available Countries."""
        p = subprocess.check_output([
            self.binary,
            "-list-countries"
        ]).decode("utf8")

        return [
            {code: name}
            for country in p.splitlines()
            for (code, name) in [country.split(" - ", maxsplit=1)]
        ]
class NordVPN(Proxy):
    def __init__(self, username: str, password: str, server_map: Optional[dict[str, int]] = None):
        """
        Proxy Service using NordVPN Service Credentials.

        A username and password must be provided. These are Service Credentials, not your Login Credentials.
        The Service Credentials can be found here: https://my.nordaccount.com/dashboard/nordvpn/
        """
        if not username:
            raise ValueError("No Username was provided to the NordVPN Proxy Service.")
        if not password:
            raise ValueError("No Password was provided to the NordVPN Proxy Service.")
        # service credentials are two 24-char alphanumeric tokens (48 chars combined)
        if not re.match(r"^[a-z0-9]{48}$", username + password, re.IGNORECASE) or "@" in username:
            raise ValueError(
                "The Username and Password must be NordVPN Service Credentials, not your Login Credentials. "
                "The Service Credentials can be found here: https://my.nordaccount.com/dashboard/nordvpn/"
            )

        if server_map is not None and not isinstance(server_map, dict):
            raise TypeError(f"Expected server_map to be a dict mapping a region to a server ID, not '{server_map!r}'.")

        self.username = username
        self.password = password
        self.server_map = server_map or {}

        self.countries = self.get_countries()

    def __repr__(self) -> str:
        countries = len(self.countries)
        servers = sum(x["servers_count"] for x in self.countries)

        return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})"

    def get_proxy(self, query: str) -> Optional[str]:
        """
        Get an HTTP(SSL) proxy URI for a NordVPN server.

        HTTP proxies under port 80 were disabled on the 15th of Feb, 2021:
        https://nordvpn.com/blog/removing-http-proxies
        """
        query = query.lower()
        if re.match(r"^[a-z]{2}\d+$", query):
            # country and nordvpn server id, e.g., us1, fr1234
            hostname = f"{query}.nordvpn.com"
        else:
            if query.isdigit():
                # country id
                country = self.get_country(by_id=int(query))
            elif re.match(r"^[a-z]+$", query):
                # country code
                country = self.get_country(by_code=query)
            else:
                raise ValueError(f"The query provided is unsupported and unrecognized: {query}")
            if not country:
                # NordVPN doesnt have servers in this region
                return

            server_mapping = self.server_map.get(country["code"].lower())
            if server_mapping:
                # country was set to a specific server ID in config
                hostname = f"{country['code'].lower()}{server_mapping}.nordvpn.com"
            else:
                # get the recommended server ID
                recommended_servers = self.get_recommended_servers(country["id"])
                if not recommended_servers:
                    raise ValueError(
                        f"The NordVPN Country {query} currently has no recommended servers. "
                        "Try again later. If the issue persists, double-check the query."
                    )
                hostname = recommended_servers[0]["hostname"]

        if hostname.startswith("gb"):
            # NordVPN uses the alpha2 of 'GB' in API responses, but 'UK' in the hostname
            hostname = f"gb{hostname[2:]}"

        return f"https://{self.username}:{self.password}@{hostname}:89"

    def get_country(
        self,
        by_id: Optional[int] = None,
        by_code: Optional[str] = None
    ) -> Optional[dict]:
        """Search for a Country and it's metadata."""
        if all(x is None for x in (by_id, by_code)):
            raise ValueError("At least one search query must be made.")

        for country in self.countries:
            if all([
                by_id is None or country["id"] == int(by_id),
                by_code is None or country["code"] == by_code.upper()
            ]):
                return country

    @staticmethod
    def get_recommended_servers(country_id: int) -> list[dict]:
        """
        Get the list of recommended Servers for a Country.

        Note: There may not always be more than one recommended server.
        """
        res = requests.get(
            url="https://nordvpn.com/wp-admin/admin-ajax.php",
            params={
                "action": "servers_recommendations",
                "filters": json.dumps({"country_id": country_id})
            }
        )
        if not res.ok:
            # BUG FIX: message previously said "list of NordVPN countries" (copy-paste
            # from get_countries) though this call fetches recommended servers
            raise ValueError(f"Failed to get a list of recommended NordVPN servers [{res.status_code}]")

        try:
            return res.json()
        except json.JSONDecodeError:
            raise ValueError("Could not decode list of recommended NordVPN servers, not JSON data.")

    @staticmethod
    def get_countries() -> list[dict]:
        """Get a list of available Countries and their metadata."""
        res = requests.get(
            url="https://nordvpn.com/wp-admin/admin-ajax.php",
            params={"action": "servers_countries"}
        )
        if not res.ok:
            raise ValueError(f"Failed to get a list of NordVPN countries [{res.status_code}]")

        try:
            return res.json()
        except json.JSONDecodeError:
            raise ValueError("Could not decode list of NordVPN countries, not JSON data.")
109 | """ 110 | res = requests.get( 111 | url="https://nordvpn.com/wp-admin/admin-ajax.php", 112 | params={ 113 | "action": "servers_recommendations", 114 | "filters": json.dumps({"country_id": country_id}) 115 | } 116 | ) 117 | if not res.ok: 118 | raise ValueError(f"Failed to get a list of NordVPN countries [{res.status_code}]") 119 | 120 | try: 121 | return res.json() 122 | except json.JSONDecodeError: 123 | raise ValueError("Could not decode list of NordVPN countries, not JSON data.") 124 | 125 | @staticmethod 126 | def get_countries() -> list[dict]: 127 | """Get a list of available Countries and their metadata.""" 128 | res = requests.get( 129 | url="https://nordvpn.com/wp-admin/admin-ajax.php", 130 | params={"action": "servers_countries"} 131 | ) 132 | if not res.ok: 133 | raise ValueError(f"Failed to get a list of NordVPN countries [{res.status_code}]") 134 | 135 | try: 136 | return res.json() 137 | except json.JSONDecodeError: 138 | raise ValueError("Could not decode list of NordVPN countries, not JSON data.") 139 | -------------------------------------------------------------------------------- /devine/core/proxies/proxy.py: -------------------------------------------------------------------------------- 1 | from abc import abstractmethod 2 | from typing import Optional 3 | 4 | 5 | class Proxy: 6 | @abstractmethod 7 | def __init__(self, **kwargs): 8 | """ 9 | The constructor initializes the Service using passed configuration data. 10 | 11 | Any authorization or pre-fetching of data should be done here. 12 | """ 13 | 14 | @abstractmethod 15 | def __repr__(self) -> str: 16 | """Return a string denoting a list of Countries and Servers (if possible).""" 17 | countries = ... 18 | servers = ... 19 | return f"{countries} Countr{['ies', 'y'][countries == 1]} ({servers} Server{['s', ''][servers == 1]})" 20 | 21 | @abstractmethod 22 | def get_proxy(self, query: str) -> Optional[str]: 23 | """ 24 | Get a Proxy URI from the Proxy Service. 
25 | 26 | Only return None if the query was accepted, but no proxy could be returned. 27 | Otherwise, please use exceptions to denote any errors with the call or query. 28 | 29 | The returned Proxy URI must be a string supported by Python-Requests: 30 | '{scheme}://[{user}:{pass}@]{host}:{port}' 31 | """ 32 | -------------------------------------------------------------------------------- /devine/core/search_result.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Union 2 | 3 | 4 | class SearchResult: 5 | def __init__( 6 | self, 7 | id_: Union[str, int], 8 | title: str, 9 | description: Optional[str] = None, 10 | label: Optional[str] = None, 11 | url: Optional[str] = None 12 | ): 13 | """ 14 | A Search Result for any support Title Type. 15 | 16 | Parameters: 17 | id_: The search result's Title ID. 18 | title: The primary display text, e.g., the Title's Name. 19 | description: The secondary display text, e.g., the Title's Description or 20 | further title information. 21 | label: The tertiary display text. This will typically be used to display 22 | an informative label or tag to the result. E.g., "unavailable", the 23 | title's price tag, region, etc. 24 | url: A hyperlink to the search result or title's page. 
25 | """ 26 | if not isinstance(id_, (str, int)): 27 | raise TypeError(f"Expected id_ to be a {str} or {int}, not {type(id_)}") 28 | if not isinstance(title, str): 29 | raise TypeError(f"Expected title to be a {str}, not {type(title)}") 30 | if not isinstance(description, (str, type(None))): 31 | raise TypeError(f"Expected description to be a {str}, not {type(description)}") 32 | if not isinstance(label, (str, type(None))): 33 | raise TypeError(f"Expected label to be a {str}, not {type(label)}") 34 | if not isinstance(url, (str, type(None))): 35 | raise TypeError(f"Expected url to be a {str}, not {type(url)}") 36 | 37 | self.id = id_ 38 | self.title = title 39 | self.description = description 40 | self.label = label 41 | self.url = url 42 | 43 | 44 | __all__ = ("SearchResult",) 45 | -------------------------------------------------------------------------------- /devine/core/services.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import click 4 | 5 | from devine.core.config import config 6 | from devine.core.service import Service 7 | from devine.core.utilities import import_module_by_path 8 | 9 | _SERVICES = sorted( 10 | ( 11 | path 12 | for path in config.directories.services.glob("*/__init__.py") 13 | ), 14 | key=lambda x: x.parent.stem 15 | ) 16 | 17 | _MODULES = { 18 | path.parent.stem: getattr(import_module_by_path(path), path.parent.stem) 19 | for path in _SERVICES 20 | } 21 | 22 | _ALIASES = { 23 | tag: getattr(module, "ALIASES") 24 | for tag, module in _MODULES.items() 25 | } 26 | 27 | 28 | class Services(click.MultiCommand): 29 | """Lazy-loaded command group of project services.""" 30 | 31 | # Click-specific methods 32 | 33 | def list_commands(self, ctx: click.Context) -> list[str]: 34 | """Returns a list of all available Services as command names for Click.""" 35 | return Services.get_tags() 36 | 37 | def get_command(self, ctx: click.Context, name: str) -> click.Command: 38 | 
class Services(click.MultiCommand):
    """Lazy-loaded command group of project services."""

    # Click-specific methods

    def list_commands(self, ctx: click.Context) -> list[str]:
        """Returns a list of all available Services as command names for Click."""
        return Services.get_tags()

    def get_command(self, ctx: click.Context, name: str) -> click.Command:
        """Load the Service and return the Click CLI method."""
        tag = Services.get_tag(name)
        try:
            service = Services.load(tag)
        except KeyError as e:
            available = self.list_commands(ctx)
            if not available:
                raise click.ClickException(
                    f"There are no Services added yet, therefore the '{name}' Service could not be found."
                )
            raise click.ClickException(f"{e}. Available Services: {', '.join(available)}")

        if hasattr(service, "cli"):
            return service.cli

        raise click.ClickException(f"Service '{tag}' has no 'cli' method configured.")

    # Methods intended to be used anywhere

    @staticmethod
    def get_tags() -> list[str]:
        """Returns a list of service tags from all available Services."""
        return [x.parent.stem for x in _SERVICES]

    @staticmethod
    def get_path(name: str) -> Path:
        """Get the directory path of a command."""
        tag = Services.get_tag(name)
        folder = next(
            (service.parent for service in _SERVICES if service.parent.stem == tag),
            None
        )
        if folder is None:
            raise KeyError(f"There is no Service added by the Tag '{name}'")
        return folder

    @staticmethod
    def get_tag(value: str) -> str:
        """
        Get the Service Tag (e.g. DSNP, not DisneyPlus/Disney+, etc.) by an Alias.
        Input value can be of any case-sensitivity.
        Original input value is returned if it did not match a service tag.
        """
        original_value = value
        lowered = value.lower()
        for path in _SERVICES:
            tag = path.parent.stem
            if lowered == tag.lower() or lowered in _ALIASES.get(tag, []):
                return tag
        return original_value

    @staticmethod
    def load(tag: str) -> Service:
        """Load a Service module by Service tag."""
        module = _MODULES.get(tag)
        if not module:
            raise KeyError(f"There is no Service added by the Tag '{tag}'")
        return module


__all__ = ("Services",)
class Episode(Title):
    """A single Episode of a Series from a Service."""

    def __init__(
        self,
        id_: Any,
        service: type,
        title: str,
        season: Union[int, str],
        number: Union[int, str],
        name: Optional[str] = None,
        year: Optional[Union[int, str]] = None,
        language: Optional[Union[str, Language]] = None,
        data: Optional[Any] = None,
    ) -> None:
        """
        Create a new Episode title.

        Parameters:
            id_: Unique identifier for the title (validated by Title).
            service: Service class this title originates from.
            title: Name of the show the episode belongs to.
            season: Season number. Numeric strings are coerced to int.
            number: Episode number. Numeric strings are coerced to int.
            name: Episode name. Dropped if it merely repeats the episode
                number or the show title.
            year: Release year. Numeric strings are coerced to int.
            language: Original recorded language, passed through to Title.
            data: Arbitrary metadata storage, passed through to Title.
        """
        super().__init__(id_, service, language, data)

        if not title:
            raise ValueError("Episode title must be provided")
        if not isinstance(title, str):
            raise TypeError(f"Expected title to be a str, not {title!r}")

        # season/number may legitimately be 0, so only reject empty/None values
        if season != 0 and not season:
            raise ValueError("Episode season must be provided")
        if isinstance(season, str) and season.isdigit():
            season = int(season)
        elif not isinstance(season, int):
            raise TypeError(f"Expected season to be an int, not {season!r}")

        if number != 0 and not number:
            raise ValueError("Episode number must be provided")
        if isinstance(number, str) and number.isdigit():
            number = int(number)
        elif not isinstance(number, int):
            raise TypeError(f"Expected number to be an int, not {number!r}")

        if name is not None and not isinstance(name, str):
            raise TypeError(f"Expected name to be a str, not {name!r}")

        if year is not None:
            if isinstance(year, str) and year.isdigit():
                year = int(year)
            elif not isinstance(year, int):
                raise TypeError(f"Expected year to be an int, not {year!r}")

        title = title.strip()

        if name is not None:
            name = name.strip()
            # drop episode names that are just the episode number or show title
            if re.match(r"Episode ?#?\d+", name, re.IGNORECASE):
                name = None
            elif name.lower() == title.lower():
                name = None

        if year is not None and year <= 0:
            raise ValueError(f"Episode year cannot be {year}")

        self.title = title
        self.season = season
        self.number = number
        self.name = name
        self.year = year

    def __str__(self) -> str:
        return "{title} S{season:02}E{number:02} {name}".format(
            title=self.title,
            season=self.season,
            number=self.number,
            name=self.name or ""
        ).strip()

    def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
        """
        Build a scene-style release filename for this Episode from MediaInfo.

        Parameters:
            media_info: MediaInfo of the file this name will be used for.
            folder: Build a folder name (show + season only) instead of a
                full episode filename.
            show_service: Include the service tag (e.g., iT, NF) in the name.
        """
        video = next(iter(media_info.video_tracks), None)
        audio = next(iter(media_info.audio_tracks), None)
        audio_language_count = len({
            x.language.split("-")[0]
            for x in media_info.audio_tracks
            if x.language
        })

        # Title SXXEXX Name (or Title SXX if folder)
        if folder:
            name = f"{self.title} S{self.season:02}"
        else:
            name = "{title} S{season:02}E{number:02} {name}".format(
                title=self.title.replace("$", "S"),  # e.g., Arli$$
                season=self.season,
                number=self.number,
                name=self.name or ""
            ).strip()

        # MULTi
        if audio_language_count > 1:
            name += " MULTi"

        # Resolution
        if video:
            resolution = video.height
            aspect = [
                int(float(plane))
                for plane in video.other_display_aspect_ratio[0].split(":")
            ]
            if len(aspect) == 1:
                # e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
                aspect.append(1)
            if aspect[0] / aspect[1] not in (16 / 9, 4 / 3):
                # Represent the resolution in a 16:9 canvas when the title is
                # some unusual aspect ratio; otherwise the track height is fine.
                resolution = int(video.width * (9 / 16))
            name += f" {resolution}p"

        # Service
        if show_service:
            name += f" {self.service.__name__}"

        # 'WEB-DL'
        name += " WEB-DL"

        # Audio Codec + Channels (+ feature)
        if audio:
            codec = audio.format
            layout = audio.channel_layout or audio.channellayout_original
            # LFE channels count as 0.1, everything else as 1 (e.g. 5.1)
            channels = float(sum(
                {"LFE": 0.1}.get(position.upper(), 1)
                for position in layout.split(" ")
            ))
            features = audio.format_additionalfeatures or ""
            name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
            if "JOC" in features:
                name += " Atmos"

        # Video (dynamic range + hfr +) Codec
        if video:
            codec = video.format
            hdr_format = video.hdr_format_commercial
            trc = video.transfer_characteristics or video.transfer_characteristics_original
            frame_rate = float(video.frame_rate)
            if hdr_format:
                # NOTE(review): trailing space kept as in the original;
                # presumably collapsed by sanitize_filename — confirm
                name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
            elif trc and "HLG" in trc:
                name += " HLG"
            if frame_rate > 30:
                name += " HFR"
            name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"

        if config.tag:
            name += f"-{config.tag}"

        return sanitize_filename(name)
sum(seasons.values()) 187 | tree = Tree( 188 | f"{num_seasons} Season{['s', ''][num_seasons == 1]}, {num_episodes} Episode{['s', ''][num_episodes == 1]}", 189 | guide_style="bright_black" 190 | ) 191 | if verbose: 192 | for season, episodes in seasons.items(): 193 | season_tree = tree.add( 194 | f"[bold]Season {str(season).zfill(len(str(num_seasons)))}[/]: [bright_black]{episodes} episodes", 195 | guide_style="bright_black" 196 | ) 197 | for episode in self: 198 | if episode.season == season: 199 | if episode.name: 200 | season_tree.add( 201 | f"[bold]{str(episode.number).zfill(len(str(episodes)))}.[/] " 202 | f"[bright_black]{episode.name}" 203 | ) 204 | else: 205 | season_tree.add(f"[bright_black]Episode {str(episode.number).zfill(len(str(episodes)))}") 206 | 207 | return tree 208 | 209 | 210 | __all__ = ("Episode", "Series") 211 | -------------------------------------------------------------------------------- /devine/core/titles/movie.py: -------------------------------------------------------------------------------- 1 | from abc import ABC 2 | from typing import Any, Iterable, Optional, Union 3 | 4 | from langcodes import Language 5 | from pymediainfo import MediaInfo 6 | from rich.tree import Tree 7 | from sortedcontainers import SortedKeyList 8 | 9 | from devine.core.config import config 10 | from devine.core.constants import AUDIO_CODEC_MAP, DYNAMIC_RANGE_MAP, VIDEO_CODEC_MAP 11 | from devine.core.titles.title import Title 12 | from devine.core.utilities import sanitize_filename 13 | 14 | 15 | class Movie(Title): 16 | def __init__( 17 | self, 18 | id_: Any, 19 | service: type, 20 | name: str, 21 | year: Optional[Union[int, str]] = None, 22 | language: Optional[Union[str, Language]] = None, 23 | data: Optional[Any] = None, 24 | ) -> None: 25 | super().__init__(id_, service, language, data) 26 | 27 | if not name: 28 | raise ValueError("Movie name must be provided") 29 | if not isinstance(name, str): 30 | raise TypeError(f"Expected name to be a str, not 
class Movie(Title):
    """A Movie title from a Service."""

    def __init__(
        self,
        id_: Any,
        service: type,
        name: str,
        year: Optional[Union[int, str]] = None,
        language: Optional[Union[str, Language]] = None,
        data: Optional[Any] = None,
    ) -> None:
        """
        Create a new Movie title.

        Parameters:
            id_: Unique identifier for the title (validated by Title).
            service: Service class this title originates from.
            name: Name of the movie.
            year: Release year. Numeric strings are coerced to int.
            language: Original recorded language, passed through to Title.
            data: Arbitrary metadata storage, passed through to Title.
        """
        super().__init__(id_, service, language, data)

        if not name:
            raise ValueError("Movie name must be provided")
        if not isinstance(name, str):
            raise TypeError(f"Expected name to be a str, not {name!r}")

        if year is not None:
            if isinstance(year, str) and year.isdigit():
                year = int(year)
            elif not isinstance(year, int):
                raise TypeError(f"Expected year to be an int, not {year!r}")

        name = name.strip()

        if year is not None and year <= 0:
            raise ValueError(f"Movie year cannot be {year}")

        self.name = name
        self.year = year

    def __str__(self) -> str:
        if self.year:
            return f"{self.name} ({self.year})"
        return self.name

    def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
        """
        Build a scene-style release filename for this Movie from MediaInfo.

        Parameters:
            media_info: MediaInfo of the file this name will be used for.
            folder: This filename will be used as a folder name (unused here;
                the movie name and folder name are identical).
            show_service: Include the service tag (e.g., iT, NF) in the name.
        """
        video = next(iter(media_info.video_tracks), None)
        audio = next(iter(media_info.audio_tracks), None)
        audio_language_count = len({
            x.language.split("-")[0]
            for x in media_info.audio_tracks
            if x.language
        })

        # Name (Year)
        name = str(self).replace("$", "S")  # e.g., Arli$$

        # MULTi
        if audio_language_count > 1:
            name += " MULTi"

        # Resolution
        if video:
            resolution = video.height
            aspect = [
                int(float(plane))
                for plane in video.other_display_aspect_ratio[0].split(":")
            ]
            if len(aspect) == 1:
                # e.g., aspect ratio of 2 (2.00:1) would end up as `(2.0,)`, add 1
                aspect.append(1)
            if aspect[0] / aspect[1] not in (16 / 9, 4 / 3):
                # Represent the resolution in a 16:9 canvas when the title is
                # some unusual aspect ratio; otherwise the track height is fine.
                resolution = int(video.width * (9 / 16))
            name += f" {resolution}p"

        # Service
        if show_service:
            name += f" {self.service.__name__}"

        # 'WEB-DL'
        name += " WEB-DL"

        # Audio Codec + Channels (+ feature)
        if audio:
            codec = audio.format
            layout = audio.channel_layout or audio.channellayout_original
            # LFE channels count as 0.1, everything else as 1 (e.g. 5.1)
            channels = float(sum(
                {"LFE": 0.1}.get(position.upper(), 1)
                for position in layout.split(" ")
            ))
            features = audio.format_additionalfeatures or ""
            name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
            if "JOC" in features:
                name += " Atmos"

        # Video (dynamic range + hfr +) Codec
        if video:
            codec = video.format
            hdr_format = video.hdr_format_commercial
            trc = video.transfer_characteristics or video.transfer_characteristics_original
            frame_rate = float(video.frame_rate)
            if hdr_format:
                # NOTE(review): trailing space kept as in the original;
                # presumably collapsed by sanitize_filename — confirm
                name += f" {DYNAMIC_RANGE_MAP.get(hdr_format)} "
            elif trc and "HLG" in trc:
                name += " HLG"
            if frame_rate > 30:
                name += " HFR"
            name += f" {VIDEO_CODEC_MAP.get(codec, codec)}"

        if config.tag:
            name += f"-{config.tag}"

        return sanitize_filename(name)


class Movies(SortedKeyList, ABC):
    """A sorted collection of Movie objects, ordered by year."""

    def __init__(self, iterable: Optional[Iterable] = None):
        super().__init__(
            iterable,
            key=lambda x: x.year or 0
        )

    def __str__(self) -> str:
        if not self:
            return super().__str__()
        # TODO: Assumes there's only one movie
        first = self[0]
        return first.name + (f" ({first.year})" if first.year else "")

    def tree(self, verbose: bool = False) -> Tree:
        """Build a rich Tree summarizing the movies (detailed if verbose)."""
        num_movies = len(self)
        tree = Tree(
            f"{num_movies} Movie{['s', ''][num_movies == 1]}",
            guide_style="bright_black"
        )
        if verbose:
            for movie in self:
                tree.add(
                    f"[bold]{movie.name}[/] [bright_black]({movie.year or '?'})",
                    guide_style="bright_black"
                )

        return tree


__all__ = ("Movie", "Movies")
class Song(Title):
    """A single Song of an Album from a Service."""

    def __init__(
        self,
        id_: Any,
        service: type,
        name: str,
        artist: str,
        album: str,
        track: int,
        disc: int,
        year: int,
        language: Optional[Union[str, Language]] = None,
        data: Optional[Any] = None,
    ) -> None:
        """
        Create a new Song title.

        Parameters:
            id_: Unique identifier for the title (validated by Title).
            service: Service class this title originates from.
            name: Name of the song.
            artist: Name of the artist.
            album: Name of the album the song belongs to.
            track: Track number on the disc (must be positive).
            disc: Disc number (must be positive).
            year: Release year (must be positive).
            language: Original recorded language, passed through to Title.
            data: Arbitrary metadata storage, passed through to Title.
        """
        super().__init__(id_, service, language, data)

        if not name:
            raise ValueError("Song name must be provided")
        if not isinstance(name, str):
            raise TypeError(f"Expected name to be a str, not {name!r}")

        if not artist:
            raise ValueError("Song artist must be provided")
        if not isinstance(artist, str):
            raise TypeError(f"Expected artist to be a str, not {artist!r}")

        if not album:
            raise ValueError("Song album must be provided")
        if not isinstance(album, str):
            # BUG FIX: the message previously interpolated {name!r} instead of
            # {album!r}, reporting the wrong value to the caller.
            raise TypeError(f"Expected album to be a str, not {album!r}")

        if not track:
            raise ValueError("Song track must be provided")
        if not isinstance(track, int):
            raise TypeError(f"Expected track to be an int, not {track!r}")

        if not disc:
            raise ValueError("Song disc must be provided")
        if not isinstance(disc, int):
            raise TypeError(f"Expected disc to be an int, not {disc!r}")

        if not year:
            raise ValueError("Song year must be provided")
        if not isinstance(year, int):
            raise TypeError(f"Expected year to be an int, not {year!r}")

        name = name.strip()
        artist = artist.strip()
        album = album.strip()

        if track <= 0:
            raise ValueError(f"Song track cannot be {track}")
        if disc <= 0:
            raise ValueError(f"Song disc cannot be {disc}")
        if year <= 0:
            raise ValueError(f"Song year cannot be {year}")

        self.name = name
        self.artist = artist
        self.album = album
        self.track = track
        self.disc = disc
        self.year = year

    def __str__(self) -> str:
        return "{artist} - {album} ({year}) / {track:02}. {name}".format(
            artist=self.artist,
            album=self.album,
            year=self.year,
            track=self.track,
            name=self.name
        ).strip()

    def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str:
        """
        Build a release filename for this Song from MediaInfo.

        Parameters:
            media_info: MediaInfo of the file this name will be used for.
            folder: Build the album folder name instead of the track filename.
            show_service: Include the service tag in the name.
        """
        # NOTE(review): assumes the file has at least one audio track; if not,
        # the attribute accesses below raise AttributeError on None — confirm
        # whether a guard is wanted here.
        audio_track = next(iter(media_info.audio_tracks), None)
        codec = audio_track.format
        channel_layout = audio_track.channel_layout or audio_track.channellayout_original
        # LFE channels count as 0.1, everything else as 1 (e.g. 5.1)
        channels = float(sum(
            {"LFE": 0.1}.get(position.upper(), 1)
            for position in channel_layout.split(" ")
        ))
        features = audio_track.format_additionalfeatures or ""

        # str(self) is "Artist - Album (Year) / NN. Song Name"; split on the
        # separator to select the folder vs. track portion
        if folder:
            # Artist - Album (Year)
            name = str(self).split(" / ")[0]
        else:
            # NN. Song Name
            name = str(self).split(" / ")[1]

        # Service
        if show_service:
            name += f" {self.service.__name__}"

        # 'WEB-DL'
        name += " WEB-DL"

        # Audio Codec + Channels (+ feature)
        name += f" {AUDIO_CODEC_MAP.get(codec, codec)}{channels:.1f}"
        if "JOC" in features:
            name += " Atmos"

        if config.tag:
            name += f"-{config.tag}"

        return sanitize_filename(name, " ")


class Album(SortedKeyList, ABC):
    """A sorted collection of Song objects, ordered by (album, disc, track, year)."""

    def __init__(self, iterable: Optional[Iterable] = None):
        super().__init__(
            iterable,
            key=lambda x: (x.album, x.disc, x.track, x.year or 0)
        )

    def __str__(self) -> str:
        if not self:
            return super().__str__()
        return f"{self[0].artist} - {self[0].album} ({self[0].year or '?'})"

    def tree(self, verbose: bool = False) -> Tree:
        """Build a rich Tree summarizing the songs (detailed if verbose)."""
        num_songs = len(self)
        tree = Tree(
            f"{num_songs} Song{['s', ''][num_songs == 1]}",
            guide_style="bright_black"
        )
        if verbose:
            for song in self:
                tree.add(
                    f"[bold]Track {song.track:02}.[/] [bright_black]({song.name})",
                    guide_style="bright_black"
                )

        return tree


__all__ = ("Song", "Album")
22 | 23 | Parameters: 24 | id_: An identifier for this specific title. It must be unique. Can be of any 25 | value. 26 | service: Service class that this title is from. 27 | language: The original recorded language for the title. If that information 28 | is not available, this should not be set to anything. 29 | data: Arbitrary storage for the title. Often used to store extra metadata 30 | information, IDs, URIs, and so on. 31 | """ 32 | if not id_: # includes 0, false, and similar values, this is intended 33 | raise ValueError("A unique ID must be provided") 34 | if hasattr(id_, "__len__") and len(id_) < 4: 35 | raise ValueError("The unique ID is not large enough, clash likely.") 36 | 37 | if not service: 38 | raise ValueError("Service class must be provided") 39 | if not isinstance(service, type): 40 | raise TypeError(f"Expected service to be a Class (type), not {service!r}") 41 | 42 | if language is not None: 43 | if isinstance(language, str): 44 | language = Language.get(language) 45 | elif not isinstance(language, Language): 46 | raise TypeError(f"Expected language to be a {Language} or str, not {language!r}") 47 | 48 | self.id = id_ 49 | self.service = service 50 | self.language = language 51 | self.data = data 52 | 53 | self.tracks = Tracks() 54 | 55 | def __eq__(self, other: Title) -> bool: 56 | return self.id == other.id 57 | 58 | @abstractmethod 59 | def get_filename(self, media_info: MediaInfo, folder: bool = False, show_service: bool = True) -> str: 60 | """ 61 | Get a Filename for this Title with the provided Media Info. 62 | All filenames should be sanitized with the sanitize_filename() utility function. 63 | 64 | Parameters: 65 | media_info: MediaInfo object of the file this name will be used for. 66 | folder: This filename will be used as a folder name. Some changes may want to 67 | be made if this is the case. 68 | show_service: Show the service tag (e.g., iT, NF) in the filename. 
69 | """ 70 | 71 | 72 | __all__ = ("Title",) 73 | -------------------------------------------------------------------------------- /devine/core/tracks/__init__.py: -------------------------------------------------------------------------------- 1 | from .audio import Audio 2 | from .chapter import Chapter 3 | from .chapters import Chapters 4 | from .subtitle import Subtitle 5 | from .track import Track 6 | from .tracks import Tracks 7 | from .video import Video 8 | 9 | __all__ = ("Audio", "Chapter", "Chapters", "Subtitle", "Track", "Tracks", "Video") 10 | -------------------------------------------------------------------------------- /devine/core/tracks/attachment.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import mimetypes 4 | from pathlib import Path 5 | from typing import Optional, Union 6 | from zlib import crc32 7 | 8 | 9 | class Attachment: 10 | def __init__( 11 | self, 12 | path: Union[Path, str], 13 | name: Optional[str] = None, 14 | mime_type: Optional[str] = None, 15 | description: Optional[str] = None 16 | ): 17 | """ 18 | Create a new Attachment. 19 | 20 | If name is not provided it will use the file name (without extension). 21 | If mime_type is not provided, it will try to guess it. 
22 | """ 23 | if not isinstance(path, (str, Path)): 24 | raise ValueError("The attachment path must be provided.") 25 | if not isinstance(name, (str, type(None))): 26 | raise ValueError("The attachment name must be provided.") 27 | 28 | path = Path(path) 29 | if not path.exists(): 30 | raise ValueError("The attachment file does not exist.") 31 | 32 | name = (name or path.stem).strip() 33 | mime_type = (mime_type or "").strip() or None 34 | description = (description or "").strip() or None 35 | 36 | if not mime_type: 37 | mime_type = { 38 | ".ttf": "application/x-truetype-font", 39 | ".otf": "application/vnd.ms-opentype" 40 | }.get(path.suffix.lower(), mimetypes.guess_type(path)[0]) 41 | if not mime_type: 42 | raise ValueError("The attachment mime-type could not be automatically detected.") 43 | 44 | self.path = path 45 | self.name = name 46 | self.mime_type = mime_type 47 | self.description = description 48 | 49 | def __repr__(self) -> str: 50 | return "{name}({items})".format( 51 | name=self.__class__.__name__, 52 | items=", ".join([f"{k}={repr(v)}" for k, v in self.__dict__.items()]) 53 | ) 54 | 55 | def __str__(self) -> str: 56 | return " | ".join(filter(bool, [ 57 | "ATT", 58 | self.name, 59 | self.mime_type, 60 | self.description 61 | ])) 62 | 63 | @property 64 | def id(self) -> str: 65 | """Compute an ID from the attachment data.""" 66 | checksum = crc32(self.path.read_bytes()) 67 | return hex(checksum) 68 | 69 | 70 | __all__ = ("Attachment",) 71 | -------------------------------------------------------------------------------- /devine/core/tracks/audio.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import math 4 | from enum import Enum 5 | from typing import Any, Optional, Union 6 | 7 | from devine.core.tracks.track import Track 8 | 9 | 10 | class Audio(Track): 11 | class Codec(str, Enum): 12 | AAC = "AAC" # https://wikipedia.org/wiki/Advanced_Audio_Coding 13 | AC3 = "DD" # 
https://wikipedia.org/wiki/Dolby_Digital 14 | EC3 = "DD+" # https://wikipedia.org/wiki/Dolby_Digital_Plus 15 | OPUS = "OPUS" # https://wikipedia.org/wiki/Opus_(audio_format) 16 | OGG = "VORB" # https://wikipedia.org/wiki/Vorbis 17 | DTS = "DTS" # https://en.wikipedia.org/wiki/DTS_(company)#DTS_Digital_Surround 18 | ALAC = "ALAC" # https://en.wikipedia.org/wiki/Apple_Lossless_Audio_Codec 19 | FLAC = "FLAC" # https://en.wikipedia.org/wiki/FLAC 20 | 21 | @property 22 | def extension(self) -> str: 23 | return self.name.lower() 24 | 25 | @staticmethod 26 | def from_mime(mime: str) -> Audio.Codec: 27 | mime = mime.lower().strip().split(".")[0] 28 | if mime == "mp4a": 29 | return Audio.Codec.AAC 30 | if mime == "ac-3": 31 | return Audio.Codec.AC3 32 | if mime == "ec-3": 33 | return Audio.Codec.EC3 34 | if mime == "opus": 35 | return Audio.Codec.OPUS 36 | if mime == "dtsc": 37 | return Audio.Codec.DTS 38 | if mime == "alac": 39 | return Audio.Codec.ALAC 40 | if mime == "flac": 41 | return Audio.Codec.FLAC 42 | raise ValueError(f"The MIME '{mime}' is not a supported Audio Codec") 43 | 44 | @staticmethod 45 | def from_codecs(codecs: str) -> Audio.Codec: 46 | for codec in codecs.lower().split(","): 47 | mime = codec.strip().split(".")[0] 48 | try: 49 | return Audio.Codec.from_mime(mime) 50 | except ValueError: 51 | pass 52 | raise ValueError(f"No MIME types matched any supported Audio Codecs in '{codecs}'") 53 | 54 | @staticmethod 55 | def from_netflix_profile(profile: str) -> Audio.Codec: 56 | profile = profile.lower().strip() 57 | if profile.startswith("heaac"): 58 | return Audio.Codec.AAC 59 | if profile.startswith("dd-"): 60 | return Audio.Codec.AC3 61 | if profile.startswith("ddplus"): 62 | return Audio.Codec.EC3 63 | if profile.startswith("playready-oggvorbis"): 64 | return Audio.Codec.OGG 65 | raise ValueError(f"The Content Profile '{profile}' is not a supported Audio Codec") 66 | 67 | def __init__( 68 | self, 69 | *args: Any, 70 | codec: Optional[Audio.Codec] = None, 
71 | bitrate: Optional[Union[str, int, float]] = None, 72 | channels: Optional[Union[str, int, float]] = None, 73 | joc: Optional[int] = None, 74 | descriptive: Union[bool, int] = False, 75 | **kwargs: Any 76 | ): 77 | """ 78 | Create a new Audio track object. 79 | 80 | Parameters: 81 | codec: An Audio.Codec enum representing the audio codec. 82 | If not specified, MediaInfo will be used to retrieve the codec 83 | once the track has been downloaded. 84 | bitrate: A number or float representing the average bandwidth in bytes/s. 85 | Float values are rounded up to the nearest integer. 86 | channels: A number, float, or string representing the number of audio channels. 87 | Strings may represent numbers or floats. Expanded layouts like 7.1.1 is 88 | not supported. All numbers and strings will be cast to float. 89 | joc: The number of Joint-Object-Coding Channels/Objects in the audio stream. 90 | descriptive: Mark this audio as being descriptive audio for the blind. 91 | 92 | Note: If codec, bitrate, channels, or joc is not specified some checks may be 93 | skipped or assume a value. Specifying as much information as possible is highly 94 | recommended. 
95 | """ 96 | super().__init__(*args, **kwargs) 97 | 98 | if not isinstance(codec, (Audio.Codec, type(None))): 99 | raise TypeError(f"Expected codec to be a {Audio.Codec}, not {codec!r}") 100 | if not isinstance(bitrate, (str, int, float, type(None))): 101 | raise TypeError(f"Expected bitrate to be a {str}, {int}, or {float}, not {bitrate!r}") 102 | if not isinstance(channels, (str, int, float, type(None))): 103 | raise TypeError(f"Expected channels to be a {str}, {int}, or {float}, not {channels!r}") 104 | if not isinstance(joc, (int, type(None))): 105 | raise TypeError(f"Expected joc to be a {int}, not {joc!r}") 106 | if ( 107 | not isinstance(descriptive, (bool, int)) or 108 | (isinstance(descriptive, int) and descriptive not in (0, 1)) 109 | ): 110 | raise TypeError(f"Expected descriptive to be a {bool} or bool-like {int}, not {descriptive!r}") 111 | 112 | self.codec = codec 113 | 114 | try: 115 | self.bitrate = int(math.ceil(float(bitrate))) if bitrate else None 116 | except (ValueError, TypeError) as e: 117 | raise ValueError(f"Expected bitrate to be a number or float, {e}") 118 | 119 | try: 120 | self.channels = self.parse_channels(channels) if channels else None 121 | except (ValueError, NotImplementedError) as e: 122 | raise ValueError(f"Expected channels to be a number, float, or a string, {e}") 123 | 124 | self.joc = joc 125 | self.descriptive = bool(descriptive) 126 | 127 | def __str__(self) -> str: 128 | return " | ".join(filter(bool, [ 129 | "AUD", 130 | f"[{self.codec.value}]" if self.codec else None, 131 | str(self.language), 132 | ", ".join(filter(bool, [ 133 | str(self.channels) if self.channels else None, 134 | f"JOC {self.joc}" if self.joc else None, 135 | ])), 136 | f"{self.bitrate // 1000} kb/s" if self.bitrate else None, 137 | self.get_track_name(), 138 | self.edition 139 | ])) 140 | 141 | @staticmethod 142 | def parse_channels(channels: Union[str, int, float]) -> float: 143 | """ 144 | Converts a Channel string to a float representing audio 
channel count and layout. 145 | E.g. "3" -> "3.0", "2.1" -> "2.1", ".1" -> "0.1". 146 | 147 | This does not validate channel strings as genuine channel counts or valid layouts. 148 | It does not convert the value to assume a sub speaker channel layout, e.g. 5.1->6.0. 149 | It also does not support expanded surround sound channel layout strings like 7.1.2. 150 | """ 151 | if isinstance(channels, str): 152 | # TODO: Support all possible DASH channel configurations (https://datatracker.ietf.org/doc/html/rfc8216) 153 | if channels.upper() == "A000": 154 | return 2.0 155 | elif channels.upper() == "F801": 156 | return 5.1 157 | elif channels.replace("ch", "").replace(".", "", 1).isdigit(): 158 | # e.g., '2ch', '2', '2.0', '5.1ch', '5.1' 159 | return float(channels.replace("ch", "")) 160 | raise NotImplementedError(f"Unsupported Channels string value, '{channels}'") 161 | 162 | return float(channels) 163 | 164 | def get_track_name(self) -> Optional[str]: 165 | """Return the base Track Name.""" 166 | track_name = super().get_track_name() or "" 167 | flag = self.descriptive and "Descriptive" 168 | if flag: 169 | if track_name: 170 | flag = f" ({flag})" 171 | track_name += flag 172 | return track_name or None 173 | 174 | 175 | __all__ = ("Audio",) 176 | -------------------------------------------------------------------------------- /devine/core/tracks/chapter.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import re 4 | from typing import Optional, Union 5 | from zlib import crc32 6 | 7 | TIMESTAMP_FORMAT = re.compile(r"^(?P\d{2}):(?P\d{2}):(?P\d{2})(?P.\d{3}|)$") 8 | 9 | 10 | class Chapter: 11 | def __init__(self, timestamp: Union[str, int, float], name: Optional[str] = None): 12 | """ 13 | Create a new Chapter with a Timestamp and optional name. 14 | 15 | The timestamp may be in the following formats: 16 | - "HH:MM:SS" string, e.g., `25:05:23`. 
def __str__(self) -> str:
    """One-line human-readable summary, e.g. `CHP | 00:01:00.000 | Intro`."""
    fields = ["CHP", self.timestamp, self.name]
    return " | ".join(field for field in fields if field)
class Chapters(SortedKeyList, ABC):
    """A timestamp-ordered collection of Chapter objects."""

    def __init__(self, iterable: Optional[Iterable[Chapter]] = None):
        super().__init__(key=lambda x: x.timestamp or 0)
        for chapter in iterable or []:
            self.add(chapter)

    def __repr__(self) -> str:
        attributes = ", ".join(f"{k}={v!r}" for k, v in self.__dict__.items())
        return f"{self.__class__.__name__}({attributes})"

    def __str__(self) -> str:
        rows = []
        for i, chapter in enumerate(self, start=1):
            fields = [field for field in ("CHP", f"[{i:02}]", chapter.timestamp, chapter.name) if field]
            rows.append(" | ".join(fields))
        return "\n".join(rows)

    @classmethod
    def loads(cls, data: str) -> Chapters:
        """Load chapter data from an OGM-based Simple Chapter format string."""
        lines = [line.strip() for line in data.strip().splitlines(keepends=False)]
        if len(lines) % 2 != 0:
            raise ValueError("The number of chapter lines must be even.")

        chapters = []
        # entries come in pairs: `CHAPTERnn=<timestamp>` then `CHAPTERnnNAME=<name>`
        for timestamp_line, name_line in zip(lines[0::2], lines[1::2]):
            timestamp_match = OGM_SIMPLE_LINE_1_FORMAT.match(timestamp_line)
            if not timestamp_match:
                raise SyntaxError(f"An unexpected syntax error occurred on: {timestamp_line}")
            name_match = OGM_SIMPLE_LINE_2_FORMAT.match(name_line)
            if not name_match:
                raise SyntaxError(f"An unexpected syntax error occurred on: {name_line}")

            timestamp_number, timestamp = timestamp_match.groups()
            name_number, name = name_match.groups()

            if timestamp_number != name_number:
                raise SyntaxError(
                    f"The chapter numbers {timestamp_number} and {name_number} do not match on:\n{timestamp_line}\n{name_line}")

            if not timestamp:
                raise SyntaxError(f"The timestamp is missing on: {timestamp_line}")

            chapters.append(Chapter(timestamp, name))

        return cls(chapters)

    @classmethod
    def load(cls, path: Union[Path, str]) -> Chapters:
        """Load chapter data from a file."""
        if isinstance(path, str):
            path = Path(path)
        return cls.loads(path.read_text(encoding="utf8"))

    def dumps(self, fallback_name: str = "") -> str:
        """
        Return chapter data in OGM-based Simple Chapter format.
        https://mkvtoolnix.download/doc/mkvmerge.html#mkvmerge.chapters.simple

        Parameters:
            fallback_name: Name used for Chapters without a Name set.

        The fallback name can use the following variables in f-string style:

        - {i}: The Chapter number starting at 1.
          E.g., `"Chapter {i}"`: "Chapter 1", "Intro", "Chapter 3".
        - {j}: A number starting at 1 that increments any time a Chapter has no name.
          E.g., `"Chapter {j}"`: "Chapter 1", "Intro", "Chapter 2".

        These are formatted with f-strings, directives are supported.
        For example, `"Chapter {i:02}"` will result in `"Chapter 01"`.
        """
        entries = []
        unnamed_count = 0
        for i, chapter in enumerate(self, start=1):
            if not chapter.name:
                unnamed_count += 1
            name = chapter.name or fallback_name.format(i=i, j=unnamed_count)
            entries.append(f"CHAPTER{i:02}={chapter.timestamp}\nCHAPTER{i:02}NAME={name}")
        return "\n".join(entries)

    def dump(self, path: Union[Path, str], *args: Any, **kwargs: Any) -> int:
        """
        Write chapter data in OGM-based Simple Chapter format to a file.

        Parameters:
            path: The file path to write the Chapter data to, overwriting
                any existing data.

        See `Chapters.dumps` for more parameter documentation.
        """
        if isinstance(path, str):
            path = Path(path)
        path.parent.mkdir(parents=True, exist_ok=True)
        return path.write_text(self.dumps(*args, **kwargs), encoding="utf8")

    def add(self, value: Chapter) -> None:
        """Add a Chapter, rejecting duplicate timestamps."""
        if not isinstance(value, Chapter):
            raise TypeError(f"Can only add {Chapter} objects, not {type(value)}")
        if any(existing.timestamp == value.timestamp for existing in self):
            raise ValueError(f"A Chapter with the Timestamp {value.timestamp} already exists")
        super().add(value)
        # guarantee that playback always starts within a chapter
        if not any(existing.timestamp == "00:00:00.000" for existing in self):
            self.add(Chapter(0))

    @property
    def id(self) -> str:
        """Compute an ID from the Chapter data."""
        combined = "\n".join(chapter.id for chapter in self)
        return hex(crc32(combined.encode("utf8")))
def rotate_log_file(log_path: Path, keep: int = 20) -> Path:
    """
    Resolve the final log file path and delete old log files.

    The {name} and {time} placeholders in the file name are filled in
    (name="root", time=current local time). If only a bare file name is
    given, the file is placed in the configured logs directory.

    Only the `keep - 1` most recently modified existing logs are kept so
    that, together with the new log file, at most `keep` logs remain.

    Parameters:
        log_path: Path (or bare file name) of the new log file.
        keep: Total number of log files to keep, including the new one.

    Raises:
        ValueError: If no log path is provided.
    """
    if not log_path:
        raise ValueError("A log path must be provided")

    try:
        log_path.relative_to(Path(""))  # file name only
    except ValueError:
        pass
    else:
        log_path = config.directories.logs / log_path

    log_path = log_path.parent / log_path.name.format_map(defaultdict(
        str,
        name="root",
        time=datetime.now().strftime("%Y%m%d-%H%M%S")
    ))

    if log_path.parent.exists():
        # Path.iterdir() order is unspecified, so sort explicitly by
        # modification time (newest first) before pruning old logs.
        log_files = sorted(
            (x for x in log_path.parent.iterdir() if x.suffix == log_path.suffix),
            key=lambda x: x.stat().st_mtime,
            reverse=True
        )
        for log_file in log_files[keep - 1:]:
            # keep the n-1 newest files and delete the rest
            log_file.unlink()

    log_path.parent.mkdir(parents=True, exist_ok=True)
    return log_path
def sanitize_filename(filename: str, spacer: str = ".") -> str:
    """
    Sanitize a string to be filename safe.

    The spacer is safer to be a '.' for older DDL and p2p sharing spaces.
    This includes web-served content via direct links and such.
    """
    # transliterate all non-ASCII characters to ASCII equivalents
    safe = unidecode(filename)

    # drop combining marks (hidden characters)
    safe = "".join(c for c in safe if unicodedata.category(c) != "Mn")

    # e.g. multi-episode filenames
    for separator in ("/", ";"):
        safe = safe.replace(separator, " & ")

    safe = re.sub(r"[:; ]", spacer, safe)  # structural chars to (spacer)
    safe = re.sub(r"[\\*!?¿,'\"“”()<>|$#]", "", safe)  # not filename safe chars
    safe = re.sub(rf"[{spacer}]{{2,}}", spacer, safe)  # collapse neighbouring (spacer)s

    return safe
def ap_case(text: str, keep_spaces: bool = False, stop_words: tuple[str] = None) -> str:
    """
    Convert a string to title case using AP/APA style.
    Based on https://github.com/words/ap-style-title-case

    Parameters:
        text: The text string to title case with AP/APA style.
        keep_spaces: To keep the original whitespace, or to just use a normal space.
            This would only be needed if you have special whitespace between words.
        stop_words: Override the default stop words with your own ones.
    """
    if not text:
        return ""

    if not stop_words:
        stop_words = ("a", "an", "and", "at", "but", "by", "for", "in", "nor",
                      "of", "on", "or", "so", "the", "to", "up", "yet")

    # capture group keeps the separators (whitespace runs and dash variants)
    splitter = re.compile(r"(\s+|[-‑–—])")
    tokens = splitter.split(text)
    last = len(tokens) - 1

    result = []
    for i, token in enumerate(tokens):
        if re.match(r"\s+", token):
            # whitespace separator: keep as-is or normalize to a single space
            result.append(token if keep_spaces else " ")
        elif splitter.match(token):
            # dash separator: always kept verbatim
            result.append(token)
        elif token.lower() in stop_words and i not in (0, last):
            # interior stop words stay lowercase; first/last words never do
            result.append(token.lower())
        else:
            result.append(token.capitalize())
    return "".join(result)
def try_ensure_utf8(data: bytes) -> bytes:
    """
    Try to ensure that the given data is encoded in UTF-8.

    Parameters:
        data: Input data that may or may not yet be UTF-8 or another encoding.

    Returns the input data encoded in UTF-8 if successful. If unable to detect the
    encoding of the input data, then the original data is returned as-received.
    """
    # already valid UTF-8? return unchanged
    try:
        data.decode("utf8")
    except UnicodeDecodeError:
        pass
    else:
        return data

    # CP-1252 is a superset of latin1
    try:
        return data.decode("cp1252").encode("utf8")
    except UnicodeDecodeError:
        pass

    # last ditch effort to detect encoding
    try:
        detection_result = chardet.detect(data)
        if not detection_result["encoding"]:
            return data
        return data.decode(detection_result["encoding"]).encode("utf8")
    except UnicodeDecodeError:
        return data
class FPS(ast.NodeVisitor):
    """
    Parse a frame-rate expression such as "30000/1001" or "25" to a float.

    Only bare numbers and a single division are supported; any other
    operator raises a ValueError.
    """

    def visit_BinOp(self, node: ast.BinOp) -> float:
        # only division is a valid frame-rate expression (e.g., 24000/1001)
        if isinstance(node.op, ast.Div):
            return self.visit(node.left) / self.visit(node.right)
        raise ValueError(f"Invalid operation: {node.op}")

    def visit_Constant(self, node: ast.Constant) -> complex:
        # ast.parse emits Constant nodes since Python 3.8; without this
        # handler we relied on the deprecated Num-compatibility shim in
        # NodeVisitor, which is scheduled for removal.
        return node.value

    def visit_Num(self, node: ast.Num) -> complex:
        # kept for backward compatibility with pre-3.8 ASTs
        return node.n

    def visit_Expr(self, node: ast.Expr) -> float:
        return self.visit(node.value)

    @classmethod
    def parse(cls, expr: str) -> float:
        return cls().visit(ast.parse(expr).body[0])
def parse_tokens(self, *tokens: str) -> list[str]:
    """
    Parse multiple tokens or ranged tokens as '{season}x{episode}' strings.

    Supports exclusion by putting a `-` before the token.

    Example:
        >>> sr = SeasonRange()
        >>> sr.parse_tokens("S01E01")
        ["1x1"]
        >>> sr.parse_tokens("S02E01", "S02E03-S02E05")
        ["2x1", "2x3", "2x4", "2x5"]
    """
    if not tokens:
        return []

    token_pattern = re.compile(r"^S(?P<season>\d+)(E(?P<episode>\d+))?$", re.IGNORECASE)

    included: list = []
    excluded: list = []

    for token in tokens:
        is_exclusion = token.startswith("-")
        if is_exclusion:
            token = token[1:]

        # a token is either a single endpoint or a `left-right` range
        endpoints = [token_pattern.match(part) for part in re.split(r"[:-]", token)]
        if len(endpoints) > 2:
            self.fail(f"Invalid token, only a left and right range is acceptable: {token}")
        if len(endpoints) == 1:
            endpoints.append(endpoints[0])
        if any(endpoint is None for endpoint in endpoints):
            self.fail(f"Invalid token, syntax error occurred: {token}")

        left, right = endpoints
        from_season = int(left["season"])
        from_episode = int(left["episode"]) if left["episode"] is not None else self.MIN_EPISODE
        to_season = int(right["season"])
        to_episode = int(right["episode"]) if right["episode"] is not None else self.MAX_EPISODE

        if from_season > to_season:
            self.fail(f"Invalid range, left side season cannot be bigger than right side season: {token}")
        if from_season == to_season and from_episode > to_episode:
            self.fail(f"Invalid range, left side episode cannot be bigger than right side episode: {token}")

        bucket = excluded if is_exclusion else included
        for season in range(from_season, to_season + 1):
            first = from_episode if season == from_season else 0
            last = self.MAX_EPISODE if season < to_season else to_episode
            for episode in range(first, last + 1):
                bucket.append(f"{season}x{episode}")

    for exclusion in excluded:
        if exclusion in included:
            included.remove(exclusion)

    return list(set(included))
class QualityList(click.ParamType):
    """Click parameter type parsing comma-separated resolutions into sorted ints."""

    name = "quality_list"

    def convert(
        self,
        value: Union[str, list[str]],
        param: Optional[click.Parameter] = None,
        ctx: Optional[click.Context] = None
    ) -> list[int]:
        """Parse e.g. `"1080p,720"` into `[1080, 720]` (highest first)."""
        if not value:
            return []
        if not isinstance(value, list):
            value = value.split(",")

        resolutions = []
        for resolution in value:
            try:
                # accept both "1080" and "1080p" spellings
                resolutions.append(int(resolution.lower().rstrip("p")))
            except TypeError:
                self.fail(
                    f"Expected string for int() conversion, got {resolution!r} of type {type(resolution).__name__}",
                    param,
                    ctx
                )
            except ValueError:
                self.fail(f"{resolution!r} is not a valid integer", param, ctx)

        return sorted(resolutions, reverse=True)
def shell_complete(
    self,
    ctx: click.Context,
    param: click.Parameter,
    incomplete: str
) -> list[CompletionItem]:
    """
    Complete choices that start with the incomplete value.

    Parameters:
        ctx: Invocation context for this command.
        param: The parameter that is requesting completion.
        incomplete: Value being completed. May be empty.
    """
    # only complete the segment after the last comma, since values are
    # given as a comma-separated list
    incomplete = incomplete.rsplit(",")[-1]
    # BUG FIX: `super(self)` raised "TypeError: super() argument 1 must be
    # a type"; zero-argument super() correctly delegates to click.Choice.
    return super().shell_complete(ctx, param, incomplete)
def merge_dict(source: dict, destination: dict) -> None:
    """Recursively merge Source into Destination in-place."""
    if not source:
        return
    for key, value in source.items():
        if isinstance(value, dict):
            # recurse into (or create) the matching nested dict
            merge_dict(value, destination.setdefault(key, {}))
        else:
            destination[key] = value
17 | 18 | Level 1: 19 | The security level corresponds to a minimum of 80 bits of security. Any parameters 20 | offering below 80 bits of security are excluded. As a result RSA, DSA and DH keys 21 | shorter than 1024 bits and ECC keys shorter than 160 bits are prohibited. All export 22 | cipher suites are prohibited since they all offer less than 80 bits of security. SSL 23 | version 2 is prohibited. Any cipher suite using MD5 for the MAC is also prohibited. 24 | 25 | Level 2: 26 | Security level set to 112 bits of security. As a result RSA, DSA and DH keys shorter 27 | than 2048 bits and ECC keys shorter than 224 bits are prohibited. In addition to the 28 | level 1 exclusions any cipher suite using RC4 is also prohibited. SSL version 3 is 29 | also not allowed. Compression is disabled. 30 | 31 | Level 3: 32 | Security level set to 128 bits of security. As a result RSA, DSA and DH keys shorter 33 | than 3072 bits and ECC keys shorter than 256 bits are prohibited. In addition to the 34 | level 2 exclusions cipher suites not offering forward secrecy are prohibited. TLS 35 | versions below 1.1 are not permitted. Session tickets are disabled. 36 | 37 | Level 4: 38 | Security level set to 192 bits of security. As a result RSA, DSA and DH keys shorter 39 | than 7680 bits and ECC keys shorter than 384 bits are prohibited. Cipher suites using 40 | SHA1 for the MAC are prohibited. TLS versions below 1.2 are not permitted. 41 | 42 | Level 5: 43 | Security level set to 256 bits of security. As a result RSA, DSA and DH keys shorter 44 | than 15360 bits and ECC keys shorter than 512 bits are prohibited. 
45 | """ 46 | 47 | def __init__(self, cipher_list: Optional[str] = None, security_level: int = 0, *args, **kwargs): 48 | if cipher_list: 49 | if not isinstance(cipher_list, str): 50 | raise TypeError(f"Expected cipher_list to be a str, not {cipher_list!r}") 51 | if "@SECLEVEL" in cipher_list: 52 | raise ValueError("You must not specify the Security Level manually in the cipher list.") 53 | if not isinstance(security_level, int): 54 | raise TypeError(f"Expected security_level to be an int, not {security_level!r}") 55 | if security_level not in range(6): 56 | raise ValueError(f"The security_level must be a value between 0 and 5, not {security_level}") 57 | 58 | if not cipher_list: 59 | # cpython's default cipher list differs to Python-requests cipher list 60 | cipher_list = "DEFAULT" 61 | 62 | cipher_list += f":@SECLEVEL={security_level}" 63 | 64 | ctx = ssl.create_default_context() 65 | ctx.check_hostname = False # For some reason this is needed to avoid a verification error 66 | ctx.set_ciphers(cipher_list) 67 | 68 | self._ssl_context = ctx 69 | super().__init__(*args, **kwargs) 70 | 71 | def init_poolmanager(self, *args, **kwargs): 72 | kwargs["ssl_context"] = self._ssl_context 73 | return super().init_poolmanager(*args, **kwargs) 74 | 75 | def proxy_manager_for(self, *args, **kwargs): 76 | kwargs["ssl_context"] = self._ssl_context 77 | return super().proxy_manager_for(*args, **kwargs) 78 | -------------------------------------------------------------------------------- /devine/core/utils/subprocess.py: -------------------------------------------------------------------------------- 1 | import json 2 | import subprocess 3 | from pathlib import Path 4 | from typing import Union 5 | 6 | from devine.core import binaries 7 | 8 | 9 | def ffprobe(uri: Union[bytes, Path]) -> dict: 10 | """Use ffprobe on the provided data to get stream information.""" 11 | if not binaries.FFProbe: 12 | raise EnvironmentError("FFProbe executable \"ffprobe\" not found but is required.") 13 
| 14 | args = [ 15 | binaries.FFProbe, 16 | "-v", "quiet", 17 | "-of", "json", 18 | "-show_streams" 19 | ] 20 | if isinstance(uri, Path): 21 | args.extend([ 22 | "-f", "lavfi", 23 | "-i", "movie={}[out+subcc]".format(str(uri).replace("\\", '/').replace(":", "\\\\:")) 24 | ]) 25 | elif isinstance(uri, bytes): 26 | args.append("pipe:") 27 | try: 28 | ff = subprocess.run( 29 | args, 30 | input=uri if isinstance(uri, bytes) else None, 31 | check=True, 32 | capture_output=True 33 | ) 34 | except subprocess.CalledProcessError: 35 | return {} 36 | return json.loads(ff.stdout.decode("utf8")) 37 | -------------------------------------------------------------------------------- /devine/core/utils/webvtt.py: -------------------------------------------------------------------------------- 1 | import re 2 | import sys 3 | import typing 4 | from typing import Optional 5 | 6 | from pycaption import Caption, CaptionList, CaptionNode, CaptionReadError, WebVTTReader, WebVTTWriter 7 | 8 | 9 | class CaptionListExt(CaptionList): 10 | @typing.no_type_check 11 | def __init__(self, iterable=None, layout_info=None): 12 | self.first_segment_mpegts = 0 13 | super().__init__(iterable, layout_info) 14 | 15 | 16 | class CaptionExt(Caption): 17 | @typing.no_type_check 18 | def __init__(self, start, end, nodes, style=None, layout_info=None, segment_index=0, mpegts=0, cue_time=0.0): 19 | style = style or {} 20 | self.segment_index: int = segment_index 21 | self.mpegts: float = mpegts 22 | self.cue_time: float = cue_time 23 | super().__init__(start, end, nodes, style, layout_info) 24 | 25 | 26 | class WebVTTReaderExt(WebVTTReader): 27 | # HLS extension support 28 | RE_TIMESTAMP_MAP = re.compile(r"X-TIMESTAMP-MAP.*") 29 | RE_MPEGTS = re.compile(r"MPEGTS:(\d+)") 30 | RE_LOCAL = re.compile(r"LOCAL:((?:(\d{1,}):)?(\d{2}):(\d{2})\.(\d{3}))") 31 | 32 | def _parse(self, lines: list[str]) -> CaptionList: 33 | captions = CaptionListExt() 34 | start = None 35 | end = None 36 | nodes: list[CaptionNode] = [] 
37 | layout_info = None 38 | found_timing = False 39 | segment_index = -1 40 | mpegts = 0 41 | cue_time = 0.0 42 | 43 | # The first segment MPEGTS is needed to calculate the rest. It is possible that 44 | # the first segment contains no cue and is ignored by pycaption, this acts as a fallback. 45 | captions.first_segment_mpegts = 0 46 | 47 | for i, line in enumerate(lines): 48 | if "-->" in line: 49 | found_timing = True 50 | timing_line = i 51 | last_start_time = captions[-1].start if captions else 0 52 | try: 53 | start, end, layout_info = self._parse_timing_line(line, last_start_time) 54 | except CaptionReadError as e: 55 | new_msg = f"{e.args[0]} (line {timing_line})" 56 | tb = sys.exc_info()[2] 57 | raise type(e)(new_msg).with_traceback(tb) from None 58 | 59 | elif "" == line: 60 | if found_timing and nodes: 61 | found_timing = False 62 | caption = CaptionExt( 63 | start, 64 | end, 65 | nodes, 66 | layout_info=layout_info, 67 | segment_index=segment_index, 68 | mpegts=mpegts, 69 | cue_time=cue_time, 70 | ) 71 | captions.append(caption) 72 | nodes = [] 73 | 74 | elif "WEBVTT" in line: 75 | # Merged segmented VTT doesn't have index information, track manually. 76 | segment_index += 1 77 | mpegts = 0 78 | cue_time = 0.0 79 | elif m := self.RE_TIMESTAMP_MAP.match(line): 80 | if r := self.RE_MPEGTS.search(m.group()): 81 | mpegts = int(r.group(1)) 82 | 83 | cue_time = self._parse_local(m.group()) 84 | 85 | # Early assignment in case the first segment contains no cue. 
86 | if segment_index == 0: 87 | captions.first_segment_mpegts = mpegts 88 | 89 | else: 90 | if found_timing: 91 | if nodes: 92 | nodes.append(CaptionNode.create_break()) 93 | nodes.append(CaptionNode.create_text(self._decode(line))) 94 | else: 95 | # it's a comment or some metadata; ignore it 96 | pass 97 | 98 | # Add a last caption if there are remaining nodes 99 | if nodes: 100 | caption = CaptionExt(start, end, nodes, layout_info=layout_info, segment_index=segment_index, mpegts=mpegts) 101 | captions.append(caption) 102 | 103 | return captions 104 | 105 | @staticmethod 106 | def _parse_local(string: str) -> float: 107 | """ 108 | Parse WebVTT LOCAL time and convert it to seconds. 109 | """ 110 | m = WebVTTReaderExt.RE_LOCAL.search(string) 111 | if not m: 112 | return 0 113 | 114 | parsed = m.groups() 115 | if not parsed: 116 | return 0 117 | hours = int(parsed[1]) 118 | minutes = int(parsed[2]) 119 | seconds = int(parsed[3]) 120 | milliseconds = int(parsed[4]) 121 | return (milliseconds / 1000) + seconds + (minutes * 60) + (hours * 3600) 122 | 123 | 124 | def merge_segmented_webvtt(vtt_raw: str, segment_durations: Optional[list[int]] = None, timescale: int = 1) -> str: 125 | """ 126 | Merge Segmented WebVTT data. 127 | 128 | Parameters: 129 | vtt_raw: The concatenated WebVTT files to merge. All WebVTT headers must be 130 | appropriately spaced apart, or it may produce unwanted effects like 131 | considering headers as captions, timestamp lines, etc. 132 | segment_durations: A list of each segment's duration. If not provided it will try 133 | to get it from the X-TIMESTAMP-MAP headers, specifically the MPEGTS number. 134 | timescale: The number of time units per second. 135 | 136 | This parses the X-TIMESTAMP-MAP data to compute new absolute timestamps, replacing 137 | the old start and end timestamp values. All X-TIMESTAMP-MAP header information will 138 | be removed from the output as they are no longer of concern. 
Consider this function 139 | the opposite of a WebVTT Segmenter, a WebVTT Joiner of sorts. 140 | 141 | Algorithm borrowed from N_m3u8DL-RE and shaka-player. 142 | """ 143 | MPEG_TIMESCALE = 90_000 144 | 145 | vtt = WebVTTReaderExt().read(vtt_raw) 146 | for lang in vtt.get_languages(): 147 | prev_caption = None 148 | duplicate_index: list[int] = [] 149 | captions = vtt.get_captions(lang) 150 | 151 | if captions[0].segment_index == 0: 152 | first_segment_mpegts = captions[0].mpegts 153 | else: 154 | first_segment_mpegts = segment_durations[0] if segment_durations else captions.first_segment_mpegts 155 | 156 | caption: CaptionExt 157 | for i, caption in enumerate(captions): 158 | # DASH WebVTT doesn't have MPEGTS timestamp like HLS. Instead, 159 | # calculate the timestamp from SegmentTemplate/SegmentList duration. 160 | likely_dash = first_segment_mpegts == 0 and caption.mpegts == 0 161 | if likely_dash and segment_durations: 162 | duration = segment_durations[caption.segment_index] 163 | caption.mpegts = MPEG_TIMESCALE * (duration / timescale) 164 | 165 | if caption.mpegts == 0: 166 | continue 167 | 168 | seconds = (caption.mpegts - first_segment_mpegts) / MPEG_TIMESCALE - caption.cue_time 169 | offset = seconds * 1_000_000 # pycaption use microseconds 170 | 171 | if caption.start < offset: 172 | caption.start += offset 173 | caption.end += offset 174 | 175 | # If the difference between current and previous captions is <=1ms 176 | # and the payload is equal then splice. 
177 | if ( 178 | prev_caption 179 | and not caption.is_empty() 180 | and (caption.start - prev_caption.end) <= 1000 # 1ms in microseconds 181 | and caption.get_text() == prev_caption.get_text() 182 | ): 183 | prev_caption.end = caption.end 184 | duplicate_index.append(i) 185 | 186 | prev_caption = caption 187 | 188 | # Remove duplicate 189 | captions[:] = [c for c_index, c in enumerate(captions) if c_index not in set(duplicate_index)] 190 | 191 | return WebVTTWriter().write(vtt) 192 | -------------------------------------------------------------------------------- /devine/core/utils/xml.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | from lxml import etree 4 | from lxml.etree import ElementTree 5 | 6 | 7 | def load_xml(xml: Union[str, bytes]) -> ElementTree: 8 | """Safely parse XML data to an ElementTree, without namespaces in tags.""" 9 | if not isinstance(xml, bytes): 10 | xml = xml.encode("utf8") 11 | root = etree.fromstring(xml) 12 | for elem in root.getiterator(): 13 | if not hasattr(elem.tag, "find"): 14 | # e.g. 
comment elements 15 | continue 16 | elem.tag = etree.QName(elem).localname 17 | for name, value in elem.attrib.items(): 18 | local_name = etree.QName(name).localname 19 | if local_name == name: 20 | continue 21 | del elem.attrib[name] 22 | elem.attrib[local_name] = value 23 | etree.cleanup_namespaces(root) 24 | return root 25 | -------------------------------------------------------------------------------- /devine/core/vault.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta, abstractmethod 2 | from typing import Iterator, Optional, Union 3 | from uuid import UUID 4 | 5 | 6 | class Vault(metaclass=ABCMeta): 7 | def __init__(self, name: str): 8 | self.name = name 9 | 10 | def __str__(self) -> str: 11 | return f"{self.name} {type(self).__name__}" 12 | 13 | @abstractmethod 14 | def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]: 15 | """ 16 | Get Key from Vault by KID (Key ID) and Service. 17 | 18 | It does not get Key by PSSH as the PSSH can be different depending on it's implementation, 19 | or even how it was crafted. Some PSSH values may also actually be a CENC Header rather 20 | than a PSSH MP4 Box too, which makes the value even more confusingly different. 21 | 22 | However, the KID never changes unless the video file itself has changed too, meaning the 23 | key for the presumed-matching KID wouldn't work, further proving matching by KID is 24 | superior. 25 | """ 26 | 27 | @abstractmethod 28 | def get_keys(self, service: str) -> Iterator[tuple[str, str]]: 29 | """Get All Keys from Vault by Service.""" 30 | 31 | @abstractmethod 32 | def add_key(self, service: str, kid: Union[UUID, str], key: str) -> bool: 33 | """Add KID:KEY to the Vault.""" 34 | 35 | @abstractmethod 36 | def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int: 37 | """ 38 | Add Multiple Content Keys with Key IDs for Service to the Vault. 39 | Pre-existing Content Keys are ignored/skipped. 
40 | Raises PermissionError if the user has no permission to create the table. 41 | """ 42 | 43 | @abstractmethod 44 | def get_services(self) -> Iterator[str]: 45 | """Get a list of Service Tags from Vault.""" 46 | 47 | 48 | __all__ = ("Vault",) 49 | -------------------------------------------------------------------------------- /devine/core/vaults.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Iterator, Optional, Union 2 | from uuid import UUID 3 | 4 | from devine.core.config import config 5 | from devine.core.utilities import import_module_by_path 6 | from devine.core.vault import Vault 7 | 8 | _VAULTS = sorted( 9 | ( 10 | path 11 | for path in config.directories.vaults.glob("*.py") 12 | if path.stem.lower() != "__init__" 13 | ), 14 | key=lambda x: x.stem 15 | ) 16 | 17 | _MODULES = { 18 | path.stem: getattr(import_module_by_path(path), path.stem) 19 | for path in _VAULTS 20 | } 21 | 22 | 23 | class Vaults: 24 | """Keeps hold of Key Vaults with convenience functions, e.g. 
searching all vaults.""" 25 | 26 | def __init__(self, service: Optional[str] = None): 27 | self.service = service or "" 28 | self.vaults = [] 29 | 30 | def __iter__(self) -> Iterator[Vault]: 31 | return iter(self.vaults) 32 | 33 | def __len__(self) -> int: 34 | return len(self.vaults) 35 | 36 | def load(self, type_: str, **kwargs: Any) -> None: 37 | """Load a Vault into the vaults list.""" 38 | module = _MODULES.get(type_) 39 | if not module: 40 | raise ValueError(f"Unable to find vault command by the name '{type_}'.") 41 | vault = module(**kwargs) 42 | self.vaults.append(vault) 43 | 44 | def get_key(self, kid: Union[UUID, str]) -> tuple[Optional[str], Optional[Vault]]: 45 | """Get Key from the first Vault it can by KID (Key ID) and Service.""" 46 | for vault in self.vaults: 47 | key = vault.get_key(kid, self.service) 48 | if key and key.count("0") != len(key): 49 | return key, vault 50 | return None, None 51 | 52 | def add_key(self, kid: Union[UUID, str], key: str, excluding: Optional[Vault] = None) -> int: 53 | """Add a KID:KEY to all Vaults, optionally with an exclusion.""" 54 | success = 0 55 | for vault in self.vaults: 56 | if vault != excluding: 57 | try: 58 | success += vault.add_key(self.service, kid, key) 59 | except (PermissionError, NotImplementedError): 60 | pass 61 | return success 62 | 63 | def add_keys(self, kid_keys: dict[Union[UUID, str], str]) -> int: 64 | """ 65 | Add multiple KID:KEYs to all Vaults. Duplicate Content Keys are skipped. 66 | PermissionErrors when the user cannot create Tables are absorbed and ignored. 
67 | """ 68 | success = 0 69 | for vault in self.vaults: 70 | try: 71 | success += bool(vault.add_keys(self.service, kid_keys)) 72 | except (PermissionError, NotImplementedError): 73 | pass 74 | return success 75 | 76 | 77 | __all__ = ("Vaults",) 78 | -------------------------------------------------------------------------------- /devine/vaults/API.py: -------------------------------------------------------------------------------- 1 | from typing import Iterator, Optional, Union 2 | from uuid import UUID 3 | 4 | from requests import Session 5 | 6 | from devine.core import __version__ 7 | from devine.core.vault import Vault 8 | 9 | 10 | class API(Vault): 11 | """Key Vault using a simple RESTful HTTP API call.""" 12 | 13 | def __init__(self, name: str, uri: str, token: str): 14 | super().__init__(name) 15 | self.uri = uri.rstrip("/") 16 | self.session = Session() 17 | self.session.headers.update({ 18 | "User-Agent": f"Devine v{__version__}" 19 | }) 20 | self.session.headers.update({ 21 | "Authorization": f"Bearer {token}" 22 | }) 23 | 24 | def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]: 25 | if isinstance(kid, UUID): 26 | kid = kid.hex 27 | 28 | data = self.session.get( 29 | url=f"{self.uri}/{service.lower()}/{kid}", 30 | headers={ 31 | "Accept": "application/json" 32 | } 33 | ).json() 34 | 35 | code = int(data.get("code", 0)) 36 | message = data.get("message") 37 | error = { 38 | 0: None, 39 | 1: Exceptions.AuthRejected, 40 | 2: Exceptions.TooManyRequests, 41 | 3: Exceptions.ServiceTagInvalid, 42 | 4: Exceptions.KeyIdInvalid 43 | }.get(code, ValueError) 44 | 45 | if error: 46 | raise error(f"{message} ({code})") 47 | 48 | content_key = data.get("content_key") 49 | if not content_key: 50 | return None 51 | 52 | if not isinstance(content_key, str): 53 | raise ValueError(f"Expected {content_key} to be {str}, was {type(content_key)}") 54 | 55 | return content_key 56 | 57 | def get_keys(self, service: str) -> Iterator[tuple[str, str]]: 58 | 
page = 1 59 | 60 | while True: 61 | data = self.session.get( 62 | url=f"{self.uri}/{service.lower()}", 63 | params={ 64 | "page": page, 65 | "total": 10 66 | }, 67 | headers={ 68 | "Accept": "application/json" 69 | } 70 | ).json() 71 | 72 | code = int(data.get("code", 0)) 73 | message = data.get("message") 74 | error = { 75 | 0: None, 76 | 1: Exceptions.AuthRejected, 77 | 2: Exceptions.TooManyRequests, 78 | 3: Exceptions.PageInvalid, 79 | 4: Exceptions.ServiceTagInvalid, 80 | }.get(code, ValueError) 81 | 82 | if error: 83 | raise error(f"{message} ({code})") 84 | 85 | content_keys = data.get("content_keys") 86 | if content_keys: 87 | if not isinstance(content_keys, dict): 88 | raise ValueError(f"Expected {content_keys} to be {dict}, was {type(content_keys)}") 89 | 90 | for key_id, key in content_keys.items(): 91 | yield key_id, key 92 | 93 | pages = int(data["pages"]) 94 | if pages <= page: 95 | break 96 | 97 | page += 1 98 | 99 | def add_key(self, service: str, kid: Union[UUID, str], key: str) -> bool: 100 | if isinstance(kid, UUID): 101 | kid = kid.hex 102 | 103 | data = self.session.post( 104 | url=f"{self.uri}/{service.lower()}/{kid}", 105 | json={ 106 | "content_key": key 107 | }, 108 | headers={ 109 | "Accept": "application/json" 110 | } 111 | ).json() 112 | 113 | code = int(data.get("code", 0)) 114 | message = data.get("message") 115 | error = { 116 | 0: None, 117 | 1: Exceptions.AuthRejected, 118 | 2: Exceptions.TooManyRequests, 119 | 3: Exceptions.ServiceTagInvalid, 120 | 4: Exceptions.KeyIdInvalid, 121 | 5: Exceptions.ContentKeyInvalid 122 | }.get(code, ValueError) 123 | 124 | if error: 125 | raise error(f"{message} ({code})") 126 | 127 | # the kid:key was new to the vault (optional) 128 | added = bool(data.get("added")) 129 | # the key for kid was changed/updated (optional) 130 | updated = bool(data.get("updated")) 131 | 132 | return added or updated 133 | 134 | def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int: 135 | data = 
self.session.post( 136 | url=f"{self.uri}/{service.lower()}", 137 | json={ 138 | "content_keys": { 139 | str(kid).replace("-", ""): key 140 | for kid, key in kid_keys.items() 141 | } 142 | }, 143 | headers={ 144 | "Accept": "application/json" 145 | } 146 | ).json() 147 | 148 | code = int(data.get("code", 0)) 149 | message = data.get("message") 150 | error = { 151 | 0: None, 152 | 1: Exceptions.AuthRejected, 153 | 2: Exceptions.TooManyRequests, 154 | 3: Exceptions.ServiceTagInvalid, 155 | 4: Exceptions.KeyIdInvalid, 156 | 5: Exceptions.ContentKeyInvalid 157 | }.get(code, ValueError) 158 | 159 | if error: 160 | raise error(f"{message} ({code})") 161 | 162 | # each kid:key that was new to the vault (optional) 163 | added = int(data.get("added")) 164 | # each key for a kid that was changed/updated (optional) 165 | updated = int(data.get("updated")) 166 | 167 | return added + updated 168 | 169 | def get_services(self) -> Iterator[str]: 170 | data = self.session.post( 171 | url=self.uri, 172 | headers={ 173 | "Accept": "application/json" 174 | } 175 | ).json() 176 | 177 | code = int(data.get("code", 0)) 178 | message = data.get("message") 179 | error = { 180 | 0: None, 181 | 1: Exceptions.AuthRejected, 182 | 2: Exceptions.TooManyRequests, 183 | }.get(code, ValueError) 184 | 185 | if error: 186 | raise error(f"{message} ({code})") 187 | 188 | service_list = data.get("service_list", []) 189 | 190 | if not isinstance(service_list, list): 191 | raise ValueError(f"Expected {service_list} to be {list}, was {type(service_list)}") 192 | 193 | for service in service_list: 194 | yield service 195 | 196 | 197 | class Exceptions: 198 | class AuthRejected(Exception): 199 | """Authentication Error Occurred, is your token valid? 
Do you have permission to make this call?""" 200 | 201 | class TooManyRequests(Exception): 202 | """Rate Limited; Sent too many requests in a given amount of time.""" 203 | 204 | class PageInvalid(Exception): 205 | """Requested page does not exist.""" 206 | 207 | class ServiceTagInvalid(Exception): 208 | """The Service Tag is invalid.""" 209 | 210 | class KeyIdInvalid(Exception): 211 | """The Key ID is invalid.""" 212 | 213 | class ContentKeyInvalid(Exception): 214 | """The Content Key is invalid.""" 215 | -------------------------------------------------------------------------------- /devine/vaults/MySQL.py: -------------------------------------------------------------------------------- 1 | import threading 2 | from typing import Iterator, Optional, Union 3 | from uuid import UUID 4 | 5 | import pymysql 6 | from pymysql.cursors import DictCursor 7 | 8 | from devine.core.services import Services 9 | from devine.core.vault import Vault 10 | 11 | 12 | class MySQL(Vault): 13 | """Key Vault using a remotely-accessed mysql database connection.""" 14 | 15 | def __init__(self, name: str, host: str, database: str, username: str, **kwargs): 16 | """ 17 | All extra arguments provided via **kwargs will be sent to pymysql.connect. 18 | This can be used to provide more specific connection information. 
19 | """ 20 | super().__init__(name) 21 | self.slug = f"{host}:{database}:{username}" 22 | self.conn_factory = ConnectionFactory(dict( 23 | host=host, 24 | db=database, 25 | user=username, 26 | cursorclass=DictCursor, 27 | **kwargs 28 | )) 29 | 30 | self.permissions = self.get_permissions() 31 | if not self.has_permission("SELECT"): 32 | raise PermissionError(f"MySQL vault {self.slug} has no SELECT permission.") 33 | 34 | def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]: 35 | if not self.has_table(service): 36 | # no table, no key, simple 37 | return None 38 | 39 | if isinstance(kid, UUID): 40 | kid = kid.hex 41 | 42 | conn = self.conn_factory.get() 43 | cursor = conn.cursor() 44 | 45 | try: 46 | cursor.execute( 47 | # TODO: SQL injection risk 48 | f"SELECT `id`, `key_` FROM `{service}` WHERE `kid`=%s AND `key_`!=%s", 49 | (kid, "0" * 32) 50 | ) 51 | cek = cursor.fetchone() 52 | if not cek: 53 | return None 54 | return cek["key_"] 55 | finally: 56 | cursor.close() 57 | 58 | def get_keys(self, service: str) -> Iterator[tuple[str, str]]: 59 | if not self.has_table(service): 60 | # no table, no keys, simple 61 | return None 62 | 63 | conn = self.conn_factory.get() 64 | cursor = conn.cursor() 65 | 66 | try: 67 | cursor.execute( 68 | # TODO: SQL injection risk 69 | f"SELECT `kid`, `key_` FROM `{service}` WHERE `key_`!=%s", 70 | ("0" * 32,) 71 | ) 72 | for row in cursor.fetchall(): 73 | yield row["kid"], row["key_"] 74 | finally: 75 | cursor.close() 76 | 77 | def add_key(self, service: str, kid: Union[UUID, str], key: str) -> bool: 78 | if not key or key.count("0") == len(key): 79 | raise ValueError("You cannot add a NULL Content Key to a Vault.") 80 | 81 | if not self.has_permission("INSERT", table=service): 82 | raise PermissionError(f"MySQL vault {self.slug} has no INSERT permission.") 83 | 84 | if not self.has_table(service): 85 | try: 86 | self.create_table(service) 87 | except PermissionError: 88 | return False 89 | 90 | if isinstance(kid, 
UUID): 91 | kid = kid.hex 92 | 93 | conn = self.conn_factory.get() 94 | cursor = conn.cursor() 95 | 96 | try: 97 | cursor.execute( 98 | # TODO: SQL injection risk 99 | f"SELECT `id` FROM `{service}` WHERE `kid`=%s AND `key_`=%s", 100 | (kid, key) 101 | ) 102 | if cursor.fetchone(): 103 | # table already has this exact KID:KEY stored 104 | return True 105 | cursor.execute( 106 | # TODO: SQL injection risk 107 | f"INSERT INTO `{service}` (kid, key_) VALUES (%s, %s)", 108 | (kid, key) 109 | ) 110 | finally: 111 | conn.commit() 112 | cursor.close() 113 | 114 | return True 115 | 116 | def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int: 117 | for kid, key in kid_keys.items(): 118 | if not key or key.count("0") == len(key): 119 | raise ValueError("You cannot add a NULL Content Key to a Vault.") 120 | 121 | if not self.has_permission("INSERT", table=service): 122 | raise PermissionError(f"MySQL vault {self.slug} has no INSERT permission.") 123 | 124 | if not self.has_table(service): 125 | try: 126 | self.create_table(service) 127 | except PermissionError: 128 | return 0 129 | 130 | if not isinstance(kid_keys, dict): 131 | raise ValueError(f"The kid_keys provided is not a dictionary, {kid_keys!r}") 132 | if not all(isinstance(kid, (str, UUID)) and isinstance(key_, str) for kid, key_ in kid_keys.items()): 133 | raise ValueError("Expecting dict with Key of str/UUID and value of str.") 134 | 135 | if any(isinstance(kid, UUID) for kid, key_ in kid_keys.items()): 136 | kid_keys = { 137 | kid.hex if isinstance(kid, UUID) else kid: key_ 138 | for kid, key_ in kid_keys.items() 139 | } 140 | 141 | conn = self.conn_factory.get() 142 | cursor = conn.cursor() 143 | 144 | try: 145 | cursor.executemany( 146 | # TODO: SQL injection risk 147 | f"INSERT IGNORE INTO `{service}` (kid, key_) VALUES (%s, %s)", 148 | kid_keys.items() 149 | ) 150 | return cursor.rowcount 151 | finally: 152 | conn.commit() 153 | cursor.close() 154 | 155 | def get_services(self) -> 
Iterator[str]: 156 | conn = self.conn_factory.get() 157 | cursor = conn.cursor() 158 | 159 | try: 160 | cursor.execute("SHOW TABLES") 161 | for table in cursor.fetchall(): 162 | # each entry has a key named `Tables_in_` 163 | yield Services.get_tag(list(table.values())[0]) 164 | finally: 165 | cursor.close() 166 | 167 | def has_table(self, name: str) -> bool: 168 | """Check if the Vault has a Table with the specified name.""" 169 | conn = self.conn_factory.get() 170 | cursor = conn.cursor() 171 | 172 | try: 173 | cursor.execute( 174 | "SELECT count(TABLE_NAME) FROM information_schema.TABLES WHERE TABLE_SCHEMA=%s AND TABLE_NAME=%s", 175 | (conn.db, name) 176 | ) 177 | return list(cursor.fetchone().values())[0] == 1 178 | finally: 179 | cursor.close() 180 | 181 | def create_table(self, name: str): 182 | """Create a Table with the specified name if not yet created.""" 183 | if self.has_table(name): 184 | return 185 | 186 | if not self.has_permission("CREATE"): 187 | raise PermissionError(f"MySQL vault {self.slug} has no CREATE permission.") 188 | 189 | conn = self.conn_factory.get() 190 | cursor = conn.cursor() 191 | 192 | try: 193 | cursor.execute( 194 | # TODO: SQL injection risk 195 | f""" 196 | CREATE TABLE IF NOT EXISTS {name} ( 197 | id int AUTO_INCREMENT PRIMARY KEY, 198 | kid VARCHAR(255) NOT NULL, 199 | key_ VARCHAR(255) NOT NULL, 200 | UNIQUE(kid, key_) 201 | ); 202 | """ 203 | ) 204 | finally: 205 | conn.commit() 206 | cursor.close() 207 | 208 | def get_permissions(self) -> list: 209 | """Get and parse Grants to a more easily usable list tuple array.""" 210 | conn = self.conn_factory.get() 211 | cursor = conn.cursor() 212 | 213 | try: 214 | cursor.execute("SHOW GRANTS") 215 | grants = cursor.fetchall() 216 | grants = [next(iter(x.values())) for x in grants] 217 | grants = [tuple(x[6:].split(" TO ")[0].split(" ON ")) for x in list(grants)] 218 | grants = [( 219 | list(map(str.strip, perms.replace("ALL PRIVILEGES", "*").split(","))), 220 | 
location.replace("`", "").split(".") 221 | ) for perms, location in grants] 222 | return grants 223 | finally: 224 | conn.commit() 225 | cursor.close() 226 | 227 | def has_permission(self, operation: str, database: Optional[str] = None, table: Optional[str] = None) -> bool: 228 | """Check if the current connection has a specific permission.""" 229 | grants = [x for x in self.permissions if x[0] == ["*"] or operation.upper() in x[0]] 230 | if grants and database: 231 | grants = [x for x in grants if x[1][0] in (database, "*")] 232 | if grants and table: 233 | grants = [x for x in grants if x[1][1] in (table, "*")] 234 | return bool(grants) 235 | 236 | 237 | class ConnectionFactory: 238 | def __init__(self, con: dict): 239 | self._con = con 240 | self._store = threading.local() 241 | 242 | def _create_connection(self) -> pymysql.Connection: 243 | return pymysql.connect(**self._con) 244 | 245 | def get(self) -> pymysql.Connection: 246 | if not hasattr(self._store, "conn"): 247 | self._store.conn = self._create_connection() 248 | return self._store.conn 249 | -------------------------------------------------------------------------------- /devine/vaults/SQLite.py: -------------------------------------------------------------------------------- 1 | import sqlite3 2 | import threading 3 | from pathlib import Path 4 | from sqlite3 import Connection 5 | from typing import Iterator, Optional, Union 6 | from uuid import UUID 7 | 8 | from devine.core.services import Services 9 | from devine.core.vault import Vault 10 | 11 | 12 | class SQLite(Vault): 13 | """Key Vault using a locally-accessed sqlite DB file.""" 14 | 15 | def __init__(self, name: str, path: Union[str, Path]): 16 | super().__init__(name) 17 | self.path = Path(path).expanduser() 18 | # TODO: Use a DictCursor or such to get fetches as dict? 
def get_key(self, kid: Union[UUID, str], service: str) -> Optional[str]:
    """
    Get a Content Key by its Key ID from the service's table.

    Returns None if the service has no table, or no matching non-NULL
    (all-zero) key is stored.
    """
    if not self.has_table(service):
        # no table, no key, simple
        return None

    if isinstance(kid, UUID):
        kid = kid.hex

    conn = self.conn_factory.get()
    cursor = conn.cursor()

    try:
        cursor.execute(
            # NOTE: the service name is interpolated as an identifier and must
            # be a trusted service tag (SQL injection risk otherwise)
            f"SELECT `id`, `key_` FROM `{service}` WHERE `kid`=? AND `key_`!=?",
            (kid, "0" * 32)
        )
        cek = cursor.fetchone()
        if not cek:
            return None
        return cek[1]
    finally:
        cursor.close()

def get_keys(self, service: str) -> Iterator[tuple[str, str]]:
    """Yield every (kid, key) pair stored for the service, skipping NULL keys."""
    if not self.has_table(service):
        # no table, no keys, simple
        return

    conn = self.conn_factory.get()
    cursor = conn.cursor()

    try:
        cursor.execute(
            # NOTE: service name interpolated as an identifier; must be trusted
            f"SELECT `kid`, `key_` FROM `{service}` WHERE `key_`!=?",
            ("0" * 32,)
        )
        for (kid, key_) in cursor.fetchall():
            yield kid, key_
    finally:
        cursor.close()

def add_key(self, service: str, kid: Union[UUID, str], key: str) -> bool:
    """
    Store a single Content Key for a service, creating its table if needed.

    Returns True whether the key was inserted or was already present.
    Raises ValueError for an empty or all-zero (NULL) key.
    """
    if not key or key.count("0") == len(key):
        raise ValueError("You cannot add a NULL Content Key to a Vault.")

    if not self.has_table(service):
        self.create_table(service)

    if isinstance(kid, UUID):
        kid = kid.hex

    conn = self.conn_factory.get()
    cursor = conn.cursor()

    try:
        cursor.execute(
            # NOTE: service name interpolated as an identifier; must be trusted
            f"SELECT `id` FROM `{service}` WHERE `kid`=? AND `key_`=?",
            (kid, key)
        )
        if cursor.fetchone():
            # table already has this exact KID:KEY stored, nothing to commit
            return True
        cursor.execute(
            f"INSERT INTO `{service}` (kid, key_) VALUES (?, ?)",
            (kid, key)
        )
        # commit only after a successful insert, not on error paths
        conn.commit()
    finally:
        cursor.close()

    return True

def add_keys(self, service: str, kid_keys: dict[Union[UUID, str], str]) -> int:
    """
    Store multiple Content Keys for a service in one operation.

    Duplicates already in the table are ignored. Returns the number of
    rows actually inserted. Raises ValueError on a non-dict argument,
    badly-typed entries, or any NULL (all-zero) key.
    """
    # validate the mapping itself before touching its items, otherwise a
    # non-dict argument would raise AttributeError instead of ValueError,
    # and validate everything before creating the service table
    if not isinstance(kid_keys, dict):
        raise ValueError(f"The kid_keys provided is not a dictionary, {kid_keys!r}")
    if not all(isinstance(kid, (str, UUID)) and isinstance(key_, str) for kid, key_ in kid_keys.items()):
        raise ValueError("Expecting dict with Key of str/UUID and value of str.")
    for kid, key in kid_keys.items():
        if not key or key.count("0") == len(key):
            raise ValueError("You cannot add a NULL Content Key to a Vault.")

    if not self.has_table(service):
        self.create_table(service)

    # normalize UUID kids to their 32-char hex form
    kid_keys = {
        kid.hex if isinstance(kid, UUID) else kid: key_
        for kid, key_ in kid_keys.items()
    }

    conn = self.conn_factory.get()
    cursor = conn.cursor()

    try:
        cursor.executemany(
            # NOTE: service name interpolated as an identifier; must be trusted
            f"INSERT OR IGNORE INTO `{service}` (kid, key_) VALUES (?, ?)",
            kid_keys.items()
        )
        conn.commit()
        return cursor.rowcount
    finally:
        cursor.close()

def get_services(self) -> Iterator[str]:
    """Yield the canonical service tag of every service table in the Vault."""
    conn = self.conn_factory.get()
    cursor = conn.cursor()

    try:
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
        for (name,) in cursor.fetchall():
            if name != "sqlite_sequence":
                # normalize the raw table name to its service tag
                yield Services.get_tag(name)
    finally:
        cursor.close()
def create_table(self, name: str) -> None:
    """Create a service table with the specified name if not yet created."""
    if self.has_table(name):
        return

    conn = self.conn_factory.get()
    cursor = conn.cursor()

    try:
        cursor.execute(
            # NOTE: `name` is interpolated as an identifier — now backtick-quoted
            # for consistency with every other query in this vault; it must be a
            # trusted service tag (SQL injection risk otherwise)
            f"""
            CREATE TABLE IF NOT EXISTS `{name}` (
                "id" INTEGER NOT NULL UNIQUE,
                "kid" TEXT NOT NULL COLLATE NOCASE,
                "key_" TEXT NOT NULL COLLATE NOCASE,
                PRIMARY KEY("id" AUTOINCREMENT),
                UNIQUE("kid", "key_")
            );
            """
        )
        # commit only after the DDL succeeds, not on error paths
        conn.commit()
    finally:
        cursor.close()


class ConnectionFactory:
    """
    Thread-local sqlite3 connection factory.

    By default sqlite3 connections may only be used on the thread that
    created them, so each thread lazily gets its own connection.
    """

    def __init__(self, path: Union[str, Path]):
        self._path = path
        self._store = threading.local()

    def _create_connection(self) -> Connection:
        # one fresh connection per calling thread
        return sqlite3.connect(self._path)

    def get(self) -> Connection:
        """Return this thread's connection, creating it on first use."""
        if not hasattr(self._store, "conn"):
            self._store.conn = self._create_connection()
        return self._store.conn
description = "Modular Movie, TV, and Music Archival Software." 9 | license = "GPL-3.0-only" 10 | authors = ["rlaphoenix "] 11 | readme = "README.md" 12 | homepage = "https://github.com/devine-dl/devine" 13 | repository = "https://github.com/devine-dl/devine" 14 | keywords = ["python", "downloader", "drm", "widevine"] 15 | classifiers = [ 16 | "Development Status :: 4 - Beta", 17 | "Environment :: Console", 18 | "Intended Audience :: End Users/Desktop", 19 | "Natural Language :: English", 20 | "Operating System :: OS Independent", 21 | "Topic :: Multimedia :: Video", 22 | "Topic :: Security :: Cryptography", 23 | ] 24 | include = [ 25 | { path = "CHANGELOG.md", format = "sdist" }, 26 | { path = "README.md", format = "sdist" }, 27 | { path = "LICENSE", format = "sdist" }, 28 | ] 29 | 30 | [tool.poetry.urls] 31 | "Issues" = "https://github.com/devine-dl/devine/issues" 32 | "Discussions" = "https://github.com/devine-dl/devine/discussions" 33 | "Changelog" = "https://github.com/devine-dl/devine/blob/master/CHANGELOG.md" 34 | 35 | [tool.poetry.dependencies] 36 | python = ">=3.9,<4.0" 37 | appdirs = "^1.4.4" 38 | Brotli = "^1.1.0" 39 | click = "^8.1.7" 40 | construct = "^2.8.8" 41 | crccheck = "^1.3.0" 42 | jsonpickle = "^3.0.4" 43 | langcodes = { extras = ["data"], version = "^3.4.0" } 44 | lxml = "^5.2.1" 45 | pproxy = "^2.7.9" 46 | protobuf = "^4.25.3" 47 | pycaption = "^2.2.6" 48 | pycryptodomex = "^3.20.0" 49 | pyjwt = "^2.8.0" 50 | pymediainfo = "^6.1.0" 51 | pymp4 = "^1.4.0" 52 | pymysql = "^1.1.0" 53 | pywidevine = { extras = ["serve"], version = "^1.8.0" } 54 | PyYAML = "^6.0.1" 55 | requests = { extras = ["socks"], version = "^2.31.0" } 56 | rich = "^13.7.1" 57 | "rlaphoenix.m3u8" = "^3.4.0" 58 | "ruamel.yaml" = "^0.18.6" 59 | sortedcontainers = "^2.4.0" 60 | subtitle-filter = "^1.4.9" 61 | Unidecode = "^1.3.8" 62 | urllib3 = "^2.2.1" 63 | chardet = "^5.2.0" 64 | curl-cffi = "^0.7.0b4" 65 | 66 | [tool.poetry.dev-dependencies] 67 | pre-commit = "^3.7.0" 68 | 
mypy = "^1.9.0" 69 | mypy-protobuf = "^3.6.0" 70 | types-protobuf = "^4.24.0.20240408" 71 | types-PyMySQL = "^1.1.0.1" 72 | types-requests = "^2.31.0.20240406" 73 | isort = "^5.13.2" 74 | ruff = "~0.3.7" 75 | 76 | [tool.poetry.scripts] 77 | devine = "devine.core.__main__:main" 78 | 79 | [tool.ruff] 80 | force-exclude = true 81 | line-length = 120 82 | 83 | [tool.ruff.lint] 84 | select = ["E4", "E7", "E9", "F", "W"] 85 | 86 | [tool.isort] 87 | line_length = 118 88 | 89 | [tool.mypy] 90 | check_untyped_defs = true 91 | disallow_incomplete_defs = true 92 | disallow_untyped_defs = true 93 | follow_imports = "silent" 94 | ignore_missing_imports = true 95 | no_implicit_optional = true 96 | --------------------------------------------------------------------------------